hunk (dict) | file (string, lengths 0-11.8M) | file_path (string, lengths 2-234) | label (int64, 0-1) | commit_url (string, lengths 74-103) | dependency_score (list, length 5) |
---|---|---|---|---|---|
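Each row below pairs one edit hunk with a candidate file from the same commit. A minimal sketch of the row schema in TypeScript follows; the field names come from the header and the example rows, while the concrete types (and the reading of `label` as a relevance flag) are inferred, not documented.

```ts
// Sketch of the row schema inferred from the header above.
// Types are assumptions; only the field names are given by the data.
interface EditHunk {
  id: number;
  code_window: string[];                      // context lines around the edit
  labels: Array<'keep' | 'replace' | 'add'>;  // one label per context line
  after_edit: string[];                       // lines that replace or extend the window
  file_path: string;                          // file the edit applies to
  type: 'replace' | 'add';
  edit_start_line_idx: number;                // 0-based line where the edit lands
}

interface DatasetRow {
  hunk: EditHunk;
  file: string;               // full text of a candidate file (0 to ~11.8M chars)
  file_path: string;          // path of the candidate file
  label: 0 | 1;               // 1 when the candidate file is judged relevant to the hunk
  commit_url: string;         // commit the hunk was mined from
  dependency_score: number[]; // always 5 similarity scores per row
}
```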
{
"id": 3,
"code_window": [
"/// <reference path='fourslash.ts' />\n",
"\n",
"// @Filename: justAComment.ts\n",
"//// /* /*0*/ */\n",
"\n",
"verify.noDocCommentTemplateAt(\"0\");\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"verify.emptyDocCommentTemplateAt(\"0\");"
],
"file_path": "tests/cases/fourslash/docCommentTemplateInMultiLineComment.ts",
"type": "replace",
"edit_start_line_idx": 5
} | tests/cases/conformance/types/keyof/circularIndexedAccessErrors.ts(2,5): error TS2502: 'x' is referenced directly or indirectly in its own type annotation.
tests/cases/conformance/types/keyof/circularIndexedAccessErrors.ts(6,5): error TS2502: 'x' is referenced directly or indirectly in its own type annotation.
tests/cases/conformance/types/keyof/circularIndexedAccessErrors.ts(18,5): error TS2502: 'x' is referenced directly or indirectly in its own type annotation.
tests/cases/conformance/types/keyof/circularIndexedAccessErrors.ts(22,5): error TS2502: 'x' is referenced directly or indirectly in its own type annotation.
tests/cases/conformance/types/keyof/circularIndexedAccessErrors.ts(37,24): error TS2313: Type parameter 'T' has a circular constraint.
tests/cases/conformance/types/keyof/circularIndexedAccessErrors.ts(37,30): error TS2536: Type '"hello"' cannot be used to index type 'T'.
==== tests/cases/conformance/types/keyof/circularIndexedAccessErrors.ts (6 errors) ====
type T1 = {
x: T1["x"]; // Error
~~~~~~~~~~~
!!! error TS2502: 'x' is referenced directly or indirectly in its own type annotation.
};
type T2<K extends "x" | "y"> = {
x: T2<K>[K]; // Error
~~~~~~~~~~~~
!!! error TS2502: 'x' is referenced directly or indirectly in its own type annotation.
y: number;
}
declare let x2: T2<"x">;
let x2x = x2.x;
interface T3<T extends T3<T>> {
x: T["x"];
}
interface T4<T extends T4<T>> {
x: T4<T>["x"]; // Error
~~~~~~~~~~~~~~
!!! error TS2502: 'x' is referenced directly or indirectly in its own type annotation.
}
class C1 {
x: C1["x"]; // Error
~~~~~~~~~~~
!!! error TS2502: 'x' is referenced directly or indirectly in its own type annotation.
}
class C2 {
x: this["y"];
y: this["z"];
z: this["x"];
}
// Repro from #12627
interface Foo {
hello: boolean;
}
function foo<T extends Foo | T["hello"]>() {
~~~~~~~~~~~~~~~~
!!! error TS2313: Type parameter 'T' has a circular constraint.
~~~~~~~~~~
!!! error TS2536: Type '"hello"' cannot be used to index type 'T'.
}
| tests/baselines/reference/circularIndexedAccessErrors.errors.txt | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.0001790691021597013,
0.00017218240827787668,
0.0001677027903497219,
0.000171673993463628,
0.000003293246436442132
]
|
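To make the hunk fields concrete, here is a hypothetical helper showing how they fit together: `code_window` lines labelled "replace" are swapped for `after_edit` starting at `edit_start_line_idx` of the target file, while type "add" inserts without removing. This is an illustration under the schema sketched above, not the dataset's reference implementation.

```ts
// Hypothetical application of a hunk to its target file's lines.
// Assumes the EditHunk interface sketched above; illustration only.
function applyHunk(fileLines: string[], hunk: EditHunk): string[] {
  const out = fileLines.slice();
  // Count how many window lines the edit removes ("add" removes none).
  const removed =
    hunk.type === 'replace'
      ? hunk.labels.filter((label) => label === 'replace').length
      : 0;
  out.splice(
    hunk.edit_start_line_idx,
    removed,
    // after_edit lines carry trailing newlines in the data; strip them here.
    ...hunk.after_edit.map((line) => line.replace(/\n$/, ''))
  );
  return out;
}
```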
{
"id": 3,
"code_window": [
"/// <reference path='fourslash.ts' />\n",
"\n",
"// @Filename: justAComment.ts\n",
"//// /* /*0*/ */\n",
"\n",
"verify.noDocCommentTemplateAt(\"0\");\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"verify.emptyDocCommentTemplateAt(\"0\");"
],
"file_path": "tests/cases/fourslash/docCommentTemplateInMultiLineComment.ts",
"type": "replace",
"edit_start_line_idx": 5
} | tests/cases/conformance/jsx/file.tsx(12,5): error TS2339: Property 'other' does not exist on type 'typeof my'.
tests/cases/conformance/jsx/file.tsx(19,11): error TS2339: Property 'non' does not exist on type 'typeof my'.
==== tests/cases/conformance/jsx/file.tsx (2 errors) ====
declare module JSX {
interface Element { }
interface IntrinsicElements { }
}
module my {
export var div: any;
}
// OK
<my.div n='x' />;
// Error
<my.other />;
~~~~~
!!! error TS2339: Property 'other' does not exist on type 'typeof my'.
module q {
import mine = my;
// OK
<mine.div n='x' />;
// Error
<mine.non />;
~~~
!!! error TS2339: Property 'non' does not exist on type 'typeof my'.
}
| tests/baselines/reference/tsxElementResolution7.errors.txt | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.0001790691021597013,
0.00017405110702384263,
0.00017142173601314425,
0.00017285678768530488,
0.00000295842187369999
]
|
{
"id": 4,
"code_window": [
"//// // /*2*/\n",
"\n",
"for (const marker of test.markers()) {\n",
" verify.noDocCommentTemplateAt(marker);\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
" verify.emptyDocCommentTemplateAt(marker);\n"
],
"file_path": "tests/cases/fourslash/docCommentTemplateInSingleLineComment.ts",
"type": "replace",
"edit_start_line_idx": 11
} | /// <reference path='fourslash.ts' />
/////*top*/
////namespace n1.
//// /*n2*/ n2.
//// /*n3*/ n3 {
////}
verify.docCommentTemplateAt("top", /*indentation*/ 8,
`/**
*
*/`);
verify.noDocCommentTemplateAt("n2");
verify.noDocCommentTemplateAt("n3");
| tests/cases/fourslash/docCommentTemplateNamespacesAndModules02.ts | 1 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.001618849695660174,
0.001183712505735457,
0.00074857531581074,
0.001183712505735457,
0.00043513718992471695
]
|
{
"id": 4,
"code_window": [
"//// // /*2*/\n",
"\n",
"for (const marker of test.markers()) {\n",
" verify.noDocCommentTemplateAt(marker);\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
" verify.emptyDocCommentTemplateAt(marker);\n"
],
"file_path": "tests/cases/fourslash/docCommentTemplateInSingleLineComment.ts",
"type": "replace",
"edit_start_line_idx": 11
} | EmitSkipped: false
FileName : sample/outDir/inputFile1.js.map
{"version":3,"file":"inputFile1.js","sourceRoot":"","sources":["../../tests/cases/fourslash/inputFile1.ts"],"names":[],"mappings":"AAAA,IAAI,CAAC,GAAG,GAAG,CAAC;AACZ,IAAI,GAAG,GAAG,aAAa,CAAC;AACxB;IAAA;IAGA,CAAC;IAAD,QAAC;AAAD,CAAC,AAHD,IAGC"}FileName : sample/outDir/inputFile1.js
var x = 109;
var foo = "hello world";
var M = /** @class */ (function () {
function M() {
}
return M;
}());
//# sourceMappingURL=inputFile1.js.map
EmitSkipped: false
FileName : sample/outDir/inputFile2.js.map
{"version":3,"file":"inputFile2.js","sourceRoot":"","sources":["../../tests/cases/fourslash/inputFile2.ts"],"names":[],"mappings":"AAAA,IAAI,KAAK,GAAG,aAAa,CAAC;AAC1B,EAAE,CAAC,CAAC,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC;IACvB,IAAI,CAAC,GAAG,EAAE,CAAC;AACd,CAAC"}FileName : sample/outDir/inputFile2.js
var intro = "hello world";
if (intro !== undefined) {
var k = 10;
}
//# sourceMappingURL=inputFile2.js.map
| tests/baselines/reference/getEmitOutputSourceMap2.baseline | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.00017532662604935467,
0.00017260453023482114,
0.0001698824344202876,
0.00017260453023482114,
0.0000027220958145335317
]
|
{
"id": 4,
"code_window": [
"//// // /*2*/\n",
"\n",
"for (const marker of test.markers()) {\n",
" verify.noDocCommentTemplateAt(marker);\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
" verify.emptyDocCommentTemplateAt(marker);\n"
],
"file_path": "tests/cases/fourslash/docCommentTemplateInSingleLineComment.ts",
"type": "replace",
"edit_start_line_idx": 11
} | ===================================================================
JsFile: test.js
mapUrl: ../../mapFiles/test.js.map
sourceRoot:
sources: ../outputdir_singleFile/test.ts
===================================================================
-------------------------------------------------------------------
emittedFile:bin/test.js
sourceFile:../outputdir_singleFile/test.ts
-------------------------------------------------------------------
>>>var a1 = 10;
1 >
2 >^^^^
3 > ^^
4 > ^^^
5 > ^^
6 > ^
7 > ^^^^^^^^^^^^->
1 >
2 >var
3 > a1
4 > =
5 > 10
6 > ;
1 >Emitted(1, 1) Source(1, 1) + SourceIndex(0)
2 >Emitted(1, 5) Source(1, 5) + SourceIndex(0)
3 >Emitted(1, 7) Source(1, 7) + SourceIndex(0)
4 >Emitted(1, 10) Source(1, 10) + SourceIndex(0)
5 >Emitted(1, 12) Source(1, 12) + SourceIndex(0)
6 >Emitted(1, 13) Source(1, 13) + SourceIndex(0)
---
>>>var c1 = (function () {
1->
2 >^^^^^^^^^^^^^^^^^^^^->
1->
>
1->Emitted(2, 1) Source(2, 1) + SourceIndex(0)
---
>>> function c1() {
1->^^^^
2 > ^^->
1->
1->Emitted(3, 5) Source(2, 1) + SourceIndex(0)
---
>>> }
1->^^^^
2 > ^
3 > ^^^^^^^^^^->
1->class c1 {
> public p1: number;
>
2 > }
1->Emitted(4, 5) Source(4, 1) + SourceIndex(0)
2 >Emitted(4, 6) Source(4, 2) + SourceIndex(0)
---
>>> return c1;
1->^^^^
2 > ^^^^^^^^^
1->
2 > }
1->Emitted(5, 5) Source(4, 1) + SourceIndex(0)
2 >Emitted(5, 14) Source(4, 2) + SourceIndex(0)
---
>>>}());
1 >
2 >^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^^^^^->
1 >
2 >}
3 >
4 > class c1 {
> public p1: number;
> }
1 >Emitted(6, 1) Source(4, 1) + SourceIndex(0)
2 >Emitted(6, 2) Source(4, 2) + SourceIndex(0)
3 >Emitted(6, 2) Source(2, 1) + SourceIndex(0)
4 >Emitted(6, 6) Source(4, 2) + SourceIndex(0)
---
>>>var instance1 = new c1();
1->
2 >^^^^
3 > ^^^^^^^^^
4 > ^^^
5 > ^^^^
6 > ^^
7 > ^^
8 > ^
1->
>
>
2 >var
3 > instance1
4 > =
5 > new
6 > c1
7 > ()
8 > ;
1->Emitted(7, 1) Source(6, 1) + SourceIndex(0)
2 >Emitted(7, 5) Source(6, 5) + SourceIndex(0)
3 >Emitted(7, 14) Source(6, 14) + SourceIndex(0)
4 >Emitted(7, 17) Source(6, 17) + SourceIndex(0)
5 >Emitted(7, 21) Source(6, 21) + SourceIndex(0)
6 >Emitted(7, 23) Source(6, 23) + SourceIndex(0)
7 >Emitted(7, 25) Source(6, 25) + SourceIndex(0)
8 >Emitted(7, 26) Source(6, 26) + SourceIndex(0)
---
>>>function f1() {
1 >
2 >^^^^^^^^^^^^^^^^^^^^^^->
1 >
>
1 >Emitted(8, 1) Source(7, 1) + SourceIndex(0)
---
>>> return instance1;
1->^^^^
2 > ^^^^^^
3 > ^
4 > ^^^^^^^^^
5 > ^
1->function f1() {
>
2 > return
3 >
4 > instance1
5 > ;
1->Emitted(9, 5) Source(8, 5) + SourceIndex(0)
2 >Emitted(9, 11) Source(8, 11) + SourceIndex(0)
3 >Emitted(9, 12) Source(8, 12) + SourceIndex(0)
4 >Emitted(9, 21) Source(8, 21) + SourceIndex(0)
5 >Emitted(9, 22) Source(8, 22) + SourceIndex(0)
---
>>>}
1 >
2 >^
3 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
>
2 >}
1 >Emitted(10, 1) Source(9, 1) + SourceIndex(0)
2 >Emitted(10, 2) Source(9, 2) + SourceIndex(0)
---
>>>//# sourceMappingURL=../../mapFiles/test.js.map | tests/baselines/reference/project/mapRootRelativePathSingleFileSpecifyOutputFile/node/mapRootRelativePathSingleFileSpecifyOutputFile.sourcemap.txt | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.0001759575097821653,
0.00017225401825271547,
0.00016414772835560143,
0.0001726342597976327,
0.000002988207597809378
]
|
{
"id": 4,
"code_window": [
"//// // /*2*/\n",
"\n",
"for (const marker of test.markers()) {\n",
" verify.noDocCommentTemplateAt(marker);\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
" verify.emptyDocCommentTemplateAt(marker);\n"
],
"file_path": "tests/cases/fourslash/docCommentTemplateInSingleLineComment.ts",
"type": "replace",
"edit_start_line_idx": 11
} | /* @internal */
namespace ts {
export interface Refactor {
/** A unique code associated with each refactor */
name: string;
/** Description of the refactor to display in the UI of the editor */
description: string;
/** Compute the associated code actions */
getEditsForAction(context: RefactorContext, actionName: string): RefactorEditInfo | undefined;
/** Compute (quickly) which actions are available here */
getAvailableActions(context: RefactorContext): ApplicableRefactorInfo[] | undefined;
}
export interface RefactorContext extends textChanges.TextChangesContext {
file: SourceFile;
startPosition: number;
endPosition?: number;
program: Program;
host: LanguageServiceHost;
cancellationToken?: CancellationToken;
}
export namespace refactor {
// A map with the refactor code as key, the refactor itself as value
// e.g. nonSuggestableRefactors[refactorCode] -> the refactor you want
const refactors: Map<Refactor> = createMap<Refactor>();
export function registerRefactor(refactor: Refactor) {
refactors.set(refactor.name, refactor);
}
export function getApplicableRefactors(context: RefactorContext): ApplicableRefactorInfo[] {
return flatMapIter(refactors.values(), refactor =>
context.cancellationToken && context.cancellationToken.isCancellationRequested() ? undefined : refactor.getAvailableActions(context));
}
export function getEditsForRefactor(context: RefactorContext, refactorName: string, actionName: string): RefactorEditInfo | undefined {
const refactor = refactors.get(refactorName);
return refactor && refactor.getEditsForAction(context, actionName);
}
}
export function getRefactorContextLength(context: RefactorContext): number {
return context.endPosition === undefined ? 0 : context.endPosition - context.startPosition;
}
}
| src/services/refactorProvider.ts | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.00017305370420217514,
0.0001701069122646004,
0.00016696914099156857,
0.0001697237603366375,
0.0000022052911390346708
]
|
{
"id": 6,
"code_window": [
" * \n",
" */`);\n",
"\n",
"verify.noDocCommentTemplateAt(\"n2\");\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
"verify.emptyDocCommentTemplateAt(\"n2\");\n"
],
"file_path": "tests/cases/fourslash/docCommentTemplateNamespacesAndModules02.ts",
"type": "replace",
"edit_start_line_idx": 13
} | /// <reference path='fourslash.ts' />
// @Filename: functionDecl.ts
////f/*0*/unction /*1*/foo/*2*/(/*3*/) /*4*/{ /*5*/}
for (const marker of test.markers()) {
verify.noDocCommentTemplateAt(marker);
}
| tests/cases/fourslash/docCommentTemplateInsideFunctionDeclaration.ts | 1 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.00026145915035158396,
0.00026145915035158396,
0.00026145915035158396,
0.00026145915035158396,
0
]
|
{
"id": 6,
"code_window": [
" * \n",
" */`);\n",
"\n",
"verify.noDocCommentTemplateAt(\"n2\");\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
"verify.emptyDocCommentTemplateAt(\"n2\");\n"
],
"file_path": "tests/cases/fourslash/docCommentTemplateNamespacesAndModules02.ts",
"type": "replace",
"edit_start_line_idx": 13
} | === tests/cases/compiler/privacyGloImportParseErrors.ts ===
module m1 {
>m1 : Symbol(m1, Decl(privacyGloImportParseErrors.ts, 0, 0))
export module m1_M1_public {
>m1_M1_public : Symbol(m1_im1_private, Decl(privacyGloImportParseErrors.ts, 0, 11))
export class c1 {
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 1, 32))
}
export function f1() {
>f1 : Symbol(f1, Decl(privacyGloImportParseErrors.ts, 3, 9))
return new c1;
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 1, 32))
}
export var v1 = c1;
>v1 : Symbol(v1, Decl(privacyGloImportParseErrors.ts, 7, 18))
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 1, 32))
export var v2: c1;
>v2 : Symbol(v2, Decl(privacyGloImportParseErrors.ts, 8, 18))
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 1, 32))
}
module m1_M2_private {
>m1_M2_private : Symbol(m1_M2_private, Decl(privacyGloImportParseErrors.ts, 9, 5))
export class c1 {
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 11, 26))
}
export function f1() {
>f1 : Symbol(f1, Decl(privacyGloImportParseErrors.ts, 13, 9))
return new c1;
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 11, 26))
}
export var v1 = c1;
>v1 : Symbol(v1, Decl(privacyGloImportParseErrors.ts, 17, 18))
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 11, 26))
export var v2: c1;
>v2 : Symbol(v2, Decl(privacyGloImportParseErrors.ts, 18, 18))
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 11, 26))
}
export declare module "m1_M3_public" {
export function f1();
>f1 : Symbol(f1, Decl(privacyGloImportParseErrors.ts, 21, 42))
export class c1 {
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 22, 29))
}
export var v1: { new (): c1; };
>v1 : Symbol(v1, Decl(privacyGloImportParseErrors.ts, 25, 18))
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 22, 29))
export var v2: c1;
>v2 : Symbol(v2, Decl(privacyGloImportParseErrors.ts, 26, 18))
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 22, 29))
}
declare module "m1_M4_private" {
export function f1();
>f1 : Symbol(f1, Decl(privacyGloImportParseErrors.ts, 29, 36))
export class c1 {
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 30, 29))
}
export var v1: { new (): c1; };
>v1 : Symbol(v1, Decl(privacyGloImportParseErrors.ts, 33, 18))
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 30, 29))
export var v2: c1;
>v2 : Symbol(v2, Decl(privacyGloImportParseErrors.ts, 34, 18))
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 30, 29))
}
import m1_im1_private = m1_M1_public;
>m1_im1_private : Symbol(m1_im1_private, Decl(privacyGloImportParseErrors.ts, 35, 5))
>m1_M1_public : Symbol(m1_im1_private, Decl(privacyGloImportParseErrors.ts, 0, 11))
export var m1_im1_private_v1_public = m1_im1_private.c1;
>m1_im1_private_v1_public : Symbol(m1_im1_private_v1_public, Decl(privacyGloImportParseErrors.ts, 38, 14))
>m1_im1_private.c1 : Symbol(m1_im1_private.c1, Decl(privacyGloImportParseErrors.ts, 1, 32))
>m1_im1_private : Symbol(m1_im1_private, Decl(privacyGloImportParseErrors.ts, 35, 5))
>c1 : Symbol(m1_im1_private.c1, Decl(privacyGloImportParseErrors.ts, 1, 32))
export var m1_im1_private_v2_public = new m1_im1_private.c1();
>m1_im1_private_v2_public : Symbol(m1_im1_private_v2_public, Decl(privacyGloImportParseErrors.ts, 39, 14))
>m1_im1_private.c1 : Symbol(m1_im1_private.c1, Decl(privacyGloImportParseErrors.ts, 1, 32))
>m1_im1_private : Symbol(m1_im1_private, Decl(privacyGloImportParseErrors.ts, 35, 5))
>c1 : Symbol(m1_im1_private.c1, Decl(privacyGloImportParseErrors.ts, 1, 32))
export var m1_im1_private_v3_public = m1_im1_private.f1;
>m1_im1_private_v3_public : Symbol(m1_im1_private_v3_public, Decl(privacyGloImportParseErrors.ts, 40, 14))
>m1_im1_private.f1 : Symbol(m1_im1_private.f1, Decl(privacyGloImportParseErrors.ts, 3, 9))
>m1_im1_private : Symbol(m1_im1_private, Decl(privacyGloImportParseErrors.ts, 35, 5))
>f1 : Symbol(m1_im1_private.f1, Decl(privacyGloImportParseErrors.ts, 3, 9))
export var m1_im1_private_v4_public = m1_im1_private.f1();
>m1_im1_private_v4_public : Symbol(m1_im1_private_v4_public, Decl(privacyGloImportParseErrors.ts, 41, 14))
>m1_im1_private.f1 : Symbol(m1_im1_private.f1, Decl(privacyGloImportParseErrors.ts, 3, 9))
>m1_im1_private : Symbol(m1_im1_private, Decl(privacyGloImportParseErrors.ts, 35, 5))
>f1 : Symbol(m1_im1_private.f1, Decl(privacyGloImportParseErrors.ts, 3, 9))
var m1_im1_private_v1_private = m1_im1_private.c1;
>m1_im1_private_v1_private : Symbol(m1_im1_private_v1_private, Decl(privacyGloImportParseErrors.ts, 42, 7))
>m1_im1_private.c1 : Symbol(m1_im1_private.c1, Decl(privacyGloImportParseErrors.ts, 1, 32))
>m1_im1_private : Symbol(m1_im1_private, Decl(privacyGloImportParseErrors.ts, 35, 5))
>c1 : Symbol(m1_im1_private.c1, Decl(privacyGloImportParseErrors.ts, 1, 32))
var m1_im1_private_v2_private = new m1_im1_private.c1();
>m1_im1_private_v2_private : Symbol(m1_im1_private_v2_private, Decl(privacyGloImportParseErrors.ts, 43, 7))
>m1_im1_private.c1 : Symbol(m1_im1_private.c1, Decl(privacyGloImportParseErrors.ts, 1, 32))
>m1_im1_private : Symbol(m1_im1_private, Decl(privacyGloImportParseErrors.ts, 35, 5))
>c1 : Symbol(m1_im1_private.c1, Decl(privacyGloImportParseErrors.ts, 1, 32))
var m1_im1_private_v3_private = m1_im1_private.f1;
>m1_im1_private_v3_private : Symbol(m1_im1_private_v3_private, Decl(privacyGloImportParseErrors.ts, 44, 7))
>m1_im1_private.f1 : Symbol(m1_im1_private.f1, Decl(privacyGloImportParseErrors.ts, 3, 9))
>m1_im1_private : Symbol(m1_im1_private, Decl(privacyGloImportParseErrors.ts, 35, 5))
>f1 : Symbol(m1_im1_private.f1, Decl(privacyGloImportParseErrors.ts, 3, 9))
var m1_im1_private_v4_private = m1_im1_private.f1();
>m1_im1_private_v4_private : Symbol(m1_im1_private_v4_private, Decl(privacyGloImportParseErrors.ts, 45, 7))
>m1_im1_private.f1 : Symbol(m1_im1_private.f1, Decl(privacyGloImportParseErrors.ts, 3, 9))
>m1_im1_private : Symbol(m1_im1_private, Decl(privacyGloImportParseErrors.ts, 35, 5))
>f1 : Symbol(m1_im1_private.f1, Decl(privacyGloImportParseErrors.ts, 3, 9))
import m1_im2_private = m1_M2_private;
>m1_im2_private : Symbol(m1_im2_private, Decl(privacyGloImportParseErrors.ts, 45, 56))
>m1_M2_private : Symbol(m1_M2_private, Decl(privacyGloImportParseErrors.ts, 9, 5))
export var m1_im2_private_v1_public = m1_im2_private.c1;
>m1_im2_private_v1_public : Symbol(m1_im2_private_v1_public, Decl(privacyGloImportParseErrors.ts, 49, 14))
>m1_im2_private.c1 : Symbol(m1_im2_private.c1, Decl(privacyGloImportParseErrors.ts, 11, 26))
>m1_im2_private : Symbol(m1_im2_private, Decl(privacyGloImportParseErrors.ts, 45, 56))
>c1 : Symbol(m1_im2_private.c1, Decl(privacyGloImportParseErrors.ts, 11, 26))
export var m1_im2_private_v2_public = new m1_im2_private.c1();
>m1_im2_private_v2_public : Symbol(m1_im2_private_v2_public, Decl(privacyGloImportParseErrors.ts, 50, 14))
>m1_im2_private.c1 : Symbol(m1_im2_private.c1, Decl(privacyGloImportParseErrors.ts, 11, 26))
>m1_im2_private : Symbol(m1_im2_private, Decl(privacyGloImportParseErrors.ts, 45, 56))
>c1 : Symbol(m1_im2_private.c1, Decl(privacyGloImportParseErrors.ts, 11, 26))
export var m1_im2_private_v3_public = m1_im2_private.f1;
>m1_im2_private_v3_public : Symbol(m1_im2_private_v3_public, Decl(privacyGloImportParseErrors.ts, 51, 14))
>m1_im2_private.f1 : Symbol(m1_im2_private.f1, Decl(privacyGloImportParseErrors.ts, 13, 9))
>m1_im2_private : Symbol(m1_im2_private, Decl(privacyGloImportParseErrors.ts, 45, 56))
>f1 : Symbol(m1_im2_private.f1, Decl(privacyGloImportParseErrors.ts, 13, 9))
export var m1_im2_private_v4_public = m1_im2_private.f1();
>m1_im2_private_v4_public : Symbol(m1_im2_private_v4_public, Decl(privacyGloImportParseErrors.ts, 52, 14))
>m1_im2_private.f1 : Symbol(m1_im2_private.f1, Decl(privacyGloImportParseErrors.ts, 13, 9))
>m1_im2_private : Symbol(m1_im2_private, Decl(privacyGloImportParseErrors.ts, 45, 56))
>f1 : Symbol(m1_im2_private.f1, Decl(privacyGloImportParseErrors.ts, 13, 9))
var m1_im2_private_v1_private = m1_im2_private.c1;
>m1_im2_private_v1_private : Symbol(m1_im2_private_v1_private, Decl(privacyGloImportParseErrors.ts, 53, 7))
>m1_im2_private.c1 : Symbol(m1_im2_private.c1, Decl(privacyGloImportParseErrors.ts, 11, 26))
>m1_im2_private : Symbol(m1_im2_private, Decl(privacyGloImportParseErrors.ts, 45, 56))
>c1 : Symbol(m1_im2_private.c1, Decl(privacyGloImportParseErrors.ts, 11, 26))
var m1_im2_private_v2_private = new m1_im2_private.c1();
>m1_im2_private_v2_private : Symbol(m1_im2_private_v2_private, Decl(privacyGloImportParseErrors.ts, 54, 7))
>m1_im2_private.c1 : Symbol(m1_im2_private.c1, Decl(privacyGloImportParseErrors.ts, 11, 26))
>m1_im2_private : Symbol(m1_im2_private, Decl(privacyGloImportParseErrors.ts, 45, 56))
>c1 : Symbol(m1_im2_private.c1, Decl(privacyGloImportParseErrors.ts, 11, 26))
var m1_im2_private_v3_private = m1_im2_private.f1;
>m1_im2_private_v3_private : Symbol(m1_im2_private_v3_private, Decl(privacyGloImportParseErrors.ts, 55, 7))
>m1_im2_private.f1 : Symbol(m1_im2_private.f1, Decl(privacyGloImportParseErrors.ts, 13, 9))
>m1_im2_private : Symbol(m1_im2_private, Decl(privacyGloImportParseErrors.ts, 45, 56))
>f1 : Symbol(m1_im2_private.f1, Decl(privacyGloImportParseErrors.ts, 13, 9))
var m1_im2_private_v4_private = m1_im2_private.f1();
>m1_im2_private_v4_private : Symbol(m1_im2_private_v4_private, Decl(privacyGloImportParseErrors.ts, 56, 7))
>m1_im2_private.f1 : Symbol(m1_im2_private.f1, Decl(privacyGloImportParseErrors.ts, 13, 9))
>m1_im2_private : Symbol(m1_im2_private, Decl(privacyGloImportParseErrors.ts, 45, 56))
>f1 : Symbol(m1_im2_private.f1, Decl(privacyGloImportParseErrors.ts, 13, 9))
import m1_im3_private = require("m1_M3_public");
>m1_im3_private : Symbol(m1_im3_private, Decl(privacyGloImportParseErrors.ts, 56, 56))
export var m1_im3_private_v1_public = m1_im3_private.c1;
>m1_im3_private_v1_public : Symbol(m1_im3_private_v1_public, Decl(privacyGloImportParseErrors.ts, 59, 14))
>m1_im3_private : Symbol(m1_im3_private, Decl(privacyGloImportParseErrors.ts, 56, 56))
export var m1_im3_private_v2_public = new m1_im3_private.c1();
>m1_im3_private_v2_public : Symbol(m1_im3_private_v2_public, Decl(privacyGloImportParseErrors.ts, 60, 14))
>m1_im3_private : Symbol(m1_im3_private, Decl(privacyGloImportParseErrors.ts, 56, 56))
export var m1_im3_private_v3_public = m1_im3_private.f1;
>m1_im3_private_v3_public : Symbol(m1_im3_private_v3_public, Decl(privacyGloImportParseErrors.ts, 61, 14))
>m1_im3_private : Symbol(m1_im3_private, Decl(privacyGloImportParseErrors.ts, 56, 56))
export var m1_im3_private_v4_public = m1_im3_private.f1();
>m1_im3_private_v4_public : Symbol(m1_im3_private_v4_public, Decl(privacyGloImportParseErrors.ts, 62, 14))
>m1_im3_private : Symbol(m1_im3_private, Decl(privacyGloImportParseErrors.ts, 56, 56))
var m1_im3_private_v1_private = m1_im3_private.c1;
>m1_im3_private_v1_private : Symbol(m1_im3_private_v1_private, Decl(privacyGloImportParseErrors.ts, 63, 7))
>m1_im3_private : Symbol(m1_im3_private, Decl(privacyGloImportParseErrors.ts, 56, 56))
var m1_im3_private_v2_private = new m1_im3_private.c1();
>m1_im3_private_v2_private : Symbol(m1_im3_private_v2_private, Decl(privacyGloImportParseErrors.ts, 64, 7))
>m1_im3_private : Symbol(m1_im3_private, Decl(privacyGloImportParseErrors.ts, 56, 56))
var m1_im3_private_v3_private = m1_im3_private.f1;
>m1_im3_private_v3_private : Symbol(m1_im3_private_v3_private, Decl(privacyGloImportParseErrors.ts, 65, 7))
>m1_im3_private : Symbol(m1_im3_private, Decl(privacyGloImportParseErrors.ts, 56, 56))
var m1_im3_private_v4_private = m1_im3_private.f1();
>m1_im3_private_v4_private : Symbol(m1_im3_private_v4_private, Decl(privacyGloImportParseErrors.ts, 66, 7))
>m1_im3_private : Symbol(m1_im3_private, Decl(privacyGloImportParseErrors.ts, 56, 56))
import m1_im4_private = require("m1_M4_private");
>m1_im4_private : Symbol(m1_im4_private, Decl(privacyGloImportParseErrors.ts, 66, 56))
export var m1_im4_private_v1_public = m1_im4_private.c1;
>m1_im4_private_v1_public : Symbol(m1_im4_private_v1_public, Decl(privacyGloImportParseErrors.ts, 69, 14))
>m1_im4_private : Symbol(m1_im4_private, Decl(privacyGloImportParseErrors.ts, 66, 56))
export var m1_im4_private_v2_public = new m1_im4_private.c1();
>m1_im4_private_v2_public : Symbol(m1_im4_private_v2_public, Decl(privacyGloImportParseErrors.ts, 70, 14))
>m1_im4_private : Symbol(m1_im4_private, Decl(privacyGloImportParseErrors.ts, 66, 56))
export var m1_im4_private_v3_public = m1_im4_private.f1;
>m1_im4_private_v3_public : Symbol(m1_im4_private_v3_public, Decl(privacyGloImportParseErrors.ts, 71, 14))
>m1_im4_private : Symbol(m1_im4_private, Decl(privacyGloImportParseErrors.ts, 66, 56))
export var m1_im4_private_v4_public = m1_im4_private.f1();
>m1_im4_private_v4_public : Symbol(m1_im4_private_v4_public, Decl(privacyGloImportParseErrors.ts, 72, 14))
>m1_im4_private : Symbol(m1_im4_private, Decl(privacyGloImportParseErrors.ts, 66, 56))
var m1_im4_private_v1_private = m1_im4_private.c1;
>m1_im4_private_v1_private : Symbol(m1_im4_private_v1_private, Decl(privacyGloImportParseErrors.ts, 73, 7))
>m1_im4_private : Symbol(m1_im4_private, Decl(privacyGloImportParseErrors.ts, 66, 56))
var m1_im4_private_v2_private = new m1_im4_private.c1();
>m1_im4_private_v2_private : Symbol(m1_im4_private_v2_private, Decl(privacyGloImportParseErrors.ts, 74, 7))
>m1_im4_private : Symbol(m1_im4_private, Decl(privacyGloImportParseErrors.ts, 66, 56))
var m1_im4_private_v3_private = m1_im4_private.f1;
>m1_im4_private_v3_private : Symbol(m1_im4_private_v3_private, Decl(privacyGloImportParseErrors.ts, 75, 7))
>m1_im4_private : Symbol(m1_im4_private, Decl(privacyGloImportParseErrors.ts, 66, 56))
var m1_im4_private_v4_private = m1_im4_private.f1();
>m1_im4_private_v4_private : Symbol(m1_im4_private_v4_private, Decl(privacyGloImportParseErrors.ts, 76, 7))
>m1_im4_private : Symbol(m1_im4_private, Decl(privacyGloImportParseErrors.ts, 66, 56))
export import m1_im1_public = m1_M1_public;
>m1_im1_public : Symbol(m1_im1_public, Decl(privacyGloImportParseErrors.ts, 76, 56))
>m1_M1_public : Symbol(m1_im1_private, Decl(privacyGloImportParseErrors.ts, 0, 11))
export import m1_im2_public = m1_M2_private;
>m1_im2_public : Symbol(m1_im2_public, Decl(privacyGloImportParseErrors.ts, 78, 47))
>m1_M2_private : Symbol(m1_M2_private, Decl(privacyGloImportParseErrors.ts, 9, 5))
export import m1_im3_public = require("m1_M3_public");
>m1_im3_public : Symbol(m1_im3_public, Decl(privacyGloImportParseErrors.ts, 79, 48))
export import m1_im4_public = require("m1_M4_private");
>m1_im4_public : Symbol(m1_im4_public, Decl(privacyGloImportParseErrors.ts, 80, 58))
}
module glo_M1_public {
>glo_M1_public : Symbol(glo_M1_public, Decl(privacyGloImportParseErrors.ts, 82, 1))
export class c1 {
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 84, 22))
}
export function f1() {
>f1 : Symbol(f1, Decl(privacyGloImportParseErrors.ts, 86, 5))
return new c1;
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 84, 22))
}
export var v1 = c1;
>v1 : Symbol(v1, Decl(privacyGloImportParseErrors.ts, 90, 14))
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 84, 22))
export var v2: c1;
>v2 : Symbol(v2, Decl(privacyGloImportParseErrors.ts, 91, 14))
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 84, 22))
}
declare module "glo_M2_public" {
export function f1();
>f1 : Symbol(f1, Decl(privacyGloImportParseErrors.ts, 94, 32))
export class c1 {
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 95, 25))
}
export var v1: { new (): c1; };
>v1 : Symbol(v1, Decl(privacyGloImportParseErrors.ts, 98, 14))
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 95, 25))
export var v2: c1;
>v2 : Symbol(v2, Decl(privacyGloImportParseErrors.ts, 99, 14))
>c1 : Symbol(c1, Decl(privacyGloImportParseErrors.ts, 95, 25))
}
declare module "use_glo_M1_public" {
import use_glo_M1_public = glo_M1_public;
>use_glo_M1_public : Symbol(use_glo_M1_public, Decl(privacyGloImportParseErrors.ts, 102, 36))
>glo_M1_public : Symbol(use_glo_M1_public, Decl(privacyGloImportParseErrors.ts, 82, 1))
export var use_glo_M1_public_v1_public: { new (): use_glo_M1_public.c1; };
>use_glo_M1_public_v1_public : Symbol(use_glo_M1_public_v1_public, Decl(privacyGloImportParseErrors.ts, 104, 14))
>use_glo_M1_public : Symbol(use_glo_M1_public, Decl(privacyGloImportParseErrors.ts, 102, 36))
>c1 : Symbol(use_glo_M1_public.c1, Decl(privacyGloImportParseErrors.ts, 84, 22))
export var use_glo_M1_public_v2_public: typeof use_glo_M1_public;
>use_glo_M1_public_v2_public : Symbol(use_glo_M1_public_v2_public, Decl(privacyGloImportParseErrors.ts, 105, 14))
>use_glo_M1_public : Symbol(use_glo_M1_public, Decl(privacyGloImportParseErrors.ts, 102, 36))
export var use_glo_M1_public_v3_public: ()=> use_glo_M1_public.c1;
>use_glo_M1_public_v3_public : Symbol(use_glo_M1_public_v3_public, Decl(privacyGloImportParseErrors.ts, 106, 14))
>use_glo_M1_public : Symbol(use_glo_M1_public, Decl(privacyGloImportParseErrors.ts, 102, 36))
>c1 : Symbol(use_glo_M1_public.c1, Decl(privacyGloImportParseErrors.ts, 84, 22))
var use_glo_M1_public_v1_private: { new (): use_glo_M1_public.c1; };
>use_glo_M1_public_v1_private : Symbol(use_glo_M1_public_v1_private, Decl(privacyGloImportParseErrors.ts, 107, 7))
>use_glo_M1_public : Symbol(use_glo_M1_public, Decl(privacyGloImportParseErrors.ts, 102, 36))
>c1 : Symbol(use_glo_M1_public.c1, Decl(privacyGloImportParseErrors.ts, 84, 22))
var use_glo_M1_public_v2_private: typeof use_glo_M1_public;
>use_glo_M1_public_v2_private : Symbol(use_glo_M1_public_v2_private, Decl(privacyGloImportParseErrors.ts, 108, 7))
>use_glo_M1_public : Symbol(use_glo_M1_public, Decl(privacyGloImportParseErrors.ts, 102, 36))
var use_glo_M1_public_v3_private: () => use_glo_M1_public.c1;
>use_glo_M1_public_v3_private : Symbol(use_glo_M1_public_v3_private, Decl(privacyGloImportParseErrors.ts, 109, 7))
>use_glo_M1_public : Symbol(use_glo_M1_public, Decl(privacyGloImportParseErrors.ts, 102, 36))
>c1 : Symbol(use_glo_M1_public.c1, Decl(privacyGloImportParseErrors.ts, 84, 22))
import use_glo_M2_public = require("glo_M2_public");
>use_glo_M2_public : Symbol(use_glo_M2_public, Decl(privacyGloImportParseErrors.ts, 109, 65))
export var use_glo_M2_public_v1_public: { new (): use_glo_M2_public.c1; };
>use_glo_M2_public_v1_public : Symbol(use_glo_M2_public_v1_public, Decl(privacyGloImportParseErrors.ts, 112, 14))
>use_glo_M2_public : Symbol(use_glo_M2_public, Decl(privacyGloImportParseErrors.ts, 109, 65))
>c1 : Symbol(use_glo_M2_public.c1, Decl(privacyGloImportParseErrors.ts, 95, 25))
export var use_glo_M2_public_v2_public: typeof use_glo_M2_public;
>use_glo_M2_public_v2_public : Symbol(use_glo_M2_public_v2_public, Decl(privacyGloImportParseErrors.ts, 113, 14))
>use_glo_M2_public : Symbol(use_glo_M2_public, Decl(privacyGloImportParseErrors.ts, 109, 65))
export var use_glo_M2_public_v3_public: () => use_glo_M2_public.c1;
>use_glo_M2_public_v3_public : Symbol(use_glo_M2_public_v3_public, Decl(privacyGloImportParseErrors.ts, 114, 14))
>use_glo_M2_public : Symbol(use_glo_M2_public, Decl(privacyGloImportParseErrors.ts, 109, 65))
>c1 : Symbol(use_glo_M2_public.c1, Decl(privacyGloImportParseErrors.ts, 95, 25))
var use_glo_M2_public_v1_private: { new (): use_glo_M2_public.c1; };
>use_glo_M2_public_v1_private : Symbol(use_glo_M2_public_v1_private, Decl(privacyGloImportParseErrors.ts, 115, 7))
>use_glo_M2_public : Symbol(use_glo_M2_public, Decl(privacyGloImportParseErrors.ts, 109, 65))
>c1 : Symbol(use_glo_M2_public.c1, Decl(privacyGloImportParseErrors.ts, 95, 25))
var use_glo_M2_public_v2_private: typeof use_glo_M2_public;
>use_glo_M2_public_v2_private : Symbol(use_glo_M2_public_v2_private, Decl(privacyGloImportParseErrors.ts, 116, 7))
>use_glo_M2_public : Symbol(use_glo_M2_public, Decl(privacyGloImportParseErrors.ts, 109, 65))
var use_glo_M2_public_v3_private: () => use_glo_M2_public.c1;
>use_glo_M2_public_v3_private : Symbol(use_glo_M2_public_v3_private, Decl(privacyGloImportParseErrors.ts, 117, 7))
>use_glo_M2_public : Symbol(use_glo_M2_public, Decl(privacyGloImportParseErrors.ts, 109, 65))
>c1 : Symbol(use_glo_M2_public.c1, Decl(privacyGloImportParseErrors.ts, 95, 25))
module m2 {
>m2 : Symbol(m2, Decl(privacyGloImportParseErrors.ts, 117, 65))
import errorImport = require("glo_M2_public");
>errorImport : Symbol(errorImport, Decl(privacyGloImportParseErrors.ts, 119, 15))
import nonerrorImport = glo_M1_public;
>nonerrorImport : Symbol(nonerrorImport, Decl(privacyGloImportParseErrors.ts, 120, 54))
>glo_M1_public : Symbol(nonerrorImport, Decl(privacyGloImportParseErrors.ts, 82, 1))
module m5 {
>m5 : Symbol(m5, Decl(privacyGloImportParseErrors.ts, 121, 46))
import m5_errorImport = require("glo_M2_public");
>m5_errorImport : Symbol(m5_errorImport, Decl(privacyGloImportParseErrors.ts, 123, 19))
import m5_nonerrorImport = glo_M1_public;
>m5_nonerrorImport : Symbol(m5_nonerrorImport, Decl(privacyGloImportParseErrors.ts, 124, 61))
>glo_M1_public : Symbol(m5_nonerrorImport, Decl(privacyGloImportParseErrors.ts, 82, 1))
}
}
}
declare module "anotherParseError" {
module m2 {
>m2 : Symbol(m2, Decl(privacyGloImportParseErrors.ts, 130, 36), Decl(privacyGloImportParseErrors.ts, 134, 5))
declare module "abc" {
}
}
module m2 {
>m2 : Symbol(m2, Decl(privacyGloImportParseErrors.ts, 130, 36), Decl(privacyGloImportParseErrors.ts, 134, 5))
module "abc2" {
}
}
module "abc3" {
}
}
module m2 {
>m2 : Symbol(m2, Decl(privacyGloImportParseErrors.ts, 142, 1))
import m3 = require("use_glo_M1_public");
>m3 : Symbol(m3, Decl(privacyGloImportParseErrors.ts, 144, 11))
module m4 {
>m4 : Symbol(m4, Decl(privacyGloImportParseErrors.ts, 145, 45))
var a = 10;
>a : Symbol(a, Decl(privacyGloImportParseErrors.ts, 147, 11))
import m2 = require("use_glo_M1_public");
>m2 : Symbol(m2, Decl(privacyGloImportParseErrors.ts, 147, 19))
}
}
| tests/baselines/reference/privacyGloImportParseErrors.symbols | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.00017265352653339505,
0.00016806078201625496,
0.00016515013703610748,
0.0001678230764809996,
0.0000015370958408311708
]
|
{
"id": 6,
"code_window": [
" * \n",
" */`);\n",
"\n",
"verify.noDocCommentTemplateAt(\"n2\");\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
"verify.emptyDocCommentTemplateAt(\"n2\");\n"
],
"file_path": "tests/cases/fourslash/docCommentTemplateNamespacesAndModules02.ts",
"type": "replace",
"edit_start_line_idx": 13
} | /// <reference path="fourslash.ts" />
//// class A<B, /**/B> { }
goTo.marker();
verify.quickInfoExists();
| tests/cases/fourslash/duplicateTypeParameters.ts | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.00017423188546672463,
0.00017423188546672463,
0.00017423188546672463,
0.00017423188546672463,
0
]
|
{
"id": 6,
"code_window": [
" * \n",
" */`);\n",
"\n",
"verify.noDocCommentTemplateAt(\"n2\");\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
"verify.emptyDocCommentTemplateAt(\"n2\");\n"
],
"file_path": "tests/cases/fourslash/docCommentTemplateNamespacesAndModules02.ts",
"type": "replace",
"edit_start_line_idx": 13
} | export export class Foo {
public Bar() {
}
} | tests/cases/conformance/parser/ecmascript5/RegressionTests/parser618973.ts | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.0001714665413601324,
0.0001714665413601324,
0.0001714665413601324,
0.0001714665413601324,
0
]
|
{
"id": 7,
"code_window": [
"\n",
"verify.noDocCommentTemplateAt(\"n3\");\n"
],
"labels": [
"keep",
"replace"
],
"after_edit": [
"verify.emptyDocCommentTemplateAt(\"n3\");"
],
"file_path": "tests/cases/fourslash/docCommentTemplateNamespacesAndModules02.ts",
"type": "replace",
"edit_start_line_idx": 15
} | /// <reference path='fourslash.ts' />
/////*top*/
////namespace n1.
//// /*n2*/ n2.
//// /*n3*/ n3 {
////}
verify.docCommentTemplateAt("top", /*indentation*/ 8,
`/**
*
*/`);
verify.noDocCommentTemplateAt("n2");
verify.noDocCommentTemplateAt("n3");
| tests/cases/fourslash/docCommentTemplateNamespacesAndModules02.ts | 1 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.5499757528305054,
0.3570602834224701,
0.164144828915596,
0.3570602834224701,
0.19291545450687408
]
|
{
"id": 7,
"code_window": [
"\n",
"verify.noDocCommentTemplateAt(\"n3\");\n"
],
"labels": [
"keep",
"replace"
],
"after_edit": [
"verify.emptyDocCommentTemplateAt(\"n3\");"
],
"file_path": "tests/cases/fourslash/docCommentTemplateNamespacesAndModules02.ts",
"type": "replace",
"edit_start_line_idx": 15
} | for (var v of []) {
var x = v;
} | tests/cases/conformance/statements/for-ofStatements/ES5For-of2.ts | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.0001782077451935038,
0.0001782077451935038,
0.0001782077451935038,
0.0001782077451935038,
0
]
|
{
"id": 7,
"code_window": [
"\n",
"verify.noDocCommentTemplateAt(\"n3\");\n"
],
"labels": [
"keep",
"replace"
],
"after_edit": [
"verify.emptyDocCommentTemplateAt(\"n3\");"
],
"file_path": "tests/cases/fourslash/docCommentTemplateNamespacesAndModules02.ts",
"type": "replace",
"edit_start_line_idx": 15
} | class Model {
public name: string;
}
class UI {
constructor(model: Model, foo = model.name)
{
}
} | tests/cases/compiler/parameterReferencesOtherParameter2.ts | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.00017664842016529292,
0.00017664842016529292,
0.00017664842016529292,
0.00017664842016529292,
0
]
|
{
"id": 7,
"code_window": [
"\n",
"verify.noDocCommentTemplateAt(\"n3\");\n"
],
"labels": [
"keep",
"replace"
],
"after_edit": [
"verify.emptyDocCommentTemplateAt(\"n3\");"
],
"file_path": "tests/cases/fourslash/docCommentTemplateNamespacesAndModules02.ts",
"type": "replace",
"edit_start_line_idx": 15
} | //// [parserES5ComputedPropertyName5.ts]
interface I {
[e]: number
}
//// [parserES5ComputedPropertyName5.js]
| tests/baselines/reference/parserES5ComputedPropertyName5.js | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.00017001386731863022,
0.00017001386731863022,
0.00017001386731863022,
0.00017001386731863022,
0
]
|
{
"id": 8,
"code_window": [
"// @Filename: regex.ts\n",
"////var regex = /*0*///*1*/asdf/*2*/ /*3*///*4*/;\n",
"\n",
"for (const marker of test.markers()) {\n",
" verify.noDocCommentTemplateAt(marker);\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
" verify.emptyDocCommentTemplateAt(marker);\n"
],
"file_path": "tests/cases/fourslash/docCommentTemplateRegex.ts",
"type": "replace",
"edit_start_line_idx": 6
} | /// <reference path='fourslash.ts' />
// @Filename: justAComment.ts
//// // We want to check off-by-one errors in assessing the end of the comment, so we check twice,
//// // first with a trailing space and then without.
//// // /*0*/
//// // /*1*/
//// // We also want to check EOF handling at the end of a comment
//// // /*2*/
for (const marker of test.markers()) {
verify.noDocCommentTemplateAt(marker);
}
| tests/cases/fourslash/docCommentTemplateInSingleLineComment.ts | 1 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.9906798005104065,
0.495421826839447,
0.00016386232164222747,
0.495421826839447,
0.4952579736709595
]
|
{
"id": 8,
"code_window": [
"// @Filename: regex.ts\n",
"////var regex = /*0*///*1*/asdf/*2*/ /*3*///*4*/;\n",
"\n",
"for (const marker of test.markers()) {\n",
" verify.noDocCommentTemplateAt(marker);\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
" verify.emptyDocCommentTemplateAt(marker);\n"
],
"file_path": "tests/cases/fourslash/docCommentTemplateRegex.ts",
"type": "replace",
"edit_start_line_idx": 6
} |
1 >var a = 10;
~~~~~~~~~~~~ => Pos: (0 to 11) SpanInfo: {"start":0,"length":10}
>var a = 10
>:=> (line 1, col 0) to (line 1, col 10)
--------------------------------
2 >var b;
~~~~~~~ => Pos: (12 to 18) SpanInfo: undefined
--------------------------------
3 >var c = 10, d, e;
~~~~~~~~~~~~~~~~~~ => Pos: (19 to 36) SpanInfo: {"start":19,"length":10}
>var c = 10
>:=> (line 3, col 0) to (line 3, col 10)
--------------------------------
4 >var c2, d2 = 10;
~~~~~~~ => Pos: (37 to 43) SpanInfo: undefined
4 >var c2, d2 = 10;
~~~~~~~~~~ => Pos: (44 to 53) SpanInfo: {"start":45,"length":7}
>d2 = 10
>:=> (line 4, col 8) to (line 4, col 15)
--------------------------------
5 >module m {
~~~~~~~~~~~ => Pos: (54 to 64) SpanInfo: {"start":54,"length":146}
>module m {
> var x1;
> var x2 = 10, x3 = 10;
> var x4, x5;
> export var xx1;
> export var xx2 = 10, xx3 = 10;
> export var xx4, xx5;
>}
>:=> (line 5, col 0) to (line 12, col 1)
--------------------------------
6 > var x1;
~~~~~~~~~~~~ => Pos: (65 to 76) SpanInfo: undefined
--------------------------------
7 > var x2 = 10, x3 = 10;
~~~~~~~~~~~~~~~~ => Pos: (77 to 92) SpanInfo: {"start":81,"length":11}
>var x2 = 10
>:=> (line 7, col 4) to (line 7, col 15)
7 > var x2 = 10, x3 = 10;
~~~~~~~~~~ => Pos: (93 to 102) SpanInfo: {"start":94,"length":7}
>x3 = 10
>:=> (line 7, col 17) to (line 7, col 24)
--------------------------------
8 > var x4, x5;
~~~~~~~~~~~~~~~~ => Pos: (103 to 118) SpanInfo: undefined
--------------------------------
9 > export var xx1;
~~~~~~~~~~~~~~~~~~~~ => Pos: (119 to 138) SpanInfo: undefined
--------------------------------
10 > export var xx2 = 10, xx3 = 10;
~~~~~~~~~~~~~~~~~~~~~~~~ => Pos: (139 to 162) SpanInfo: {"start":150,"length":12}
>var xx2 = 10
>:=> (line 10, col 11) to (line 10, col 23)
10 > export var xx2 = 10, xx3 = 10;
~~~~~~~~~~~ => Pos: (163 to 173) SpanInfo: {"start":164,"length":8}
>xx3 = 10
>:=> (line 10, col 25) to (line 10, col 33)
--------------------------------
11 > export var xx4, xx5;
~~~~~~~~~~~~~~~~~~~~~~~~~ => Pos: (174 to 198) SpanInfo: undefined
--------------------------------
12 >}
~ => Pos: (199 to 199) SpanInfo: {"start":199,"length":1}
>}
>:=> (line 12, col 0) to (line 12, col 1) | tests/baselines/reference/bpSpan_variables.baseline | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.00022620608797296882,
0.0001786207576515153,
0.00016624377167318016,
0.0001749759103404358,
0.00001739720391924493
]
|
{
"id": 8,
"code_window": [
"// @Filename: regex.ts\n",
"////var regex = /*0*///*1*/asdf/*2*/ /*3*///*4*/;\n",
"\n",
"for (const marker of test.markers()) {\n",
" verify.noDocCommentTemplateAt(marker);\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
" verify.emptyDocCommentTemplateAt(marker);\n"
],
"file_path": "tests/cases/fourslash/docCommentTemplateRegex.ts",
"type": "replace",
"edit_start_line_idx": 6
} | //// [unicodeExtendedEscapesInStrings11_ES6.ts]
// ES6 Spec - 10.1.1 Static Semantics: UTF16Encoding (cp)
// 2. Let cu2 be ((cp – 65536) modulo 1024) + 0xDC00.
// Although we should just get back a single code point value of 0xDC00,
// this is a useful edge-case test.
var x = "\u{DC00}";
//// [unicodeExtendedEscapesInStrings11_ES6.js]
// ES6 Spec - 10.1.1 Static Semantics: UTF16Encoding (cp)
// 2. Let cu2 be ((cp – 65536) modulo 1024) + 0xDC00.
// Although we should just get back a single code point value of 0xDC00,
// this is a useful edge-case test.
var x = "\u{DC00}";
| tests/baselines/reference/unicodeExtendedEscapesInStrings11_ES6.js | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.00017432063759770244,
0.00017223527538590133,
0.00017014992772601545,
0.00017223527538590133,
0.0000020853549358434975
]
|
{
"id": 8,
"code_window": [
"// @Filename: regex.ts\n",
"////var regex = /*0*///*1*/asdf/*2*/ /*3*///*4*/;\n",
"\n",
"for (const marker of test.markers()) {\n",
" verify.noDocCommentTemplateAt(marker);\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
" verify.emptyDocCommentTemplateAt(marker);\n"
],
"file_path": "tests/cases/fourslash/docCommentTemplateRegex.ts",
"type": "replace",
"edit_start_line_idx": 6
} | //// [circularImportAlias.ts]
// expected no error
module B {
export import a = A;
export class D extends a.C {
id: number;
}
}
module A {
export class C { name: string }
export import b = B;
}
var c: { name: string };
var c = new B.a.C();
//// [circularImportAlias.js]
// expected no error
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var B;
(function (B) {
B.a = A;
var D = /** @class */ (function (_super) {
__extends(D, _super);
function D() {
return _super !== null && _super.apply(this, arguments) || this;
}
return D;
}(B.a.C));
B.D = D;
})(B || (B = {}));
var A;
(function (A) {
var C = /** @class */ (function () {
function C() {
}
return C;
}());
A.C = C;
A.b = B;
})(A || (A = {}));
var c;
var c = new B.a.C();
| tests/baselines/reference/circularImportAlias.js | 0 | https://github.com/microsoft/TypeScript/commit/b566480aaaf92460b37eb0977b5c07c1c0729c85 | [
0.0001793684932636097,
0.00017517036758363247,
0.00017026108980644494,
0.00017563239089213312,
0.0000028937633942405228
]
|
{
"id": 0,
"code_window": [
" width?: number;\n",
" labelWidth?: number;\n",
" grow?: boolean;\n",
" onBlur?: (event: FormEvent<HTMLInputElement>) => void;\n",
"}\n",
"\n",
"export function VariableTextField({\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
" interactive?: boolean;\n"
],
"file_path": "public/app/features/variables/editor/VariableTextField.tsx",
"type": "add",
"edit_start_line_idx": 16
} | import React, { FormEvent, PropsWithChildren, ReactElement } from 'react';
import { InlineField, Input, PopoverContent } from '@grafana/ui';
interface VariableTextFieldProps {
value: string;
name: string;
placeholder: string;
onChange: (event: FormEvent<HTMLInputElement>) => void;
testId?: string;
tooltip?: PopoverContent;
required?: boolean;
width?: number;
labelWidth?: number;
grow?: boolean;
onBlur?: (event: FormEvent<HTMLInputElement>) => void;
}
export function VariableTextField({
value,
name,
placeholder,
onChange,
testId,
width,
labelWidth,
required,
onBlur,
tooltip,
grow,
}: PropsWithChildren<VariableTextFieldProps>): ReactElement {
return (
<InlineField label={name} labelWidth={labelWidth ?? 12} tooltip={tooltip} grow={grow}>
<Input
type="text"
id={name}
name={name}
placeholder={placeholder}
value={value}
onChange={onChange}
onBlur={onBlur}
width={grow ? undefined : width ?? 25}
data-testid={testId}
required={required}
/>
</InlineField>
);
}
| public/app/features/variables/editor/VariableTextField.tsx | 1 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.9974320530891418,
0.6183358430862427,
0.020352328196167946,
0.9940633177757263,
0.4630809724330902
]
|
{
"id": 0,
"code_window": [
" width?: number;\n",
" labelWidth?: number;\n",
" grow?: boolean;\n",
" onBlur?: (event: FormEvent<HTMLInputElement>) => void;\n",
"}\n",
"\n",
"export function VariableTextField({\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
" interactive?: boolean;\n"
],
"file_path": "public/app/features/variables/editor/VariableTextField.tsx",
"type": "add",
"edit_start_line_idx": 16
} | import { css } from '@emotion/css';
import React, { MouseEvent, memo } from 'react';
import { GrafanaTheme } from '@grafana/data';
import { stylesFactory, useTheme } from '@grafana/ui';
import { NodesMarker } from './types';
const nodeR = 40;
const getStyles = stylesFactory((theme: GrafanaTheme) => ({
mainGroup: css`
cursor: pointer;
font-size: 10px;
`,
mainCircle: css`
fill: ${theme.colors.panelBg};
stroke: ${theme.colors.border3};
`,
text: css`
width: 50px;
height: 50px;
text-align: center;
display: flex;
align-items: center;
justify-content: center;
`,
}));
export const Marker = memo(function Marker(props: {
marker: NodesMarker;
onClick?: (event: MouseEvent<SVGElement>, marker: NodesMarker) => void;
}) {
const { marker, onClick } = props;
const { node } = marker;
const styles = getStyles(useTheme());
if (!(node.x !== undefined && node.y !== undefined)) {
return null;
}
return (
<g
data-node-id={node.id}
className={styles.mainGroup}
onClick={(event) => {
onClick?.(event, marker);
}}
aria-label={`Hidden nodes marker: ${node.id}`}
>
<circle className={styles.mainCircle} r={nodeR} cx={node.x} cy={node.y} />
<g>
<foreignObject x={node.x - 25} y={node.y - 25} width="50" height="50">
<div className={styles.text}>
{/* we limit the count to 101 so if we have more than 100 nodes we don't have exact count */}
<span>{marker.count > 100 ? '>100' : marker.count} nodes</span>
</div>
</foreignObject>
</g>
</g>
);
});
| public/app/plugins/panel/nodeGraph/Marker.tsx | 0 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.00017530491459183395,
0.00017237977590411901,
0.0001667801261646673,
0.00017354844021610916,
0.0000030916226023691706
]
|
{
"id": 0,
"code_window": [
" width?: number;\n",
" labelWidth?: number;\n",
" grow?: boolean;\n",
" onBlur?: (event: FormEvent<HTMLInputElement>) => void;\n",
"}\n",
"\n",
"export function VariableTextField({\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
" interactive?: boolean;\n"
],
"file_path": "public/app/features/variables/editor/VariableTextField.tsx",
"type": "add",
"edit_start_line_idx": 16
} | import { DataFrame, FieldType } from '@grafana/data';
import {
histogramFrameBucketMinFieldName,
histogramFrameBucketMaxFieldName,
} from '@grafana/data/src/transformations/transformers/histogram';
export function originalDataHasHistogram(frames?: DataFrame[]): boolean {
if (frames?.length !== 1) {
return false;
}
const frame = frames[0];
if (frame.fields.length < 3) {
return false;
}
if (
frame.fields[0].name !== histogramFrameBucketMinFieldName ||
frame.fields[1].name !== histogramFrameBucketMaxFieldName
) {
return false;
}
for (const field of frame.fields) {
if (field.type !== FieldType.number) {
return false;
}
}
return true;
}
| public/app/plugins/panel/histogram/utils.ts | 0 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.00017429818399250507,
0.0001700603897916153,
0.00016707487520761788,
0.00016880815383046865,
0.0000030789767606620444
]
|
{
"id": 0,
"code_window": [
" width?: number;\n",
" labelWidth?: number;\n",
" grow?: boolean;\n",
" onBlur?: (event: FormEvent<HTMLInputElement>) => void;\n",
"}\n",
"\n",
"export function VariableTextField({\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
" interactive?: boolean;\n"
],
"file_path": "public/app/features/variables/editor/VariableTextField.tsx",
"type": "add",
"edit_start_line_idx": 16
} | +++
title = "Release notes for Grafana 8.0.3"
[_build]
list = false
+++
<!-- Auto generated by update changelog github action -->
# Release notes for Grafana 8.0.3
### Features and enhancements
- **Alerting:** Increase alertmanager_conf column if MySQL. [#35657](https://github.com/grafana/grafana/pull/35657), [@kylebrandt](https://github.com/kylebrandt)
- **Time series/Bar chart panel:** Handle infinite numbers as nulls when converting to plot array. [#35638](https://github.com/grafana/grafana/pull/35638), [@dprokop](https://github.com/dprokop)
- **TimeSeries:** Ensure series overrides that contain color are migrated, and migrate the previous `fieldConfig` when changing the panel type. [#35676](https://github.com/grafana/grafana/pull/35676), [@ashharrison90](https://github.com/ashharrison90)
- **ValueMappings:** Improve singlestat value mappings migration. [#35578](https://github.com/grafana/grafana/pull/35578), [@dprokop](https://github.com/dprokop)
### Bug fixes
- **Annotations:** Fix annotation line and marker colors. [#35608](https://github.com/grafana/grafana/pull/35608), [@torkelo](https://github.com/torkelo)
- **AzureMonitor:** Fix KQL template variable queries without default workspace. [#35836](https://github.com/grafana/grafana/pull/35836), [@joshhunt](https://github.com/joshhunt)
- **CloudWatch/Logs:** Fix missing response data for log queries. [#35724](https://github.com/grafana/grafana/pull/35724), [@aocenas](https://github.com/aocenas)
- **Elasticsearch:** Restore previous field naming strategy when using variables. [#35624](https://github.com/grafana/grafana/pull/35624), [@Elfo404](https://github.com/Elfo404)
- **LibraryPanels:** Fix crash in library panels list when panel plugin is not found. [#35907](https://github.com/grafana/grafana/pull/35907), [@torkelo](https://github.com/torkelo)
- **LogsPanel:** Fix performance drop when moving logs panel in dashboard. [#35379](https://github.com/grafana/grafana/pull/35379), [@aocenas](https://github.com/aocenas)
- **Loki:** Parse log levels when ANSI coloring is enabled. [#35607](https://github.com/grafana/grafana/pull/35607), [@olbo98](https://github.com/olbo98)
- **MSSQL:** Fix issue with hidden queries still being executed. [#35787](https://github.com/grafana/grafana/pull/35787), [@torkelo](https://github.com/torkelo)
- **PanelEdit:** Display the VisualizationPicker that was not displayed if a panel has an unknown panel plugin. [#35831](https://github.com/grafana/grafana/pull/35831), [@jackw](https://github.com/jackw)
- **Plugins:** Fix loading symbolically linked plugins. [#35635](https://github.com/grafana/grafana/pull/35635), [@domasx2](https://github.com/domasx2)
- **Prometheus:** Fix issue where legend name was replaced with name Value in stat and gauge panels. [#35863](https://github.com/grafana/grafana/pull/35863), [@torkelo](https://github.com/torkelo)
- **State Timeline:** Fix crash when hovering over panel. [#35692](https://github.com/grafana/grafana/pull/35692), [@hugohaggmark](https://github.com/hugohaggmark)
| docs/sources/release-notes/release-notes-8-0-3.md | 0 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.00017541293345857412,
0.00017135140660684556,
0.0001641466369619593,
0.0001729230280034244,
0.0000043914328671235126
]
|
{
"id": 1,
"code_window": [
" required,\n",
" onBlur,\n",
" tooltip,\n",
" grow,\n",
"}: PropsWithChildren<VariableTextFieldProps>): ReactElement {\n",
" return (\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep"
],
"after_edit": [
" interactive,\n"
],
"file_path": "public/app/features/variables/editor/VariableTextField.tsx",
"type": "add",
"edit_start_line_idx": 30
} | import React, { FormEvent, PropsWithChildren, ReactElement } from 'react';
import { InlineField, Input, PopoverContent } from '@grafana/ui';
interface VariableTextFieldProps {
value: string;
name: string;
placeholder: string;
onChange: (event: FormEvent<HTMLInputElement>) => void;
testId?: string;
tooltip?: PopoverContent;
required?: boolean;
width?: number;
labelWidth?: number;
grow?: boolean;
onBlur?: (event: FormEvent<HTMLInputElement>) => void;
}
export function VariableTextField({
value,
name,
placeholder,
onChange,
testId,
width,
labelWidth,
required,
onBlur,
tooltip,
grow,
}: PropsWithChildren<VariableTextFieldProps>): ReactElement {
return (
<InlineField label={name} labelWidth={labelWidth ?? 12} tooltip={tooltip} grow={grow}>
<Input
type="text"
id={name}
name={name}
placeholder={placeholder}
value={value}
onChange={onChange}
onBlur={onBlur}
width={grow ? undefined : width ?? 25}
data-testid={testId}
required={required}
/>
</InlineField>
);
}
| public/app/features/variables/editor/VariableTextField.tsx | 1 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.9954782128334045,
0.5451847314834595,
0.0033873484935611486,
0.8278263807296753,
0.4414803385734558
]
|
{
"id": 1,
"code_window": [
" required,\n",
" onBlur,\n",
" tooltip,\n",
" grow,\n",
"}: PropsWithChildren<VariableTextFieldProps>): ReactElement {\n",
" return (\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep"
],
"after_edit": [
" interactive,\n"
],
"file_path": "public/app/features/variables/editor/VariableTextField.tsx",
"type": "add",
"edit_start_line_idx": 30
} | import { dateTimeFormat, dateTimeFormatTimeAgo, localTimeFormat, systemDateFormats } from '../datetime';
import { toDuration as duration, toUtc, dateTime } from '../datetime/moment_wrapper';
import { TimeZone } from '../types';
import { DecimalCount } from '../types/displayValue';
import { toFixed, toFixedScaled, FormattedValue, ValueFormatter } from './valueFormats';
interface IntervalsInSeconds {
[interval: string]: number;
}
export enum Interval {
Year = 'year',
Month = 'month',
Week = 'week',
Day = 'day',
Hour = 'hour',
Minute = 'minute',
Second = 'second',
Millisecond = 'millisecond',
}
const UNITS = [
Interval.Year,
Interval.Month,
Interval.Week,
Interval.Day,
Interval.Hour,
Interval.Minute,
Interval.Second,
Interval.Millisecond,
];
const INTERVALS_IN_SECONDS: IntervalsInSeconds = {
[Interval.Year]: 31536000,
[Interval.Month]: 2592000,
[Interval.Week]: 604800,
[Interval.Day]: 86400,
[Interval.Hour]: 3600,
[Interval.Minute]: 60,
[Interval.Second]: 1,
[Interval.Millisecond]: 0.001,
};
export function toNanoSeconds(size: number, decimals?: DecimalCount): FormattedValue {
if (size === null) {
return { text: '' };
}
if (Math.abs(size) < 1000) {
return { text: toFixed(size, decimals), suffix: ' ns' };
} else if (Math.abs(size) < 1000000) {
return toFixedScaled(size / 1000, decimals, ' µs');
} else if (Math.abs(size) < 1000000000) {
return toFixedScaled(size / 1000000, decimals, ' ms');
} else if (Math.abs(size) < 60000000000) {
return toFixedScaled(size / 1000000000, decimals, ' s');
} else if (Math.abs(size) < 3600000000000) {
return toFixedScaled(size / 60000000000, decimals, ' min');
} else if (Math.abs(size) < 86400000000000) {
return toFixedScaled(size / 3600000000000, decimals, ' hour');
} else {
return toFixedScaled(size / 86400000000000, decimals, ' day');
}
}
export function toMicroSeconds(size: number, decimals?: DecimalCount): FormattedValue {
if (size === null) {
return { text: '' };
}
if (Math.abs(size) < 1000) {
return { text: toFixed(size, decimals), suffix: ' µs' };
} else if (Math.abs(size) < 1000000) {
return toFixedScaled(size / 1000, decimals, ' ms');
} else {
return toFixedScaled(size / 1000000, decimals, ' s');
}
}
export function toMilliSeconds(size: number, decimals?: DecimalCount, scaledDecimals?: DecimalCount): FormattedValue {
if (size === null) {
return { text: '' };
}
if (Math.abs(size) < 1000) {
return { text: toFixed(size, decimals), suffix: ' ms' };
} else if (Math.abs(size) < 60000) {
// Less than 1 min
return toFixedScaled(size / 1000, decimals, ' s');
} else if (Math.abs(size) < 3600000) {
// Less than 1 hour, divide in minutes
return toFixedScaled(size / 60000, decimals, ' min');
} else if (Math.abs(size) < 86400000) {
// Less than one day, divide in hours
return toFixedScaled(size / 3600000, decimals, ' hour');
} else if (Math.abs(size) < 31536000000) {
// Less than one year, divide in days
return toFixedScaled(size / 86400000, decimals, ' day');
}
return toFixedScaled(size / 31536000000, decimals, ' year');
}
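// Returns value1 - value2 when both values are defined; otherwise undefined.
// The misspelled exported name ("Substract") is preserved for API compatibility.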
export function trySubstract(value1: DecimalCount, value2: DecimalCount): DecimalCount {
if (value1 !== null && value1 !== undefined && value2 !== null && value2 !== undefined) {
return value1 - value2;
}
return undefined;
}
export function toSeconds(size: number, decimals?: DecimalCount): FormattedValue {
if (size === null) {
return { text: '' };
}
// If 0, use s unit instead of ns
if (size === 0) {
return { text: '0', suffix: ' s' };
}
// Less than 1 µs, divide in ns
if (Math.abs(size) < 0.000001) {
return toFixedScaled(size * 1e9, decimals, ' ns');
}
// Less than 1 ms, divide in µs
if (Math.abs(size) < 0.001) {
return toFixedScaled(size * 1e6, decimals, ' µs');
}
// Less than 1 second, divide in ms
if (Math.abs(size) < 1) {
return toFixedScaled(size * 1e3, decimals, ' ms');
}
if (Math.abs(size) < 60) {
return { text: toFixed(size, decimals), suffix: ' s' };
} else if (Math.abs(size) < 3600) {
// Less than 1 hour, divide in minutes
return toFixedScaled(size / 60, decimals, ' min');
} else if (Math.abs(size) < 86400) {
// Less than one day, divide in hours
return toFixedScaled(size / 3600, decimals, ' hour');
} else if (Math.abs(size) < 604800) {
// Less than one week, divide in days
return toFixedScaled(size / 86400, decimals, ' day');
} else if (Math.abs(size) < 31536000) {
// Less than one year, divide in week
return toFixedScaled(size / 604800, decimals, ' week');
}
return toFixedScaled(size / 3.15569e7, decimals, ' year');
}
export function toMinutes(size: number, decimals?: DecimalCount): FormattedValue {
if (size === null) {
return { text: '' };
}
if (Math.abs(size) < 60) {
return { text: toFixed(size, decimals), suffix: ' min' };
} else if (Math.abs(size) < 1440) {
return toFixedScaled(size / 60, decimals, ' hour');
} else if (Math.abs(size) < 10080) {
return toFixedScaled(size / 1440, decimals, ' day');
} else if (Math.abs(size) < 604800) {
return toFixedScaled(size / 10080, decimals, ' week');
} else {
return toFixedScaled(size / 5.25948e5, decimals, ' year');
}
}
export function toHours(size: number, decimals?: DecimalCount): FormattedValue {
if (size === null) {
return { text: '' };
}
if (Math.abs(size) < 24) {
return { text: toFixed(size, decimals), suffix: ' hour' };
} else if (Math.abs(size) < 168) {
return toFixedScaled(size / 24, decimals, ' day');
} else if (Math.abs(size) < 8760) {
return toFixedScaled(size / 168, decimals, ' week');
} else {
return toFixedScaled(size / 8760, decimals, ' year');
}
}
export function toDays(size: number, decimals?: DecimalCount): FormattedValue {
if (size === null) {
return { text: '' };
}
if (Math.abs(size) < 7) {
return { text: toFixed(size, decimals), suffix: ' day' };
} else if (Math.abs(size) < 365) {
return toFixedScaled(size / 7, decimals, ' week');
} else {
return toFixedScaled(size / 365, decimals, ' year');
}
}
export function toDuration(size: number, decimals: DecimalCount, timeScale: Interval): FormattedValue {
if (size === null) {
return { text: '' };
}
if (size === 0) {
return { text: '0', suffix: ' ' + timeScale + 's' };
}
if (size < 0) {
const v = toDuration(-size, decimals, timeScale);
if (!v.suffix) {
v.suffix = '';
}
v.suffix += ' ago';
return v;
}
// convert $size to milliseconds
// intervals_in_seconds uses seconds (duh), convert them to milliseconds here to minimize floating point errors
size *= INTERVALS_IN_SECONDS[timeScale] * 1000;
const strings = [];
// after first value >= 1 print only $decimals more
let decrementDecimals = false;
let decimalsCount = 0;
if (decimals !== null && decimals !== undefined) {
decimalsCount = decimals as number;
}
for (let i = 0; i < UNITS.length && decimalsCount >= 0; i++) {
const interval = INTERVALS_IN_SECONDS[UNITS[i]] * 1000;
const value = size / interval;
if (value >= 1 || decrementDecimals) {
decrementDecimals = true;
const floor = Math.floor(value);
const unit = UNITS[i] + (floor !== 1 ? 's' : '');
strings.push(floor + ' ' + unit);
size = size % interval;
decimalsCount--;
}
}
return { text: strings.join(', ') };
}
export function toClock(size: number, decimals?: DecimalCount): FormattedValue {
if (size === null) {
return { text: '' };
}
// < 1 second
if (size < 1000) {
return {
text: toUtc(size).format('SSS\\m\\s'),
};
}
// < 1 minute
if (size < 60000) {
let format = 'ss\\s:SSS\\m\\s';
if (decimals === 0) {
format = 'ss\\s';
}
return { text: toUtc(size).format(format) };
}
// < 1 hour
if (size < 3600000) {
let format = 'mm\\m:ss\\s:SSS\\m\\s';
if (decimals === 0) {
format = 'mm\\m';
} else if (decimals === 1) {
format = 'mm\\m:ss\\s';
}
return { text: toUtc(size).format(format) };
}
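// 1 hour or more: derive the hour count from a duration (moment's format tokens would wrap at 24h), then format the remainder.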
let format = 'mm\\m:ss\\s:SSS\\m\\s';
const hours = `${('0' + Math.floor(duration(size, 'milliseconds').asHours())).slice(-2)}h`;
if (decimals === 0) {
format = '';
} else if (decimals === 1) {
format = 'mm\\m';
} else if (decimals === 2) {
format = 'mm\\m:ss\\s';
}
const text = format ? `${hours}:${toUtc(size).format(format)}` : hours;
return { text };
}
export function toDurationInMilliseconds(size: number, decimals: DecimalCount): FormattedValue {
return toDuration(size, decimals, Interval.Millisecond);
}
export function toDurationInSeconds(size: number, decimals: DecimalCount): FormattedValue {
return toDuration(size, decimals, Interval.Second);
}
export function toDurationInHoursMinutesSeconds(size: number): FormattedValue {
if (size < 0) {
const v = toDurationInHoursMinutesSeconds(-size);
if (!v.suffix) {
v.suffix = '';
}
v.suffix += ' ago';
return v;
}
const strings = [];
const numHours = Math.floor(size / 3600);
const numMinutes = Math.floor((size % 3600) / 60);
const numSeconds = Math.floor((size % 3600) % 60);
numHours > 9 ? strings.push('' + numHours) : strings.push('0' + numHours);
numMinutes > 9 ? strings.push('' + numMinutes) : strings.push('0' + numMinutes);
numSeconds > 9 ? strings.push('' + numSeconds) : strings.push('0' + numSeconds);
return { text: strings.join(':') };
}
export function toDurationInDaysHoursMinutesSeconds(size: number): FormattedValue {
if (size < 0) {
const v = toDurationInDaysHoursMinutesSeconds(-size);
if (!v.suffix) {
v.suffix = '';
}
v.suffix += ' ago';
return v;
}
let dayString = '';
const numDays = Math.floor(size / (24 * 3600));
if (numDays > 0) {
dayString = numDays + ' d ';
}
const hmsString = toDurationInHoursMinutesSeconds(size - numDays * 24 * 3600);
return { text: dayString + hmsString.text };
}
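// Time ticks are hundredths of a second; scale to seconds before formatting.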
export function toTimeTicks(size: number, decimals: DecimalCount): FormattedValue {
return toSeconds(size / 100, decimals);
}
export function toClockMilliseconds(size: number, decimals: DecimalCount): FormattedValue {
return toClock(size, decimals);
}
export function toClockSeconds(size: number, decimals: DecimalCount): FormattedValue {
return toClock(size * 1000, decimals);
}
export function toDateTimeValueFormatter(pattern: string, todayPattern?: string): ValueFormatter {
return (value: number, decimals: DecimalCount, scaledDecimals: DecimalCount, timeZone?: TimeZone): FormattedValue => {
if (todayPattern) {
if (dateTime().isSame(value, 'day')) {
return {
text: dateTimeFormat(value, { format: todayPattern, timeZone }),
};
}
}
return { text: dateTimeFormat(value, { format: pattern, timeZone }) };
};
}
export const dateTimeAsIso = toDateTimeValueFormatter('YYYY-MM-DD HH:mm:ss');
export const dateTimeAsIsoNoDateIfToday = toDateTimeValueFormatter('YYYY-MM-DD HH:mm:ss', 'HH:mm:ss');
export const dateTimeAsUS = toDateTimeValueFormatter('MM/DD/YYYY h:mm:ss a');
export const dateTimeAsUSNoDateIfToday = toDateTimeValueFormatter('MM/DD/YYYY h:mm:ss a', 'h:mm:ss a');
export function getDateTimeAsLocalFormat() {
return toDateTimeValueFormatter(
localTimeFormat({
year: 'numeric',
month: '2-digit',
day: '2-digit',
hour: '2-digit',
minute: '2-digit',
second: '2-digit',
})
);
}
export function getDateTimeAsLocalFormatNoDateIfToday() {
return toDateTimeValueFormatter(
localTimeFormat({
year: 'numeric',
month: '2-digit',
day: '2-digit',
hour: '2-digit',
minute: '2-digit',
second: '2-digit',
}),
localTimeFormat({
hour: '2-digit',
minute: '2-digit',
second: '2-digit',
})
);
}
export function dateTimeSystemFormatter(
value: number,
decimals: DecimalCount,
scaledDecimals: DecimalCount,
timeZone?: TimeZone,
showMs?: boolean
): FormattedValue {
return {
text: dateTimeFormat(value, {
format: showMs ? systemDateFormats.fullDateMS : systemDateFormats.fullDate,
timeZone,
}),
};
}
export function dateTimeFromNow(
value: number,
decimals: DecimalCount,
scaledDecimals: DecimalCount,
timeZone?: TimeZone
): FormattedValue {
return { text: dateTimeFormatTimeAgo(value, { timeZone }) };
}
| packages/grafana-data/src/valueFormats/dateTimeFormatters.ts | 0 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.0005583381280303001,
0.00018344046839047223,
0.00016496142779942602,
0.00017522119742352515,
0.00005791167131974362
]
|
{
"id": 1,
"code_window": [
" required,\n",
" onBlur,\n",
" tooltip,\n",
" grow,\n",
"}: PropsWithChildren<VariableTextFieldProps>): ReactElement {\n",
" return (\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep"
],
"after_edit": [
" interactive,\n"
],
"file_path": "public/app/features/variables/editor/VariableTextField.tsx",
"type": "add",
"edit_start_line_idx": 30
} | import { css } from '@emotion/css';
import React from 'react';
import { PluginTypeCode } from '../types';
interface PluginTypeIconProps {
typeCode: PluginTypeCode;
size: number;
}
export const PluginTypeIcon = ({ typeCode, size }: PluginTypeIconProps) => {
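// Resolve an inline base64-encoded SVG for the given plugin type; unknown types render nothing.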
const imageUrl = ((typeCode: string) => {
switch (typeCode) {
case 'panel':
return 'data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyNSIgaGVpZ2h0PSIyNC4zMzEiIHZpZXdCb3g9Ii02MiA2My42NjkgMjUgMjQuMzMxIj48dGl0bGU+aWNvbl9kYXRhLXNvdXJjZTwvdGl0bGU+PHBhdGggZD0iTS00MS40MDUgNjMuNjczaC0xNi4xOUE0LjQxIDQuNDEgMCAwIDAtNjIgNjguMDc4djE1LjUxN0E0LjQxIDQuNDEgMCAwIDAtNTcuNTk1IDg4aDE2LjE5QTQuNDEgNC40MSAwIDAgMC0zNyA4My41OTVWNjguMDc4YTQuNDEgNC40MSAwIDAgMC00LjQwNS00LjQwNXptMy43MjcgMTkuOTIyYTMuNzMxIDMuNzMxIDAgMCAxLTMuNzI3IDMuNzI3aC0xNi4xOWEzLjczMSAzLjczMSAwIDAgMS0zLjcyNy0zLjcyN1Y2OC4wNzhhMy43MzEgMy43MzEgMCAwIDEgMy43MjctMy43MjdoMTYuMTlhMy43MzEgMy43MzEgMCAwIDEgMy43MjcgMy43Mjd2MTUuNTE3eiIgZmlsbD0iIzg5ODk4OSIvPjxnIGZpbGw9IiM4OTg5ODkiPjxwYXRoIGQ9Ik0tNTYuNDU3IDg1LjE0N2gxMy45MTRhMi4zNSAyLjM1IDAgMCAwIDIuMjctMS43NTloLTE4LjQ1NGEyLjM1MSAyLjM1MSAwIDAgMCAyLjI3IDEuNzU5em0uMDQ3LTguNzA2bDIuMDg3LjgzLjgxLS45NzdoLTUuMjk5djEuNjgzbDEuNjM2LTEuNDA4YS43NTEuNzUxIDAgMCAxIC43NjYtLjEyOHptNS44MzktMy42OTRoLTguMjQxdjIuODI4aDUuODk1em03Ljk0OSAyLjgyOGguNzM5bDEuNjk1LTEuMzA0di0xLjUyNGgtNC4yN3ptLTE2LjE5IDQuMzgxdjIuNzEzaDE4LjYyNHYtMi44MjhoLTE4LjQ5MXptOS43NjYtOS4wNDdhLjc0OC43NDggMCAwIDEgLjg5MS0uMjAybDIuODY5IDEuMzIyaDUuMDk5VjY5LjJoLTE4LjYyNXYyLjgyOGg4LjgzOGwuOTI4LTEuMTE5em02LjUwMy00LjM4N2gtMTMuOTE0YTIuMzUyIDIuMzUyIDAgMCAwLTIuMzE2IDEuOTZoMTguNTQ1YTIuMzUgMi4zNSAwIDAgMC0yLjMxNS0xLjk2em0tNC43NjggNi4yMjVoLTEuMzExbC0yLjM0NiAyLjgyOGg2LjU1OGwtMS4zODItMi4xMjh6Ii8+PHBhdGggZD0iTS00Mi4xMDUgNzcuNjM5YS43NDcuNzQ3IDAgMCAxLTEuMDg2LS4xODZsLS43NTItMS4xNThoLTcuNjIxbC0xLjk1MiAyLjM1NGEuNzUuNzUgMCAwIDEtLjg1NC4yMTlsLTIuMTcyLS44NjQtMS4zMDEgMS4xMmgxNy42NTd2LTIuODI4aC0uMTdsLTEuNzQ5IDEuMzQzeiIvPjwvZz48L3N2Zz4=';
case 'datasource':
return 'data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0idXRmLTgiPz4NCjwhLS0gR2VuZXJhdG9yOiBBZG9iZSBJbGx1c3RyYXRvciAxOS4wLjEsIFNWRyBFeHBvcnQgUGx1Zy1JbiAuIFNWRyBWZXJzaW9uOiA2LjAwIEJ1aWxkIDApICAtLT4NCjwhRE9DVFlQRSBzdmcgUFVCTElDICItLy9XM0MvL0RURCBTVkcgMS4xLy9FTiIgImh0dHA6Ly93d3cudzMub3JnL0dyYXBoaWNzL1NWRy8xLjEvRFREL3N2ZzExLmR0ZCI+DQo8c3ZnIHZlcnNpb249IjEuMSIgaWQ9IkxheWVyXzEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgeG1sbnM6eGxpbms9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkveGxpbmsiIHg9IjBweCIgeT0iMHB4Ig0KCSB3aWR0aD0iMjVweCIgaGVpZ2h0PSIyNC4zcHgiIHZpZXdCb3g9Ii0xODcgNzMuNyAyNSAyNC4zIiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IC0xODcgNzMuNyAyNSAyNC4zOyIgeG1sOnNwYWNlPSJwcmVzZXJ2ZSINCgk+DQo8c3R5bGUgdHlwZT0idGV4dC9jc3MiPg0KCS5zdDB7ZmlsbDojNWE1YTVhO30NCjwvc3R5bGU+DQo8Zz4NCgk8dGl0bGU+aWNvbl9kYXRhLXNvdXJjZTwvdGl0bGU+DQoJPGc+DQoJCTxwYXRoIGNsYXNzPSJzdDAiIGQ9Ik0tMTc0LjUsOTQuM2MtNS41LDAtMTAuMi0xLjYtMTIuMy00Yy0wLjEsMC4zLTAuMiwwLjYtMC4yLDFjMCwzLjIsNS43LDYsMTIuNSw2czEyLjUtMi43LDEyLjUtNg0KCQkJYzAtMC4zLTAuMS0wLjctMC4yLTFDLTE2NC40LDkyLjctMTY5LDk0LjMtMTc0LjUsOTQuM3oiLz4NCgkJPHBhdGggY2xhc3M9InN0MCIgZD0iTS0xNzQuNSw4OC45Yy01LjUsMC0xMC4yLTEuNi0xMi4zLTRjLTAuMSwwLjMtMC4yLDAuNi0wLjIsMWMwLDMuMiw1LjcsNiwxMi41LDZzMTIuNS0yLjcsMTIuNS02DQoJCQljMC0wLjMtMC4xLTAuNy0wLjItMUMtMTY0LjQsODcuMy0xNjksODguOS0xNzQuNSw4OC45eiIvPg0KCQk8cGF0aCBjbGFzcz0ic3QwIiBkPSJNLTE4Nyw4MC40YzAsMy4yLDUuNyw2LDEyLjUsNnMxMi41LTIuNywxMi41LTZzLTUuNy02LTEyLjUtNlMtMTg3LDc3LjEtMTg3LDgwLjR6Ii8+DQoJPC9nPg0KPC9nPg0KPC9zdmc+DQo=';
case 'app':
return 'data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyMi45NzgiIGhlaWdodD0iMjUiIHZpZXdCb3g9IjAgMCAyMi45NzggMjUiPjx0aXRsZT5pY29uX2FwcHM8L3RpdGxlPjxwYXRoIGQ9Ik0yMS4yMjQgMTEuMjFhMS43NiAxLjc2IDAgMCAwLTEuNjgyIDEuMjU3SDE0Ljg5YTQuMjMgNC4yMyAwIDAgMC0uMy0xLjE0MmwzLjExNC0xLjhBMi4wNSAyLjA1IDAgMSAwIDE3LjEyIDguMWExLjk4NiAxLjk4NiAwIDAgMCAuMDguNTY1bC0zLjExOCAxLjhhNC4yNDMgNC4yNDMgMCAwIDAtLjgzNS0uODM1bC41ODYtMS4wMTVhMi4xNjUgMi4xNjUgMCAwIDAgLjU5My4wODYgMi4xMTYgMi4xMTYgMCAxIDAtMS40NS0uNThsLS41OCAxLjAxMmEzLjk1NSAzLjk1NSAwIDAgMC0xLjE0LS4zVjMuMDA4YTEuNTQ3IDEuNTQ3IDAgMSAwLTEgMHY1LjgxN2E0LjIzIDQuMjMgMCAwIDAtMS4xNDMuM2wtMi4wNi0zLjU2MkExLjY4NCAxLjY4NCAwIDAgMCA3LjUxIDQuNGExLjcxIDEuNzEgMCAxIDAtMS4zMiAxLjY2bDIuMDYgMy41N2E0LjMyMyA0LjMyMyAwIDAgMC0uODQzLjg0M2wtMy41NjYtMi4wNmExLjc2IDEuNzYgMCAwIDAgLjA0NS0uMzkgMS43IDEuNyAwIDEgMC0xLjcgMS43IDEuNjg2IDEuNjg2IDAgMCAwIDEuMTU1LS40NTNsMy41NyAyLjA2YTQuMDkgNC4wOSAwIDAgMC0uMyAxLjEzM0g1LjIwNmEyLjMwNSAyLjMwNSAwIDEgMCAwIDFoMS40MDdhNC4yMyA0LjIzIDAgMCAwIC4zIDEuMTQyTDMuMTAyIDE2LjhhMS44MjMgMS44MjMgMCAxIDAgLjU1IDEuMyAxLjc3NSAxLjc3NSAwIDAgMC0uMDYzLS40MzhsMy44MjItMi4yMDZhNC4yIDQuMiAwIDAgMCAuODQzLjg0bC0yLjk4IDUuMTkzYTEuNzI3IDEuNzI3IDAgMCAwLS40MTMtLjA1IDEuNzggMS43OCAwIDEgMCAxLjI3Ny41NGwyLjk4LTUuMTc4YTQuMDkgNC4wOSAwIDAgMCAxLjEzMy4zdjEuNDA4YTIuMDU1IDIuMDU1IDAgMSAwIC45OSAwVjE3LjFhNC4yMyA0LjIzIDAgMCAwIDEuMTQzLS4zbDIuNDYgNC4yNmExLjgyNCAxLjgyNCAwIDEgMCAxLjMwNi0uNTUyIDEuNzc4IDEuNzc4IDAgMCAwLS40NDYuMDU3bC0yLjQ2LTQuMjY1YTMuOTYgMy45NiAwIDAgMCAuODI2LS44MjdsLjQ0Ni4yNThhMi4zMjQgMi4zMjQgMCAwIDAtLjEyLjczOCAyLjQgMi40IDAgMSAwIC42Mi0xLjZsLS40NDMtLjI1NGE0LjE1NSA0LjE1NSAwIDAgMCAuMzEtMS4xNTRoNC42NmExLjc1MyAxLjc1MyAwIDEgMCAxLjY4LTIuMjV6bTAgMi43MWEuOTU4Ljk1OCAwIDEgMSAuOTU4LS45NTguOTYuOTYgMCAwIDEtLjk1OC45NnpNMTAuNzUgMTYuMTRhMy4xNzcgMy4xNzcgMCAxIDEgMy4xNzctMy4xNzggMy4xOCAzLjE4IDAgMCAxLTMuMTc3IDMuMTc3em03LjE2My04LjA0YTEuMjYgMS4yNiAwIDEgMSAxLjI2IDEuMjYgMS4yNiAxLjI2IDAgMCAxLTEuMjYtMS4yNnpNMTUuNzQgNi41OTRhMS4zMTQgMS4zMTQgMCAxIDEtMS4zMTUtMS4zMTQgMS4zMTUgMS4zMTUgMCAwIDEgMS4zMTQgMS4zMTR6TTkuOTk2IDEuNTQ4YS43NTMuNzUzIDAgMSAxIC43NTMuNzUzLjc1NC43NTQgMCAwIDEtLjc1My0uNzUyek00Ljg5NyA0LjRhLjkxLjkxIDAgMSAxIC45MS45MS45MS45MSAwIDAgMS0uOTEtLjkxek0yLjE5IDguOTM2YS45MS45MSAwIDEgMSAuOTA4LS45MS45MS45MSAwIDAgMS0uOTEuOTF6bS43NyA1LjUyNmExLjUwNiAxLjUwNiAwIDEgMSAxLjUwNS0xLjUwNiAxLjUwOCAxLjUwOCAwIDAgMS0xLjUwOCAxLjUwNnptLS4xIDMuNjQ2YTEuMDMyIDEuMDMyIDAgMSAxLTEuMDMzLTEuMDMyIDEuMDMzIDEuMDMzIDAgMCAxIDEuMDMyIDEuMDMyem0yLjk4NyA1LjExYS45ODYuOTg2IDAgMSAxLS45ODYtLjk4Ny45ODcuOTg3IDAgMCAxIC45ODcuOTg3em00LjktMS40NmExLjI2IDEuMjYgMCAxIDEgMS4yNi0xLjI2IDEuMjYgMS4yNiAwIDAgMS0xLjI1NyAxLjI2em02LjQ0Mi41N2ExLjAzMiAxLjAzMiAwIDEgMS0xLjAzMy0xLjAyOCAxLjAzMyAxLjAzMyAwIDAgMSAxLjAzMiAxLjAyOHptLS40LTcuNDU4YTEuNiAxLjYgMCAxIDEtMS42IDEuNiAxLjYgMS42IDAgMCAxIDEuNi0xLjZ6IiBmaWxsPSIjODk4OTg5Ii8+PC9zdmc+';
default:
return undefined;
}
})(typeCode);
return imageUrl ? (
<div
className={css`
display: inline-block;
background-image: url(${imageUrl});
background-size: ${size}px;
background-repeat: no-repeat;
width: ${size}px;
height: ${size}px;
`}
/>
) : null;
};
| public/app/features/plugins/admin/components/PluginTypeIcon.tsx | 0 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.00041287485510110855,
0.00023239031725097448,
0.00016978269559331238,
0.00017345184460282326,
0.00010423437197459862
]
|
{
"id": 1,
"code_window": [
" required,\n",
" onBlur,\n",
" tooltip,\n",
" grow,\n",
"}: PropsWithChildren<VariableTextFieldProps>): ReactElement {\n",
" return (\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep"
],
"after_edit": [
" interactive,\n"
],
"file_path": "public/app/features/variables/editor/VariableTextField.tsx",
"type": "add",
"edit_start_line_idx": 30
} | package cloudmonitoring
import (
"encoding/json"
"io/ioutil"
"net/http"
"net/http/httptest"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/stretchr/testify/require"
)
func Test_parseResourcePath(t *testing.T) {
tests := []struct {
name string
original string
expectedTarget string
Err require.ErrorAssertionFunc
}{
{
"Path with a subscription",
"/cloudmonitoring/v3/projects/foo",
"/v3/projects/foo",
require.NoError,
},
{
"Malformed path",
"/projects?foo",
"",
require.Error,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
target, err := getTarget(tt.original)
if target != tt.expectedTarget {
t.Errorf("Unexpected target %s expecting %s", target, tt.expectedTarget)
}
tt.Err(t, err)
})
}
}
func Test_doRequest(t *testing.T) {
// test that it forwards the header and body over multiple calls
elements := []string{"1", "2", "3"}
index := 0
fakeResponseFn := func(input []byte) ([]json.RawMessage, string, error) {
results := []json.RawMessage{input}
if index < len(elements) {
return results, "token", nil
}
return results, "", nil
}
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Add("foo", "bar")
_, err := w.Write([]byte(elements[index]))
index++
if err != nil {
t.Fatal(err)
}
}))
req, err := http.NewRequest(http.MethodGet, srv.URL, nil)
if err != nil {
t.Error(err)
}
rw := httptest.NewRecorder()
res := getResources(rw, req, srv.Client(), fakeResponseFn)
if res.Header().Get("foo") != "bar" {
t.Errorf("Unexpected headers: %v", res.Header())
}
result := rw.Result()
body, err := ioutil.ReadAll(result.Body)
if err != nil {
t.Error(err)
}
err = result.Body.Close()
if err != nil {
t.Error(err)
}
if string(body) != "[1,2,3]" {
t.Errorf("Unexpected body: %v", string(body))
}
}
type fakeInstance struct {
services map[string]datasourceService
}
func (f *fakeInstance) Get(pluginContext backend.PluginContext) (instancemgmt.Instance, error) {
return &datasourceInfo{
services: f.services,
}, nil
}
func (f *fakeInstance) Do(pluginContext backend.PluginContext, fn instancemgmt.InstanceCallbackFunc) error {
return nil
}
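// setRequestVariables should replace the plugin proxy prefix with the upstream Cloud Monitoring host.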
func Test_setRequestVariables(t *testing.T) {
s := Service{
im: &fakeInstance{
services: map[string]datasourceService{
cloudMonitor: {
url: routes[cloudMonitor].url,
client: &http.Client{},
},
},
},
}
req, err := http.NewRequest(http.MethodGet, "http://foo/cloudmonitoring/v3/projects/bar/metricDescriptors", nil)
if err != nil {
t.Fatalf("Unexpected error %v", err)
}
_, _, err = s.setRequestVariables(req, cloudMonitor)
if err != nil {
t.Fatalf("Unexpected error %v", err)
}
expectedURL := "https://monitoring.googleapis.com/v3/projects/bar/metricDescriptors"
if req.URL.String() != expectedURL {
t.Errorf("Unexpected result URL. Got %s, expecting %s", req.URL.String(), expectedURL)
}
}
func Test_processData_functions(t *testing.T) {
// metricDescriptors
metricDescriptorResp := metricDescriptorResponse{
Descriptors: []metricDescriptor{
{
ValueType: "INT64",
MetricKind: "DELTA",
Type: "actions.googleapis.com/smarthome_action/local_event_count",
Unit: "1",
Service: "foo",
ServiceShortName: "bar",
DisplayName: "Local event count",
Description: "baz",
},
},
Token: "foo",
}
marshaledMDResponse, _ := json.Marshal(metricDescriptorResp)
metricDescriptorResult := []metricDescriptor{
{
ValueType: "INT64",
MetricKind: "DELTA",
Type: "actions.googleapis.com/smarthome_action/local_event_count",
Unit: "1",
Service: "actions.googleapis.com",
ServiceShortName: "actions",
DisplayName: "Local event count",
Description: "baz",
},
}
marshaledMDResult, _ := json.Marshal(metricDescriptorResult)
// services
serviceResp := serviceResponse{
Services: []serviceDescription{
{
Name: "blah/foo",
DisplayName: "bar",
},
{
Name: "abc",
DisplayName: "",
},
},
}
marshaledServiceResponse, _ := json.Marshal(serviceResp)
serviceResult := []selectableValue{
{
Value: "foo",
Label: "bar",
},
{
Value: "abc",
Label: "abc",
},
}
marshaledServiceResult, _ := json.Marshal(serviceResult)
// slos
sloResp := sloResponse{
SLOs: []sloDescription{
{
Name: "blah/foo",
DisplayName: "bar",
Goal: 0.1,
},
{
Name: "abc",
DisplayName: "xyz",
Goal: 0.2,
},
},
}
marshaledSLOResponse, _ := json.Marshal(sloResp)
sloResult := []selectableValue{
{
Value: "foo",
Label: "bar",
Goal: 0.1,
},
{
Value: "abc",
Label: "xyz",
Goal: 0.2,
},
}
marshaledSLOResult, _ := json.Marshal(sloResult)
// cloudresourcemanager
cloudResourceResp := projectResponse{
Projects: []projectDescription{
{
ProjectID: "foo",
Name: "bar",
},
{
ProjectID: "abc",
Name: "abc",
},
},
}
marshaledCRResponse, _ := json.Marshal(cloudResourceResp)
tests := []struct {
name string
responseFn processResponse
input []byte
result []byte
token string
}{
{
"metricDescriptor",
processMetricDescriptors,
marshaledMDResponse,
marshaledMDResult,
"foo",
},
{
"services",
processServices,
marshaledServiceResponse,
marshaledServiceResult,
"",
},
{
"slos",
processSLOs,
marshaledSLOResponse,
marshaledSLOResult,
"",
},
{
"cloudresourcemanager",
processProjects,
marshaledCRResponse,
marshaledServiceResult,
"",
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
results, token, err := test.responseFn(test.input)
if err != nil {
t.Errorf("Unexpected error %v", err)
}
if token != test.token {
t.Errorf("Unexpected token. Got %s, expecting %s", token, test.token)
}
res, err := json.Marshal(results)
if err != nil {
t.Errorf("Unexpected error %v", err)
}
if string(test.result) != string(res) {
t.Errorf("Unexpected result. Got %s, expecting %s", res, test.result)
}
})
}
}
| pkg/tsdb/cloudmonitoring/resource_handler_test.go | 0 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.00019913299183826894,
0.0001727718918118626,
0.00016610987950116396,
0.00017227036005351692,
0.000005514250005944632
]
|
{
"id": 2,
"code_window": [
"}: PropsWithChildren<VariableTextFieldProps>): ReactElement {\n",
" return (\n",
" <InlineField label={name} labelWidth={labelWidth ?? 12} tooltip={tooltip} grow={grow}>\n",
" <Input\n",
" type=\"text\"\n",
" id={name}\n",
" name={name}\n",
" placeholder={placeholder}\n"
],
"labels": [
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" <InlineField interactive={interactive} label={name} labelWidth={labelWidth ?? 12} tooltip={tooltip} grow={grow}>\n"
],
"file_path": "public/app/features/variables/editor/VariableTextField.tsx",
"type": "replace",
"edit_start_line_idx": 32
} | import React, { FormEvent, PropsWithChildren, ReactElement } from 'react';
import { InlineField, Input, PopoverContent } from '@grafana/ui';
interface VariableTextFieldProps {
value: string;
name: string;
placeholder: string;
onChange: (event: FormEvent<HTMLInputElement>) => void;
testId?: string;
tooltip?: PopoverContent;
required?: boolean;
width?: number;
labelWidth?: number;
grow?: boolean;
onBlur?: (event: FormEvent<HTMLInputElement>) => void;
}
export function VariableTextField({
value,
name,
placeholder,
onChange,
testId,
width,
labelWidth,
required,
onBlur,
tooltip,
grow,
}: PropsWithChildren<VariableTextFieldProps>): ReactElement {
return (
<InlineField label={name} labelWidth={labelWidth ?? 12} tooltip={tooltip} grow={grow}>
<Input
type="text"
id={name}
name={name}
placeholder={placeholder}
value={value}
onChange={onChange}
onBlur={onBlur}
width={grow ? undefined : width ?? 25}
data-testid={testId}
required={required}
/>
</InlineField>
);
}
| public/app/features/variables/editor/VariableTextField.tsx | 1 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.988664448261261,
0.19934184849262238,
0.00018774728232529014,
0.0024402043782174587,
0.39466288685798645
]
|
{
"id": 2,
"code_window": [
"}: PropsWithChildren<VariableTextFieldProps>): ReactElement {\n",
" return (\n",
" <InlineField label={name} labelWidth={labelWidth ?? 12} tooltip={tooltip} grow={grow}>\n",
" <Input\n",
" type=\"text\"\n",
" id={name}\n",
" name={name}\n",
" placeholder={placeholder}\n"
],
"labels": [
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" <InlineField interactive={interactive} label={name} labelWidth={labelWidth ?? 12} tooltip={tooltip} grow={grow}>\n"
],
"file_path": "public/app/features/variables/editor/VariableTextField.tsx",
"type": "replace",
"edit_start_line_idx": 32
} | package api
import (
"encoding/json"
"errors"
"net/http"
"net/http/httptest"
"path/filepath"
"testing"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/login/loginservice"
"github.com/grafana/grafana/pkg/services/login/logintest"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/services/sqlstore/mockstore"
"github.com/grafana/grafana/pkg/api/response"
"github.com/grafana/grafana/pkg/api/routing"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/auth"
"github.com/grafana/grafana/pkg/services/ldap"
"github.com/grafana/grafana/pkg/services/multildap"
"github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
type LDAPMock struct {
Results []*models.ExternalUserInfo
}
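// Package-level variables configure LDAPMock responses; each test sets them before exercising a handler.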
var userSearchResult *models.ExternalUserInfo
var userSearchConfig ldap.ServerConfig
var userSearchError error
var pingResult []*multildap.ServerStatus
var pingError error
func (m *LDAPMock) Ping() ([]*multildap.ServerStatus, error) {
return pingResult, pingError
}
func (m *LDAPMock) Login(query *models.LoginUserQuery) (*models.ExternalUserInfo, error) {
return &models.ExternalUserInfo{}, nil
}
func (m *LDAPMock) Users(logins []string) ([]*models.ExternalUserInfo, error) {
s := []*models.ExternalUserInfo{}
return s, nil
}
func (m *LDAPMock) User(login string) (*models.ExternalUserInfo, ldap.ServerConfig, error) {
return userSearchResult, userSearchConfig, userSearchError
}
// ***
// GetUserFromLDAP tests
// ***
func getUserFromLDAPContext(t *testing.T, requestURL string, searchOrgRst []*models.OrgDTO) *scenarioContext {
t.Helper()
sc := setupScenarioContext(t, requestURL)
origLDAP := setting.LDAPEnabled
setting.LDAPEnabled = true
t.Cleanup(func() { setting.LDAPEnabled = origLDAP })
hs := &HTTPServer{Cfg: setting.NewCfg(), ldapGroups: ldap.ProvideGroupsService(), SQLStore: &mockstore.SQLStoreMock{ExpectedSearchOrgList: searchOrgRst}}
sc.defaultHandler = routing.Wrap(func(c *models.ReqContext) response.Response {
sc.context = c
return hs.GetUserFromLDAP(c)
})
sc.m.Get("/api/admin/ldap/:username", sc.defaultHandler)
sc.resp = httptest.NewRecorder()
req, _ := http.NewRequest(http.MethodGet, requestURL, nil)
sc.req = req
sc.exec()
return sc
}
func TestGetUserFromLDAPAPIEndpoint_UserNotFound(t *testing.T) {
getLDAPConfig = func(*setting.Cfg) (*ldap.Config, error) {
return &ldap.Config{}, nil
}
newLDAP = func(_ []*ldap.ServerConfig) multildap.IMultiLDAP {
return &LDAPMock{}
}
userSearchResult = nil
sc := getUserFromLDAPContext(t, "/api/admin/ldap/user-that-does-not-exist", []*models.OrgDTO{})
require.Equal(t, sc.resp.Code, http.StatusNotFound)
assert.JSONEq(t, "{\"message\":\"No user was found in the LDAP server(s) with that username\"}", sc.resp.Body.String())
}
func TestGetUserFromLDAPAPIEndpoint_OrgNotfound(t *testing.T) {
isAdmin := true
userSearchResult = &models.ExternalUserInfo{
Name: "John Doe",
Email: "[email protected]",
Login: "johndoe",
Groups: []string{"cn=admins,ou=groups,dc=grafana,dc=org"},
OrgRoles: map[int64]models.RoleType{1: models.ROLE_ADMIN, 2: models.ROLE_VIEWER},
IsGrafanaAdmin: &isAdmin,
}
userSearchConfig = ldap.ServerConfig{
Attr: ldap.AttributeMap{
Name: "ldap-name",
Surname: "ldap-surname",
Email: "ldap-email",
Username: "ldap-username",
},
Groups: []*ldap.GroupToOrgRole{
{
GroupDN: "cn=admins,ou=groups,dc=grafana,dc=org",
OrgId: 1,
OrgRole: models.ROLE_ADMIN,
},
{
GroupDN: "cn=admins,ou=groups,dc=grafana,dc=org",
OrgId: 2,
OrgRole: models.ROLE_VIEWER,
},
},
}
mockOrgSearchResult := []*models.OrgDTO{
{Id: 1, Name: "Main Org."},
}
getLDAPConfig = func(*setting.Cfg) (*ldap.Config, error) {
return &ldap.Config{}, nil
}
newLDAP = func(_ []*ldap.ServerConfig) multildap.IMultiLDAP {
return &LDAPMock{}
}
sc := getUserFromLDAPContext(t, "/api/admin/ldap/johndoe", mockOrgSearchResult)
require.Equal(t, http.StatusBadRequest, sc.resp.Code)
var res map[string]interface{}
err := json.Unmarshal(sc.resp.Body.Bytes(), &res)
assert.NoError(t, err)
assert.Equal(t, "unable to find organization with ID '2'", res["error"])
assert.Equal(t, "An organization was not found - Please verify your LDAP configuration", res["message"])
}
func TestGetUserFromLDAPAPIEndpoint(t *testing.T) {
isAdmin := true
userSearchResult = &models.ExternalUserInfo{
Name: "John Doe",
Email: "[email protected]",
Login: "johndoe",
Groups: []string{"cn=admins,ou=groups,dc=grafana,dc=org", "another-group-not-matched"},
OrgRoles: map[int64]models.RoleType{1: models.ROLE_ADMIN},
IsGrafanaAdmin: &isAdmin,
}
userSearchConfig = ldap.ServerConfig{
Attr: ldap.AttributeMap{
Name: "ldap-name",
Surname: "ldap-surname",
Email: "ldap-email",
Username: "ldap-username",
},
Groups: []*ldap.GroupToOrgRole{
{
GroupDN: "cn=admins,ou=groups,dc=grafana,dc=org",
OrgId: 1,
OrgRole: models.ROLE_ADMIN,
},
{
GroupDN: "cn=admins2,ou=groups,dc=grafana,dc=org",
OrgId: 1,
OrgRole: models.ROLE_ADMIN,
},
},
}
mockOrgSearchResult := []*models.OrgDTO{
{Id: 1, Name: "Main Org."},
}
getLDAPConfig = func(*setting.Cfg) (*ldap.Config, error) {
return &ldap.Config{}, nil
}
newLDAP = func(_ []*ldap.ServerConfig) multildap.IMultiLDAP {
return &LDAPMock{}
}
sc := getUserFromLDAPContext(t, "/api/admin/ldap/johndoe", mockOrgSearchResult)
assert.Equal(t, sc.resp.Code, http.StatusOK)
expected := `
{
"name": {
"cfgAttrValue": "ldap-name", "ldapValue": "John"
},
"surname": {
"cfgAttrValue": "ldap-surname", "ldapValue": "Doe"
},
"email": {
"cfgAttrValue": "ldap-email", "ldapValue": "[email protected]"
},
"login": {
"cfgAttrValue": "ldap-username", "ldapValue": "johndoe"
},
"isGrafanaAdmin": true,
"isDisabled": false,
"roles": [
{ "orgId": 1, "orgRole": "Admin", "orgName": "Main Org.", "groupDN": "cn=admins,ou=groups,dc=grafana,dc=org" },
{ "orgId": 0, "orgRole": "", "orgName": "", "groupDN": "another-group-not-matched" }
],
"teams": null
}
`
assert.JSONEq(t, expected, sc.resp.Body.String())
}
func TestGetUserFromLDAPAPIEndpoint_WithTeamHandler(t *testing.T) {
isAdmin := true
userSearchResult = &models.ExternalUserInfo{
Name: "John Doe",
Email: "[email protected]",
Login: "johndoe",
Groups: []string{"cn=admins,ou=groups,dc=grafana,dc=org"},
OrgRoles: map[int64]models.RoleType{1: models.ROLE_ADMIN},
IsGrafanaAdmin: &isAdmin,
}
userSearchConfig = ldap.ServerConfig{
Attr: ldap.AttributeMap{
Name: "ldap-name",
Surname: "ldap-surname",
Email: "ldap-email",
Username: "ldap-username",
},
Groups: []*ldap.GroupToOrgRole{
{
GroupDN: "cn=admins,ou=groups,dc=grafana,dc=org",
OrgId: 1,
OrgRole: models.ROLE_ADMIN,
},
},
}
mockOrgSearchResult := []*models.OrgDTO{
{Id: 1, Name: "Main Org."},
}
getLDAPConfig = func(*setting.Cfg) (*ldap.Config, error) {
return &ldap.Config{}, nil
}
newLDAP = func(_ []*ldap.ServerConfig) multildap.IMultiLDAP {
return &LDAPMock{}
}
sc := getUserFromLDAPContext(t, "/api/admin/ldap/johndoe", mockOrgSearchResult)
require.Equal(t, sc.resp.Code, http.StatusOK)
expected := `
{
"name": {
"cfgAttrValue": "ldap-name", "ldapValue": "John"
},
"surname": {
"cfgAttrValue": "ldap-surname", "ldapValue": "Doe"
},
"email": {
"cfgAttrValue": "ldap-email", "ldapValue": "[email protected]"
},
"login": {
"cfgAttrValue": "ldap-username", "ldapValue": "johndoe"
},
"isGrafanaAdmin": true,
"isDisabled": false,
"roles": [
{ "orgId": 1, "orgRole": "Admin", "orgName": "Main Org.", "groupDN": "cn=admins,ou=groups,dc=grafana,dc=org" }
],
"teams": null
}
`
assert.JSONEq(t, expected, sc.resp.Body.String())
}
// ***
// GetLDAPStatus tests
// ***
func getLDAPStatusContext(t *testing.T) *scenarioContext {
t.Helper()
requestURL := "/api/admin/ldap/status"
sc := setupScenarioContext(t, requestURL)
ldap := setting.LDAPEnabled
setting.LDAPEnabled = true
t.Cleanup(func() { setting.LDAPEnabled = ldap })
hs := &HTTPServer{Cfg: setting.NewCfg()}
sc.defaultHandler = routing.Wrap(func(c *models.ReqContext) response.Response {
sc.context = c
return hs.GetLDAPStatus(c)
})
sc.m.Get("/api/admin/ldap/status", sc.defaultHandler)
sc.resp = httptest.NewRecorder()
req, _ := http.NewRequest(http.MethodGet, requestURL, nil)
sc.req = req
sc.exec()
return sc
}
func TestGetLDAPStatusAPIEndpoint(t *testing.T) {
pingResult = []*multildap.ServerStatus{
{Host: "10.0.0.3", Port: 361, Available: true, Error: nil},
{Host: "10.0.0.3", Port: 362, Available: true, Error: nil},
{Host: "10.0.0.5", Port: 361, Available: false, Error: errors.New("something is awfully wrong")},
}
getLDAPConfig = func(*setting.Cfg) (*ldap.Config, error) {
return &ldap.Config{}, nil
}
newLDAP = func(_ []*ldap.ServerConfig) multildap.IMultiLDAP {
return &LDAPMock{}
}
sc := getLDAPStatusContext(t)
require.Equal(t, http.StatusOK, sc.resp.Code)
expected := `
[
{ "host": "10.0.0.3", "port": 361, "available": true, "error": "" },
{ "host": "10.0.0.3", "port": 362, "available": true, "error": "" },
{ "host": "10.0.0.5", "port": 361, "available": false, "error": "something is awfully wrong" }
]
`
assert.JSONEq(t, expected, sc.resp.Body.String())
}
// ***
// PostSyncUserWithLDAP tests
// ***
func postSyncUserWithLDAPContext(t *testing.T, requestURL string, preHook func(*testing.T, *scenarioContext), sqlstoremock sqlstore.Store) *scenarioContext {
t.Helper()
sc := setupScenarioContext(t, requestURL)
sc.authInfoService = &logintest.AuthInfoServiceFake{}
ldap := setting.LDAPEnabled
t.Cleanup(func() {
setting.LDAPEnabled = ldap
})
setting.LDAPEnabled = true
hs := &HTTPServer{
Cfg: sc.cfg,
AuthTokenService: auth.NewFakeUserAuthTokenService(),
SQLStore: sqlstoremock,
Login: loginservice.LoginServiceMock{},
authInfoService: sc.authInfoService,
}
sc.defaultHandler = routing.Wrap(func(c *models.ReqContext) response.Response {
sc.context = c
return hs.PostSyncUserWithLDAP(c)
})
sc.m.Post("/api/admin/ldap/sync/:id", sc.defaultHandler)
sc.resp = httptest.NewRecorder()
req, err := http.NewRequest(http.MethodPost, requestURL, nil)
require.NoError(t, err)
preHook(t, sc)
sc.req = req
sc.exec()
return sc
}
func TestPostSyncUserWithLDAPAPIEndpoint_Success(t *testing.T) {
sqlstoremock := mockstore.SQLStoreMock{}
sqlstoremock.ExpectedUser = &models.User{Login: "ldap-daniel", Id: 34}
sc := postSyncUserWithLDAPContext(t, "/api/admin/ldap/sync/34", func(t *testing.T, sc *scenarioContext) {
getLDAPConfig = func(*setting.Cfg) (*ldap.Config, error) {
return &ldap.Config{}, nil
}
newLDAP = func(_ []*ldap.ServerConfig) multildap.IMultiLDAP {
return &LDAPMock{}
}
userSearchResult = &models.ExternalUserInfo{
Login: "ldap-daniel",
}
}, &sqlstoremock)
assert.Equal(t, http.StatusOK, sc.resp.Code)
expected := `
{
"message": "User synced successfully"
}
`
assert.JSONEq(t, expected, sc.resp.Body.String())
}
func TestPostSyncUserWithLDAPAPIEndpoint_WhenUserNotFound(t *testing.T) {
sqlstoremock := mockstore.SQLStoreMock{ExpectedError: models.ErrUserNotFound}
sc := postSyncUserWithLDAPContext(t, "/api/admin/ldap/sync/34", func(t *testing.T, sc *scenarioContext) {
getLDAPConfig = func(*setting.Cfg) (*ldap.Config, error) {
return &ldap.Config{}, nil
}
newLDAP = func(_ []*ldap.ServerConfig) multildap.IMultiLDAP {
return &LDAPMock{}
}
}, &sqlstoremock)
assert.Equal(t, http.StatusNotFound, sc.resp.Code)
expected := `
{
"message": "user not found"
}
`
assert.JSONEq(t, expected, sc.resp.Body.String())
}
func TestPostSyncUserWithLDAPAPIEndpoint_WhenGrafanaAdmin(t *testing.T) {
sqlstoremock := mockstore.SQLStoreMock{ExpectedUser: &models.User{Login: "ldap-daniel", Id: 34}}
sc := postSyncUserWithLDAPContext(t, "/api/admin/ldap/sync/34", func(t *testing.T, sc *scenarioContext) {
getLDAPConfig = func(*setting.Cfg) (*ldap.Config, error) {
return &ldap.Config{}, nil
}
newLDAP = func(_ []*ldap.ServerConfig) multildap.IMultiLDAP {
return &LDAPMock{}
}
userSearchError = multildap.ErrDidNotFindUser
sc.cfg.AdminUser = "ldap-daniel"
}, &sqlstoremock)
assert.Equal(t, http.StatusBadRequest, sc.resp.Code)
var res map[string]interface{}
err := json.Unmarshal(sc.resp.Body.Bytes(), &res)
assert.NoError(t, err)
assert.Equal(t, "did not find a user", res["error"])
assert.Equal(t, "Refusing to sync grafana super admin \"ldap-daniel\" - it would be disabled", res["message"])
}
func TestPostSyncUserWithLDAPAPIEndpoint_WhenUserNotInLDAP(t *testing.T) {
sqlstoremock := mockstore.SQLStoreMock{ExpectedUser: &models.User{Login: "ldap-daniel", Id: 34}}
sc := postSyncUserWithLDAPContext(t, "/api/admin/ldap/sync/34", func(t *testing.T, sc *scenarioContext) {
sc.authInfoService.ExpectedExternalUser = &models.ExternalUserInfo{IsDisabled: true, UserId: 34}
getLDAPConfig = func(*setting.Cfg) (*ldap.Config, error) {
return &ldap.Config{}, nil
}
newLDAP = func(_ []*ldap.ServerConfig) multildap.IMultiLDAP {
return &LDAPMock{}
}
userSearchResult = nil
userSearchError = multildap.ErrDidNotFindUser
}, &sqlstoremock)
assert.Equal(t, http.StatusBadRequest, sc.resp.Code)
expected := `
{
"message": "User not found in LDAP. Disabled the user without updating information"
}
`
assert.JSONEq(t, expected, sc.resp.Body.String())
}
// ***
// Access control tests for ldap endpoints
// ***
func TestLDAP_AccessControl(t *testing.T) {
tests := []accessControlTestCase{
{
url: "/api/admin/ldap/reload",
method: http.MethodPost,
desc: "ReloadLDAPCfg should return 200 for user with correct permissions",
expectedCode: http.StatusOK,
permissions: []*accesscontrol.Permission{
{Action: accesscontrol.ActionLDAPConfigReload},
},
},
{
url: "/api/admin/ldap/reload",
method: http.MethodPost,
desc: "ReloadLDAPCfg should return 403 for user without required permissions",
expectedCode: http.StatusForbidden,
permissions: []*accesscontrol.Permission{
{Action: "wrong"},
},
},
{
url: "/api/admin/ldap/status",
method: http.MethodGet,
desc: "GetLDAPStatus should return 200 for user without required permissions",
expectedCode: http.StatusOK,
permissions: []*accesscontrol.Permission{
{Action: accesscontrol.ActionLDAPStatusRead},
},
},
{
url: "/api/admin/ldap/status",
method: http.MethodGet,
desc: "GetLDAPStatus should return 200 for user without required permissions",
expectedCode: http.StatusForbidden,
permissions: []*accesscontrol.Permission{
{Action: "wrong"},
},
},
{
url: "/api/admin/ldap/test",
method: http.MethodGet,
desc: "GetUserFromLDAP should return 200 for user with required permissions",
expectedCode: http.StatusOK,
permissions: []*accesscontrol.Permission{
{Action: accesscontrol.ActionLDAPUsersRead},
},
},
{
url: "/api/admin/ldap/test",
method: http.MethodGet,
desc: "GetUserFromLDAP should return 403 for user without required permissions",
expectedCode: http.StatusForbidden,
permissions: []*accesscontrol.Permission{
{Action: "wrong"},
},
},
{
url: "/api/admin/ldap/sync/1",
method: http.MethodPost,
desc: "PostSyncUserWithLDAP should return 200 for user without required permissions",
expectedCode: http.StatusOK,
permissions: []*accesscontrol.Permission{
{Action: accesscontrol.ActionLDAPUsersSync},
},
},
{
url: "/api/admin/ldap/sync/1",
method: http.MethodPost,
desc: "PostSyncUserWithLDAP should return 200 for user without required permissions",
expectedCode: http.StatusForbidden,
permissions: []*accesscontrol.Permission{
{Action: "wrong"},
},
},
}
for _, test := range tests {
t.Run(test.desc, func(t *testing.T) {
enabled := setting.LDAPEnabled
configFile := setting.LDAPConfigFile
t.Cleanup(func() {
setting.LDAPEnabled = enabled
setting.LDAPConfigFile = configFile
})
setting.LDAPEnabled = true
path, err := filepath.Abs("../../conf/ldap.toml")
assert.NoError(t, err)
setting.LDAPConfigFile = path
cfg := setting.NewCfg()
cfg.LDAPEnabled = true
sc, hs := setupAccessControlScenarioContext(t, cfg, test.url, test.permissions)
hs.SQLStore = &mockstore.SQLStoreMock{ExpectedUser: &models.User{}}
hs.authInfoService = &logintest.AuthInfoServiceFake{}
hs.Login = &loginservice.LoginServiceMock{}
sc.resp = httptest.NewRecorder()
sc.req, err = http.NewRequest(test.method, test.url, nil)
assert.NoError(t, err)
// Add minimal setup to pass handler
userSearchResult = &models.ExternalUserInfo{}
userSearchError = nil
newLDAP = func(_ []*ldap.ServerConfig) multildap.IMultiLDAP {
return &LDAPMock{}
}
sc.exec()
assert.Equal(t, test.expectedCode, sc.resp.Code)
})
}
}
| pkg/api/ldap_debug_test.go | 0 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.00017540021508466452,
0.00017137863324023783,
0.00016461496124975383,
0.00017180237045977265,
0.0000029242917207739083
]
|
{
"id": 2,
"code_window": [
"}: PropsWithChildren<VariableTextFieldProps>): ReactElement {\n",
" return (\n",
" <InlineField label={name} labelWidth={labelWidth ?? 12} tooltip={tooltip} grow={grow}>\n",
" <Input\n",
" type=\"text\"\n",
" id={name}\n",
" name={name}\n",
" placeholder={placeholder}\n"
],
"labels": [
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" <InlineField interactive={interactive} label={name} labelWidth={labelWidth ?? 12} tooltip={tooltip} grow={grow}>\n"
],
"file_path": "public/app/features/variables/editor/VariableTextField.tsx",
"type": "replace",
"edit_start_line_idx": 32
} | import { useMemo } from 'react';
import { v4 as uuidv4 } from 'uuid';
import { getMessageFromError } from 'app/core/utils/errors';
import { AppNotification, AppNotificationSeverity, useDispatch } from 'app/types';
import { notifyApp } from '../actions';
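// Base notification shapes per severity; the create* helpers spread these and fill in the dynamic fields.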
const defaultSuccessNotification = {
title: '',
text: '',
severity: AppNotificationSeverity.Success,
icon: 'check',
};
const defaultWarningNotification = {
title: '',
text: '',
severity: AppNotificationSeverity.Warning,
icon: 'exclamation-triangle',
};
const defaultErrorNotification = {
title: '',
text: '',
severity: AppNotificationSeverity.Error,
icon: 'exclamation-triangle',
};
export const createSuccessNotification = (title: string, text = '', traceId?: string): AppNotification => ({
...defaultSuccessNotification,
title,
text,
traceId,
id: uuidv4(),
timestamp: Date.now(),
showing: true,
});
export const createErrorNotification = (
title: string,
text: string | Error = '',
traceId?: string,
component?: React.ReactElement
): AppNotification => {
return {
...defaultErrorNotification,
text: getMessageFromError(text),
title,
id: uuidv4(),
traceId,
component,
timestamp: Date.now(),
showing: true,
};
};
export const createWarningNotification = (title: string, text = '', traceId?: string): AppNotification => ({
...defaultWarningNotification,
title,
text,
traceId,
id: uuidv4(),
timestamp: Date.now(),
showing: true,
});
/** Hook for showing toast notifications with varying severity (success, warning error).
* @example
* const notifyApp = useAppNotification();
* notifyApp.success('Success!', 'Some additional text');
* notifyApp.warning('Warning!');
* notifyApp.error('Error!');
*/
export function useAppNotification() {
const dispatch = useDispatch();
return useMemo(
() => ({
success: (title: string, text = '') => {
dispatch(notifyApp(createSuccessNotification(title, text)));
},
warning: (title: string, text = '', traceId?: string) => {
dispatch(notifyApp(createWarningNotification(title, text, traceId)));
},
error: (title: string, text = '', traceId?: string) => {
dispatch(notifyApp(createErrorNotification(title, text, traceId)));
},
}),
[dispatch]
);
}
| public/app/core/copy/appNotification.ts | 0 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.00018403217836748809,
0.00017316918820142746,
0.00016881170449778438,
0.0001717963896226138,
0.000004007700226793531
]
|
{
"id": 2,
"code_window": [
"}: PropsWithChildren<VariableTextFieldProps>): ReactElement {\n",
" return (\n",
" <InlineField label={name} labelWidth={labelWidth ?? 12} tooltip={tooltip} grow={grow}>\n",
" <Input\n",
" type=\"text\"\n",
" id={name}\n",
" name={name}\n",
" placeholder={placeholder}\n"
],
"labels": [
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" <InlineField interactive={interactive} label={name} labelWidth={labelWidth ?? 12} tooltip={tooltip} grow={grow}>\n"
],
"file_path": "public/app/features/variables/editor/VariableTextField.tsx",
"type": "replace",
"edit_start_line_idx": 32
} | import classNames from 'classnames';
import React, { PureComponent, CSSProperties } from 'react';
import ReactGridLayout, { ItemCallback } from 'react-grid-layout';
import { connect, ConnectedProps } from 'react-redux';
import AutoSizer from 'react-virtualized-auto-sizer';
import { Subscription } from 'rxjs';
import { config } from '@grafana/runtime';
import { GRID_CELL_HEIGHT, GRID_CELL_VMARGIN, GRID_COLUMN_COUNT } from 'app/core/constants';
import { cleanAndRemoveMany } from 'app/features/panel/state/actions';
import { DashboardPanelsChangedEvent } from 'app/types/events';
import { AddPanelWidget } from '../components/AddPanelWidget';
import { DashboardRow } from '../components/DashboardRow';
import { DashboardModel, PanelModel } from '../state';
import { GridPos } from '../state/PanelModel';
import { DashboardPanel } from './DashboardPanel';
export interface OwnProps {
dashboard: DashboardModel;
editPanel: PanelModel | null;
viewPanel: PanelModel | null;
}
export interface State {
isLayoutInitialized: boolean;
}
const mapDispatchToProps = {
cleanAndRemoveMany,
};
const connector = connect(null, mapDispatchToProps);
export type Props = OwnProps & ConnectedProps<typeof connector>;
export class DashboardGridUnconnected extends PureComponent<Props, State> {
private panelMap: { [key: string]: PanelModel } = {};
private eventSubs = new Subscription();
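// Cached viewport dimensions; refreshed in renderPanels only when the grid width changes, to avoid layout re-flows.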
private windowHeight = 1200;
private windowWidth = 1920;
private gridWidth = 0;
/** Used to keep track of mobile panel layout position */
private lastPanelBottom = 0;
constructor(props: Props) {
super(props);
this.state = {
isLayoutInitialized: false,
};
}
componentDidMount() {
const { dashboard } = this.props;
this.eventSubs.add(dashboard.events.subscribe(DashboardPanelsChangedEvent, this.triggerForceUpdate));
}
componentWillUnmount() {
this.eventSubs.unsubscribe();
this.props.cleanAndRemoveMany(Object.keys(this.panelMap));
}
buildLayout() {
const layout = [];
this.panelMap = {};
for (const panel of this.props.dashboard.panels) {
if (!panel.key) {
panel.key = `panel-${panel.id}-${Date.now()}`;
}
this.panelMap[panel.key] = panel;
if (!panel.gridPos) {
console.log('panel without gridpos');
continue;
}
const panelPos: any = {
i: panel.key,
x: panel.gridPos.x,
y: panel.gridPos.y,
w: panel.gridPos.w,
h: panel.gridPos.h,
};
if (panel.type === 'row') {
panelPos.w = GRID_COLUMN_COUNT;
panelPos.h = 1;
panelPos.isResizable = false;
panelPos.isDraggable = panel.collapsed;
}
layout.push(panelPos);
}
return layout;
}
onLayoutChange = (newLayout: ReactGridLayout.Layout[]) => {
for (const newPos of newLayout) {
this.panelMap[newPos.i!].updateGridPos(newPos);
}
this.props.dashboard.sortPanelsByGridPos();
// This is called on grid mount as it can correct invalid initial grid positions
if (!this.state.isLayoutInitialized) {
this.setState({ isLayoutInitialized: true });
}
};
triggerForceUpdate = () => {
this.forceUpdate();
};
updateGridPos = (item: ReactGridLayout.Layout, layout: ReactGridLayout.Layout[]) => {
this.panelMap[item.i!].updateGridPos(item);
};
onResize: ItemCallback = (layout, oldItem, newItem) => {
const panel = this.panelMap[newItem.i!];
panel.updateGridPos(newItem);
panel.configRev++; // trigger change handler
};
onResizeStop: ItemCallback = (layout, oldItem, newItem) => {
this.updateGridPos(newItem, layout);
};
onDragStop: ItemCallback = (layout, oldItem, newItem) => {
this.updateGridPos(newItem, layout);
};
getPanelScreenPos(panel: PanelModel, gridWidth: number): { top: number; bottom: number } {
let top = 0;
// mobile layout
if (gridWidth < config.theme2.breakpoints.values.md) {
// In mobile layout panels are stacked so we just add the panel vertical margin to the last panel bottom position
top = this.lastPanelBottom + GRID_CELL_VMARGIN;
} else {
// For top position we need to add back the vertical margin removed by translateGridHeightToScreenHeight
top = translateGridHeightToScreenHeight(panel.gridPos.y) + GRID_CELL_VMARGIN;
}
this.lastPanelBottom = top + translateGridHeightToScreenHeight(panel.gridPos.h);
return { top, bottom: this.lastPanelBottom };
}
renderPanels(gridWidth: number) {
const panelElements = [];
// Reset last panel bottom
this.lastPanelBottom = 0;
// This is to avoid layout re-flows, accessing window.innerHeight can trigger re-flow
// We assume here that if width change height might have changed as well
if (this.gridWidth !== gridWidth) {
this.windowHeight = window.innerHeight ?? 1000;
this.windowWidth = window.innerWidth;
this.gridWidth = gridWidth;
}
for (const panel of this.props.dashboard.panels) {
const panelClasses = classNames({ 'react-grid-item--fullscreen': panel.isViewing });
panelElements.push(
<GrafanaGridItem
key={panel.key}
className={panelClasses}
data-panelid={panel.id}
gridPos={panel.gridPos}
gridWidth={gridWidth}
windowHeight={this.windowHeight}
windowWidth={this.windowWidth}
isViewing={panel.isViewing}
>
{(width: number, height: number) => {
return this.renderPanel(panel, width, height);
}}
</GrafanaGridItem>
);
}
return panelElements;
}
renderPanel(panel: PanelModel, width: any, height: any) {
if (panel.type === 'row') {
return <DashboardRow key={panel.key} panel={panel} dashboard={this.props.dashboard} />;
}
if (panel.type === 'add-panel') {
return <AddPanelWidget key={panel.key} panel={panel} dashboard={this.props.dashboard} />;
}
return (
<DashboardPanel
key={panel.key}
stateKey={panel.key}
panel={panel}
dashboard={this.props.dashboard}
isEditing={panel.isEditing}
isViewing={panel.isViewing}
width={width}
height={height}
/>
);
}
render() {
const { dashboard } = this.props;
/**
* We have a parent with "flex: 1 1 0" we need to reset it to "flex: 1 1 auto" to have the AutoSizer
* properly working. For more information go here:
* https://github.com/bvaughn/react-virtualized/blob/master/docs/usingAutoSizer.md#can-i-use-autosizer-within-a-flex-container
*/
return (
<div style={{ flex: '1 1 auto', display: this.props.editPanel ? 'none' : undefined }}>
<AutoSizer disableHeight>
{({ width }) => {
if (width === 0) {
return null;
}
const draggable = width <= 769 ? false : dashboard.meta.canEdit;
/*
Disable draggable if mobile device, solving an issue with unintentionally
moving panels. https://github.com/grafana/grafana/issues/18497
theme.breakpoints.md = 769
*/
return (
/**
* The children is using a width of 100% so we need to guarantee that it is wrapped
* in an element that has the calculated size given by the AutoSizer. The AutoSizer
* has a width of 0 and will let its content overflow its div.
*/
<div style={{ width: `${width}px`, height: '100%' }}>
<ReactGridLayout
width={width}
isDraggable={draggable}
isResizable={dashboard.meta.canEdit}
containerPadding={[0, 0]}
useCSSTransforms={false}
margin={[GRID_CELL_VMARGIN, GRID_CELL_VMARGIN]}
cols={GRID_COLUMN_COUNT}
rowHeight={GRID_CELL_HEIGHT}
draggableHandle=".grid-drag-handle"
layout={this.buildLayout()}
onDragStop={this.onDragStop}
onResize={this.onResize}
onResizeStop={this.onResizeStop}
onLayoutChange={this.onLayoutChange}
>
{this.renderPanels(width)}
</ReactGridLayout>
</div>
);
}}
</AutoSizer>
</div>
);
}
}
interface GrafanaGridItemProps extends Record<string, any> {
gridWidth?: number;
gridPos?: GridPos;
isViewing: string;
windowHeight: number;
windowWidth: number;
children: any;
}
/**
* A hacky way to intercept the react-layout-grid item dimensions and pass them to DashboardPanel
*/
const GrafanaGridItem = React.forwardRef<HTMLDivElement, GrafanaGridItemProps>((props, ref) => {
const theme = config.theme2;
let width = 100;
let height = 100;
const { gridWidth, gridPos, isViewing, windowHeight, windowWidth, ...divProps } = props;
const style: CSSProperties = props.style ?? {};
if (isViewing) {
// In fullscreen view mode a single panel takes up the full width & 85% height
width = gridWidth!;
height = windowHeight * 0.85;
style.height = height;
style.width = '100%';
} else if (windowWidth < theme.breakpoints.values.md) {
// Mobile layout is a bit different: every panel takes up the full width
width = gridWidth!;
height = translateGridHeightToScreenHeight(gridPos!.h);
style.height = height;
style.width = '100%';
} else {
// Normal grid layout. The grid framework passes width and height directly to children as style props.
width = parseFloat(props.style.width);
height = parseFloat(props.style.height);
}
// props.children[0] is our main child. RGL adds the drag handle at props.children[1]
return (
<div {...divProps} ref={ref}>
{/* Pass width and height to children as render props */}
{[props.children[0](width, height), props.children.slice(1)]}
</div>
);
});
/**
* This translates grid height dimensions to real pixels
*/
function translateGridHeightToScreenHeight(gridHeight: number): number {
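// Each grid unit contributes one cell height plus one vertical margin; the trailing margin is trimmed off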
return gridHeight * (GRID_CELL_HEIGHT + GRID_CELL_VMARGIN) - GRID_CELL_VMARGIN;
}
GrafanaGridItem.displayName = 'GridItemWithDimensions';
export const DashboardGrid = connector(DashboardGridUnconnected);
| public/app/features/dashboard/dashgrid/DashboardGrid.tsx | 0 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.00022431476099882275,
0.00017118421965278685,
0.00016340985894203186,
0.0001695928949629888,
0.000009850485184870195
]
|
{
"id": 3,
"code_window": [
" name=\"Regex\"\n",
" placeholder=\"/.*-(?<text>.*)-(?<value>.*)-.*/\"\n",
" onChange={this.onRegExChange}\n",
" onBlur={this.onRegExBlur}\n",
" labelWidth={20}\n",
" tooltip={\n",
" <div>\n",
" Optional, if you want to extract part of a series name or metric node segment. Named capture groups\n",
" can be used to separate the display text and value (\n",
" <a\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" interactive={true}\n"
],
"file_path": "public/app/features/variables/query/QueryVariableEditor.tsx",
"type": "add",
"edit_start_line_idx": 203
} | import React, { FormEvent, PropsWithChildren, ReactElement } from 'react';
import { InlineField, Input, PopoverContent } from '@grafana/ui';
interface VariableTextFieldProps {
value: string;
name: string;
placeholder: string;
onChange: (event: FormEvent<HTMLInputElement>) => void;
testId?: string;
tooltip?: PopoverContent;
required?: boolean;
width?: number;
labelWidth?: number;
grow?: boolean;
onBlur?: (event: FormEvent<HTMLInputElement>) => void;
}
export function VariableTextField({
value,
name,
placeholder,
onChange,
testId,
width,
labelWidth,
required,
onBlur,
tooltip,
grow,
}: PropsWithChildren<VariableTextFieldProps>): ReactElement {
return (
<InlineField label={name} labelWidth={labelWidth ?? 12} tooltip={tooltip} grow={grow}>
<Input
type="text"
id={name}
name={name}
placeholder={placeholder}
value={value}
onChange={onChange}
onBlur={onBlur}
width={grow ? undefined : width ?? 25}
data-testid={testId}
required={required}
/>
</InlineField>
);
}
| public/app/features/variables/editor/VariableTextField.tsx | 1 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.00173791847191751,
0.0006340216496028006,
0.00016271996719297022,
0.00042357895290479064,
0.0005841953679919243
]
|
{
"id": 3,
"code_window": [
" name=\"Regex\"\n",
" placeholder=\"/.*-(?<text>.*)-(?<value>.*)-.*/\"\n",
" onChange={this.onRegExChange}\n",
" onBlur={this.onRegExBlur}\n",
" labelWidth={20}\n",
" tooltip={\n",
" <div>\n",
" Optional, if you want to extract part of a series name or metric node segment. Named capture groups\n",
" can be used to separate the display text and value (\n",
" <a\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" interactive={true}\n"
],
"file_path": "public/app/features/variables/query/QueryVariableEditor.tsx",
"type": "add",
"edit_start_line_idx": 203
} | // Media queries
// ---------------------
@include media-breakpoint-down(xs) {
input[type='text'],
input[type='number'],
textarea {
font-size: 16px;
}
}
| public/sass/_old_responsive.scss | 0 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.00016987595881801099,
0.00016656744992360473,
0.00016325892647728324,
0.00016656744992360473,
0.0000033085161703638732
]
|
{
"id": 3,
"code_window": [
" name=\"Regex\"\n",
" placeholder=\"/.*-(?<text>.*)-(?<value>.*)-.*/\"\n",
" onChange={this.onRegExChange}\n",
" onBlur={this.onRegExBlur}\n",
" labelWidth={20}\n",
" tooltip={\n",
" <div>\n",
" Optional, if you want to extract part of a series name or metric node segment. Named capture groups\n",
" can be used to separate the display text and value (\n",
" <a\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" interactive={true}\n"
],
"file_path": "public/app/features/variables/query/QueryVariableEditor.tsx",
"type": "add",
"edit_start_line_idx": 203
} | {
"type": "panel",
"name": "Live",
"id": "live",
"skipDataQuery": true,
"state": "alpha",
"info": {
"author": {
"name": "Grafana Labs",
"url": "https://grafana.com"
},
"logos": {
"small": "img/live.svg",
"large": "img/live.svg"
}
}
}
| public/app/plugins/panel/live/plugin.json | 0 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.00017339957412332296,
0.00016795028932392597,
0.00016325892647728324,
0.00016719233826734126,
0.0000041744492591533344
]
|
{
"id": 3,
"code_window": [
" name=\"Regex\"\n",
" placeholder=\"/.*-(?<text>.*)-(?<value>.*)-.*/\"\n",
" onChange={this.onRegExChange}\n",
" onBlur={this.onRegExBlur}\n",
" labelWidth={20}\n",
" tooltip={\n",
" <div>\n",
" Optional, if you want to extract part of a series name or metric node segment. Named capture groups\n",
" can be used to separate the display text and value (\n",
" <a\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" interactive={true}\n"
],
"file_path": "public/app/features/variables/query/QueryVariableEditor.tsx",
"type": "add",
"edit_start_line_idx": 203
} | {
"name": "Grafana-Email-Campaign",
"version": "1.0.0",
"description": "Grafana Email templates based on Zurb Ink",
"repository": "dnnsldr/",
"author": {
"name": "dnnsldr",
"email": "[email protected]",
"url": "https://github.com/dnnsldr"
},
"scripts": {
"build": "grunt",
"start": "grunt watch"
},
"devDependencies": {
"grunt": "1.0.1",
"grunt-premailer": "1.1.0",
"grunt-processhtml": "^0.4.2",
"grunt-uncss": "0.9.0",
"load-grunt-config": "3.0.1",
"grunt-contrib-watch": "1.1.0",
"grunt-text-replace": "0.4.0",
"grunt-assemble": "0.6.3",
"grunt-contrib-clean": "2.0.0"
}
}
| emails/package.json | 0 | https://github.com/grafana/grafana/commit/9db95826a41d016e90d5e4cc590acca6b21945ba | [
0.00017319632752332836,
0.00017062450933735818,
0.00016831234097480774,
0.00017036485951393843,
0.000002002314431592822
]
|
{
"id": 0,
"code_window": [
"\t\t\treturn;\n",
"\t\t}\n",
"\n",
"\t\tconst scrollPadding = this._notebookEditor.notebookOptions.computeTopInsertToolbarHeight(this._notebookEditor.textModel.viewType);\n",
"\n",
"\t\tconst cellExecutions = this._notebookExecutionStateService.getCellExecutionsForNotebook(this._notebookEditor.textModel?.uri)\n",
"\t\t\t.filter(exe => exe.state === NotebookCellExecutionState.Executing);\n",
"\t\tconst notebookExecution = this._notebookExecutionStateService.getExecution(this._notebookEditor.textModel?.uri);\n",
"\t\tconst executionIsVisible = (exe: INotebookCellExecution) => {\n",
"\t\t\tfor (const range of this._notebookEditor.visibleRanges) {\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "src/vs/workbench/contrib/notebook/browser/contrib/execute/executionEditorProgress.ts",
"type": "replace",
"edit_start_line_idx": 44
} | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { IDisposable } from 'vs/base/common/lifecycle';
import { clamp } from 'vs/base/common/numbers';
import { ICellViewModel, INotebookEditor } from 'vs/workbench/contrib/notebook/browser/notebookBrowser';
export function registerCellToolbarStickyScroll(notebookEditor: INotebookEditor, cell: ICellViewModel, element: HTMLElement, opts?: { extraOffset?: number; min?: number }): IDisposable {
const extraOffset = opts?.extraOffset ?? 0;
const min = opts?.min ?? 0;
const updateForScroll = () => {
if (cell.isInputCollapsed) {
element.style.top = '';
} else {
const scrollPadding = notebookEditor.notebookOptions.computeTopInsertToolbarHeight(notebookEditor.textModel?.viewType);
const scrollTop = notebookEditor.scrollTop - scrollPadding;
const elementTop = notebookEditor.getAbsoluteTopOfElement(cell);
const diff = scrollTop - elementTop + extraOffset;
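// A positive diff means the cell top has scrolled past the viewport top, so the toolbar must move down to stay visible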
const maxTop = cell.layoutInfo.editorHeight + cell.layoutInfo.statusBarHeight - 45; // subtract roughly the height of the execution order label plus padding
const top = maxTop > 20 ? // Don't move the run button if it can only move a very short distance
clamp(min, diff, maxTop) :
min;
element.style.top = `${top}px`;
}
};
updateForScroll();
return notebookEditor.onDidScroll(() => updateForScroll());
}
| src/vs/workbench/contrib/notebook/browser/view/cellParts/cellToolbarStickyScroll.ts | 1 | https://github.com/microsoft/vscode/commit/4aa6972baf75b2096e1d7ff23491648bcf2ed527 | [
0.8035044074058533,
0.2010677605867386,
0.00016658604727126658,
0.0003000302240252495,
0.34781697392463684
]
|
{
"id": 0,
"code_window": [
"\t\t\treturn;\n",
"\t\t}\n",
"\n",
"\t\tconst scrollPadding = this._notebookEditor.notebookOptions.computeTopInsertToolbarHeight(this._notebookEditor.textModel.viewType);\n",
"\n",
"\t\tconst cellExecutions = this._notebookExecutionStateService.getCellExecutionsForNotebook(this._notebookEditor.textModel?.uri)\n",
"\t\t\t.filter(exe => exe.state === NotebookCellExecutionState.Executing);\n",
"\t\tconst notebookExecution = this._notebookExecutionStateService.getExecution(this._notebookEditor.textModel?.uri);\n",
"\t\tconst executionIsVisible = (exe: INotebookCellExecution) => {\n",
"\t\t\tfor (const range of this._notebookEditor.visibleRanges) {\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "src/vs/workbench/contrib/notebook/browser/contrib/execute/executionEditorProgress.ts",
"type": "replace",
"edit_start_line_idx": 44
} | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { localize } from 'vs/nls';
import { registerColor } from 'vs/platform/theme/common/colorRegistry';
export const diffMoveBorder = registerColor(
'diffEditor.move.border',
{ dark: '#8b8b8b9c', light: '#8b8b8b9c', hcDark: '#8b8b8b9c', hcLight: '#8b8b8b9c', },
localize('diffEditor.move.border', 'The border color for text that got moved in the diff editor.')
);
| src/vs/editor/browser/widget/diffEditorWidget2/colors.ts | 0 | https://github.com/microsoft/vscode/commit/4aa6972baf75b2096e1d7ff23491648bcf2ed527 | [
0.00017515785293653607,
0.00017224359908141196,
0.00016932934522628784,
0.00017224359908141196,
0.000002914253855124116
]
|
{
"id": 0,
"code_window": [
"\t\t\treturn;\n",
"\t\t}\n",
"\n",
"\t\tconst scrollPadding = this._notebookEditor.notebookOptions.computeTopInsertToolbarHeight(this._notebookEditor.textModel.viewType);\n",
"\n",
"\t\tconst cellExecutions = this._notebookExecutionStateService.getCellExecutionsForNotebook(this._notebookEditor.textModel?.uri)\n",
"\t\t\t.filter(exe => exe.state === NotebookCellExecutionState.Executing);\n",
"\t\tconst notebookExecution = this._notebookExecutionStateService.getExecution(this._notebookEditor.textModel?.uri);\n",
"\t\tconst executionIsVisible = (exe: INotebookCellExecution) => {\n",
"\t\t\tfor (const range of this._notebookEditor.visibleRanges) {\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "src/vs/workbench/contrib/notebook/browser/contrib/execute/executionEditorProgress.ts",
"type": "replace",
"edit_start_line_idx": 44
} | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import 'vs/css!./media/editorgroupview';
import { EditorGroupModel, IEditorOpenOptions, IGroupModelChangeEvent, ISerializedEditorGroupModel, isGroupEditorCloseEvent, isGroupEditorOpenEvent, isSerializedEditorGroupModel } from 'vs/workbench/common/editor/editorGroupModel';
import { GroupIdentifier, CloseDirection, IEditorCloseEvent, IEditorPane, SaveReason, IEditorPartOptionsChangeEvent, EditorsOrder, IVisibleEditorPane, EditorResourceAccessor, EditorInputCapabilities, IUntypedEditorInput, DEFAULT_EDITOR_ASSOCIATION, SideBySideEditor, EditorCloseContext, IEditorWillMoveEvent, IEditorWillOpenEvent, IMatchEditorOptions, GroupModelChangeKind, IActiveEditorChangeEvent, IFindEditorOptions } from 'vs/workbench/common/editor';
import { ActiveEditorGroupLockedContext, ActiveEditorDirtyContext, EditorGroupEditorsCountContext, ActiveEditorStickyContext, ActiveEditorPinnedContext, ActiveEditorLastInGroupContext, ActiveEditorFirstInGroupContext } from 'vs/workbench/common/contextkeys';
import { EditorInput } from 'vs/workbench/common/editor/editorInput';
import { SideBySideEditorInput } from 'vs/workbench/common/editor/sideBySideEditorInput';
import { Emitter, Relay } from 'vs/base/common/event';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { Dimension, trackFocus, addDisposableListener, EventType, EventHelper, findParentWithClass, clearNode, isAncestor, IDomNodePagePosition } from 'vs/base/browser/dom';
import { ServiceCollection } from 'vs/platform/instantiation/common/serviceCollection';
import { IContextKeyService } from 'vs/platform/contextkey/common/contextkey';
import { ProgressBar } from 'vs/base/browser/ui/progressbar/progressbar';
import { IThemeService, Themable } from 'vs/platform/theme/common/themeService';
import { editorBackground, contrastBorder } from 'vs/platform/theme/common/colorRegistry';
import { EDITOR_GROUP_HEADER_TABS_BACKGROUND, EDITOR_GROUP_HEADER_NO_TABS_BACKGROUND, EDITOR_GROUP_EMPTY_BACKGROUND, EDITOR_GROUP_HEADER_BORDER } from 'vs/workbench/common/theme';
import { ICloseEditorsFilter, GroupsOrder, ICloseEditorOptions, ICloseAllEditorsOptions, IEditorReplacement } from 'vs/workbench/services/editor/common/editorGroupsService';
import { TabsTitleControl } from 'vs/workbench/browser/parts/editor/tabsTitleControl';
import { EditorPanes } from 'vs/workbench/browser/parts/editor/editorPanes';
import { IEditorProgressService } from 'vs/platform/progress/common/progress';
import { EditorProgressIndicator } from 'vs/workbench/services/progress/browser/progressIndicator';
import { localize } from 'vs/nls';
import { coalesce, firstOrDefault } from 'vs/base/common/arrays';
import { MutableDisposable, toDisposable } from 'vs/base/common/lifecycle';
import { ITelemetryService } from 'vs/platform/telemetry/common/telemetry';
import { DeferredPromise, Promises, RunOnceWorker } from 'vs/base/common/async';
import { EventType as TouchEventType, GestureEvent } from 'vs/base/browser/touch';
import { TitleControl } from 'vs/workbench/browser/parts/editor/titleControl';
import { IEditorGroupsAccessor, IEditorGroupView, fillActiveEditorViewState, EditorServiceImpl, IEditorGroupTitleHeight, IInternalEditorOpenOptions, IInternalMoveCopyOptions, IInternalEditorCloseOptions, IInternalEditorTitleControlOptions } from 'vs/workbench/browser/parts/editor/editor';
import { ActionBar } from 'vs/base/browser/ui/actionbar/actionbar';
import { IKeybindingService } from 'vs/platform/keybinding/common/keybinding';
import { IAction } from 'vs/base/common/actions';
import { NoTabsTitleControl } from 'vs/workbench/browser/parts/editor/noTabsTitleControl';
import { IMenuService, MenuId } from 'vs/platform/actions/common/actions';
import { StandardMouseEvent } from 'vs/base/browser/mouseEvent';
import { createAndFillInActionBarActions } from 'vs/platform/actions/browser/menuEntryActionViewItem';
import { IContextMenuService } from 'vs/platform/contextview/browser/contextView';
import { IEditorService } from 'vs/workbench/services/editor/common/editorService';
import { hash } from 'vs/base/common/hash';
import { getMimeTypes } from 'vs/editor/common/services/languagesAssociations';
import { extname, isEqual } from 'vs/base/common/resources';
import { Schemas } from 'vs/base/common/network';
import { EditorActivation, IEditorOptions } from 'vs/platform/editor/common/editor';
import { IFileDialogService, ConfirmResult } from 'vs/platform/dialogs/common/dialogs';
import { IFilesConfigurationService, AutoSaveMode } from 'vs/workbench/services/filesConfiguration/common/filesConfigurationService';
import { withNullAsUndefined } from 'vs/base/common/types';
import { URI } from 'vs/base/common/uri';
import { IUriIdentityService } from 'vs/platform/uriIdentity/common/uriIdentity';
import { isLinux, isMacintosh, isNative, isWindows } from 'vs/base/common/platform';
import { ILogService } from 'vs/platform/log/common/log';
import { TelemetryTrustedValue } from 'vs/platform/telemetry/common/telemetryUtils';
import { defaultProgressBarStyles } from 'vs/platform/theme/browser/defaultStyles';
import { IBoundarySashes } from 'vs/base/browser/ui/sash/sash';
import { EditorGroupWatermark } from 'vs/workbench/browser/parts/editor/editorGroupWatermark';
export class EditorGroupView extends Themable implements IEditorGroupView {
//#region factory
static createNew(accessor: IEditorGroupsAccessor, index: number, instantiationService: IInstantiationService): IEditorGroupView {
return instantiationService.createInstance(EditorGroupView, accessor, null, index);
}
static createFromSerialized(serialized: ISerializedEditorGroupModel, accessor: IEditorGroupsAccessor, index: number, instantiationService: IInstantiationService): IEditorGroupView {
return instantiationService.createInstance(EditorGroupView, accessor, serialized, index);
}
static createCopy(copyFrom: IEditorGroupView, accessor: IEditorGroupsAccessor, index: number, instantiationService: IInstantiationService): IEditorGroupView {
return instantiationService.createInstance(EditorGroupView, accessor, copyFrom, index);
}
//#endregion
/**
* Access to the context key service scoped to this editor group.
*/
readonly scopedContextKeyService: IContextKeyService;
//#region events
private readonly _onDidFocus = this._register(new Emitter<void>());
readonly onDidFocus = this._onDidFocus.event;
private readonly _onWillDispose = this._register(new Emitter<void>());
readonly onWillDispose = this._onWillDispose.event;
private readonly _onDidModelChange = this._register(new Emitter<IGroupModelChangeEvent>());
readonly onDidModelChange = this._onDidModelChange.event;
private readonly _onDidActiveEditorChange = this._register(new Emitter<IActiveEditorChangeEvent>());
readonly onDidActiveEditorChange = this._onDidActiveEditorChange.event;
private readonly _onDidOpenEditorFail = this._register(new Emitter<EditorInput>());
readonly onDidOpenEditorFail = this._onDidOpenEditorFail.event;
private readonly _onWillCloseEditor = this._register(new Emitter<IEditorCloseEvent>());
readonly onWillCloseEditor = this._onWillCloseEditor.event;
private readonly _onDidCloseEditor = this._register(new Emitter<IEditorCloseEvent>());
readonly onDidCloseEditor = this._onDidCloseEditor.event;
private readonly _onWillMoveEditor = this._register(new Emitter<IEditorWillMoveEvent>());
readonly onWillMoveEditor = this._onWillMoveEditor.event;
private readonly _onWillOpenEditor = this._register(new Emitter<IEditorWillOpenEvent>());
readonly onWillOpenEditor = this._onWillOpenEditor.event;
//#endregion
private readonly model: EditorGroupModel;
private active: boolean | undefined;
private lastLayout: IDomNodePagePosition | undefined;
private readonly scopedInstantiationService: IInstantiationService;
private readonly titleContainer: HTMLElement;
private titleAreaControl: TitleControl;
private readonly progressBar: ProgressBar;
private readonly editorContainer: HTMLElement;
private readonly editorPane: EditorPanes;
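// Buffers disposed editors and processes them together once the 0ms timeout fires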
private readonly disposedEditorsWorker = this._register(new RunOnceWorker<EditorInput>(editors => this.handleDisposedEditors(editors), 0));
private readonly mapEditorToPendingConfirmation = new Map<EditorInput, Promise<boolean>>();
private readonly containerToolBarMenuDisposable = this._register(new MutableDisposable());
private readonly whenRestoredPromise = new DeferredPromise<void>();
readonly whenRestored = this.whenRestoredPromise.p;
constructor(
private accessor: IEditorGroupsAccessor,
from: IEditorGroupView | ISerializedEditorGroupModel | null,
private _index: number,
@IInstantiationService private readonly instantiationService: IInstantiationService,
@IContextKeyService private readonly contextKeyService: IContextKeyService,
@IThemeService themeService: IThemeService,
@ITelemetryService private readonly telemetryService: ITelemetryService,
@IKeybindingService private readonly keybindingService: IKeybindingService,
@IMenuService private readonly menuService: IMenuService,
@IContextMenuService private readonly contextMenuService: IContextMenuService,
@IFileDialogService private readonly fileDialogService: IFileDialogService,
@IEditorService private readonly editorService: EditorServiceImpl,
@IFilesConfigurationService private readonly filesConfigurationService: IFilesConfigurationService,
@IUriIdentityService private readonly uriIdentityService: IUriIdentityService,
@ILogService private readonly logService: ILogService
) {
super(themeService);
if (from instanceof EditorGroupView) {
this.model = this._register(from.model.clone());
} else if (isSerializedEditorGroupModel(from)) {
this.model = this._register(instantiationService.createInstance(EditorGroupModel, from));
} else {
this.model = this._register(instantiationService.createInstance(EditorGroupModel, undefined));
}
//#region create()
{
// Scoped context key service
this.scopedContextKeyService = this._register(this.contextKeyService.createScoped(this.element));
// Container
this.element.classList.add('editor-group-container');
// Container listeners
this.registerContainerListeners();
// Container toolbar
this.createContainerToolbar();
// Container context menu
this.createContainerContextMenu();
// Watermark & shortcuts
this._register(this.instantiationService.createInstance(EditorGroupWatermark, this.element));
// Progress bar
this.progressBar = this._register(new ProgressBar(this.element, defaultProgressBarStyles));
this.progressBar.hide();
// Scoped instantiation service
this.scopedInstantiationService = this.instantiationService.createChild(new ServiceCollection(
[IContextKeyService, this.scopedContextKeyService],
[IEditorProgressService, this._register(new EditorProgressIndicator(this.progressBar, this))]
));
// Context keys
this.handleGroupContextKeys();
// Title container
this.titleContainer = document.createElement('div');
this.titleContainer.classList.add('title');
this.element.appendChild(this.titleContainer);
// Title control
this.titleAreaControl = this.createTitleAreaControl();
// Editor container
this.editorContainer = document.createElement('div');
this.editorContainer.classList.add('editor-container');
this.element.appendChild(this.editorContainer);
// Editor pane
this.editorPane = this._register(this.scopedInstantiationService.createInstance(EditorPanes, this.element, this.editorContainer, this));
this._onDidChange.input = this.editorPane.onDidChangeSizeConstraints;
// Track Focus
this.doTrackFocus();
// Update containers
this.updateTitleContainer();
this.updateContainer();
// Update styles
this.updateStyles();
}
//#endregion
// Restore editors if provided
const restoreEditorsPromise = this.restoreEditors(from) ?? Promise.resolve();
// Signal restored once editors have restored
restoreEditorsPromise.finally(() => {
this.whenRestoredPromise.complete();
});
// Register Listeners
this.registerListeners();
}
private handleGroupContextKeys(): void {
const groupActiveEditorDirtyContext = ActiveEditorDirtyContext.bindTo(this.scopedContextKeyService);
const groupActiveEditorPinnedContext = ActiveEditorPinnedContext.bindTo(this.scopedContextKeyService);
const groupActiveEditorFirstContext = ActiveEditorFirstInGroupContext.bindTo(this.scopedContextKeyService);
const groupActiveEditorLastContext = ActiveEditorLastInGroupContext.bindTo(this.scopedContextKeyService);
const groupActiveEditorStickyContext = ActiveEditorStickyContext.bindTo(this.scopedContextKeyService);
const groupEditorsCountContext = EditorGroupEditorsCountContext.bindTo(this.scopedContextKeyService);
const groupLockedContext = ActiveEditorGroupLockedContext.bindTo(this.scopedContextKeyService);
const activeEditorListener = new MutableDisposable();
const observeActiveEditor = () => {
activeEditorListener.clear();
const activeEditor = this.model.activeEditor;
if (activeEditor) {
groupActiveEditorDirtyContext.set(activeEditor.isDirty() && !activeEditor.isSaving());
activeEditorListener.value = activeEditor.onDidChangeDirty(() => {
groupActiveEditorDirtyContext.set(activeEditor.isDirty() && !activeEditor.isSaving());
});
} else {
groupActiveEditorDirtyContext.set(false);
}
};
// Update group contexts based on group changes
this._register(this.onDidModelChange(e => {
switch (e.kind) {
case GroupModelChangeKind.GROUP_LOCKED:
groupLockedContext.set(this.isLocked);
break;
case GroupModelChangeKind.EDITOR_ACTIVE:
case GroupModelChangeKind.EDITOR_CLOSE:
case GroupModelChangeKind.EDITOR_OPEN:
case GroupModelChangeKind.EDITOR_MOVE:
groupActiveEditorFirstContext.set(this.model.isFirst(this.model.activeEditor));
groupActiveEditorLastContext.set(this.model.isLast(this.model.activeEditor));
break;
case GroupModelChangeKind.EDITOR_PIN:
if (e.editor && e.editor === this.model.activeEditor) {
groupActiveEditorPinnedContext.set(this.model.isPinned(this.model.activeEditor));
}
break;
case GroupModelChangeKind.EDITOR_STICKY:
if (e.editor && e.editor === this.model.activeEditor) {
groupActiveEditorStickyContext.set(this.model.isSticky(this.model.activeEditor));
}
break;
}
// Group editors count context
groupEditorsCountContext.set(this.count);
}));
// Track the active editor and update context key that reflects
// the dirty state of this editor
this._register(this.onDidActiveEditorChange(() => {
observeActiveEditor();
}));
observeActiveEditor();
}
private registerContainerListeners(): void {
// Open new file via doubleclick on empty container
this._register(addDisposableListener(this.element, EventType.DBLCLICK, e => {
if (this.isEmpty) {
EventHelper.stop(e);
this.editorService.openEditor({
resource: undefined,
options: {
pinned: true,
override: DEFAULT_EDITOR_ASSOCIATION.id
}
}, this.id);
}
}));
// Close empty editor group via middle mouse click
this._register(addDisposableListener(this.element, EventType.AUXCLICK, e => {
if (this.isEmpty && e.button === 1 /* Middle Button */) {
EventHelper.stop(e, true);
this.accessor.removeGroup(this);
}
}));
}
private createContainerToolbar(): void {
// Toolbar Container
const toolbarContainer = document.createElement('div');
toolbarContainer.classList.add('editor-group-container-toolbar');
this.element.appendChild(toolbarContainer);
// Toolbar
const containerToolbar = this._register(new ActionBar(toolbarContainer, {
ariaLabel: localize('ariaLabelGroupActions', "Empty editor group actions")
}));
// Toolbar actions
const containerToolbarMenu = this._register(this.menuService.createMenu(MenuId.EmptyEditorGroup, this.scopedContextKeyService));
const updateContainerToolbar = () => {
const actions: { primary: IAction[]; secondary: IAction[] } = { primary: [], secondary: [] };
// Clear old actions
this.containerToolBarMenuDisposable.value = toDisposable(() => containerToolbar.clear());
// Create new actions
createAndFillInActionBarActions(
containerToolbarMenu,
{ arg: { groupId: this.id }, shouldForwardArgs: true },
actions,
'navigation'
);
for (const action of [...actions.primary, ...actions.secondary]) {
const keybinding = this.keybindingService.lookupKeybinding(action.id);
containerToolbar.push(action, { icon: true, label: false, keybinding: keybinding?.getLabel() });
}
};
updateContainerToolbar();
this._register(containerToolbarMenu.onDidChange(updateContainerToolbar));
}
private createContainerContextMenu(): void {
this._register(addDisposableListener(this.element, EventType.CONTEXT_MENU, e => this.onShowContainerContextMenu(e)));
this._register(addDisposableListener(this.element, TouchEventType.Contextmenu, () => this.onShowContainerContextMenu()));
}
private onShowContainerContextMenu(e?: MouseEvent): void {
if (!this.isEmpty) {
return; // only for empty editor groups
}
// Find target anchor
let anchor: HTMLElement | StandardMouseEvent = this.element;
if (e instanceof MouseEvent) {
anchor = new StandardMouseEvent(e);
}
// Show it
this.contextMenuService.showContextMenu({
menuId: MenuId.EmptyEditorGroupContext,
contextKeyService: this.contextKeyService,
getAnchor: () => anchor,
onHide: () => {
this.focus();
}
});
}
private doTrackFocus(): void {
// Container
const containerFocusTracker = this._register(trackFocus(this.element));
this._register(containerFocusTracker.onDidFocus(() => {
if (this.isEmpty) {
this._onDidFocus.fire(); // only when empty to prevent accident focus
}
}));
// Title Container
const handleTitleClickOrTouch = (e: MouseEvent | GestureEvent): void => {
let target: HTMLElement;
if (e instanceof MouseEvent) {
if (e.button !== 0 /* middle/right mouse button */ || (isMacintosh && e.ctrlKey /* macOS context menu */)) {
return undefined;
}
target = e.target as HTMLElement;
} else {
target = (e as GestureEvent).initialTarget as HTMLElement;
}
if (findParentWithClass(target, 'monaco-action-bar', this.titleContainer) ||
findParentWithClass(target, 'monaco-breadcrumb-item', this.titleContainer)
) {
return; // not when clicking on actions or breadcrumbs
}
// timeout to keep focus in editor after mouse up
setTimeout(() => {
this.focus();
});
};
this._register(addDisposableListener(this.titleContainer, EventType.MOUSE_DOWN, e => handleTitleClickOrTouch(e)));
this._register(addDisposableListener(this.titleContainer, TouchEventType.Tap, e => handleTitleClickOrTouch(e)));
// Editor pane
this._register(this.editorPane.onDidFocus(() => {
this._onDidFocus.fire();
}));
}
private updateContainer(): void {
// Empty Container: add some empty container attributes
if (this.isEmpty) {
this.element.classList.add('empty');
this.element.tabIndex = 0;
this.element.setAttribute('aria-label', localize('emptyEditorGroup', "{0} (empty)", this.label));
}
// Non-Empty Container: revert empty container attributes
else {
this.element.classList.remove('empty');
this.element.removeAttribute('tabIndex');
this.element.removeAttribute('aria-label');
}
// Update styles
this.updateStyles();
}
private updateTitleContainer(): void {
this.titleContainer.classList.toggle('tabs', this.accessor.partOptions.showTabs);
this.titleContainer.classList.toggle('show-file-icons', this.accessor.partOptions.showIcons);
}
private createTitleAreaControl(): TitleControl {
// Clear old if existing
if (this.titleAreaControl) {
this.titleAreaControl.dispose();
clearNode(this.titleContainer);
}
// Create new based on options
if (this.accessor.partOptions.showTabs) {
this.titleAreaControl = this.scopedInstantiationService.createInstance(TabsTitleControl, this.titleContainer, this.accessor, this);
} else {
this.titleAreaControl = this.scopedInstantiationService.createInstance(NoTabsTitleControl, this.titleContainer, this.accessor, this);
}
return this.titleAreaControl;
}
private restoreEditors(from: IEditorGroupView | ISerializedEditorGroupModel | null): Promise<void> | undefined {
if (this.count === 0) {
return; // nothing to show
}
// Determine editor options
let options: IEditorOptions;
if (from instanceof EditorGroupView) {
options = fillActiveEditorViewState(from); // if we copy from another group, ensure to copy its active editor viewstate
} else {
options = Object.create(null);
}
const activeEditor = this.model.activeEditor;
if (!activeEditor) {
return;
}
options.pinned = this.model.isPinned(activeEditor); // preserve pinned state
options.sticky = this.model.isSticky(activeEditor); // preserve sticky state
options.preserveFocus = true; // handle focus after editor is opened
const activeElement = document.activeElement;
// Show active editor (intentionally not using async to keep
// `restoreEditors` from executing in the same stack)
return this.doShowEditor(activeEditor, { active: true, isNew: false /* restored */ }, options).then(() => {
// Set focused now if this is the active group and focus has
// not changed meanwhile. This prevents focus from being
// stolen accidentally on startup when the user already
// clicked somewhere.
if (this.accessor.activeGroup === this && activeElement === document.activeElement) {
this.focus();
}
});
}
//#region event handling
private registerListeners(): void {
// Model Events
this._register(this.model.onDidModelChange(e => this.onDidGroupModelChange(e)));
// Option Changes
this._register(this.accessor.onDidChangeEditorPartOptions(e => this.onDidChangeEditorPartOptions(e)));
// Visibility
this._register(this.accessor.onDidVisibilityChange(e => this.onDidVisibilityChange(e)));
}
private onDidGroupModelChange(e: IGroupModelChangeEvent): void {
// Re-emit to outside
this._onDidModelChange.fire(e);
// Handle within
if (!e.editor) {
return;
}
switch (e.kind) {
case GroupModelChangeKind.EDITOR_OPEN:
if (isGroupEditorOpenEvent(e)) {
this.onDidOpenEditor(e.editor, e.editorIndex);
}
break;
case GroupModelChangeKind.EDITOR_CLOSE:
if (isGroupEditorCloseEvent(e)) {
this.handleOnDidCloseEditor(e.editor, e.editorIndex, e.context, e.sticky);
}
break;
case GroupModelChangeKind.EDITOR_WILL_DISPOSE:
this.onWillDisposeEditor(e.editor);
break;
case GroupModelChangeKind.EDITOR_DIRTY:
this.onDidChangeEditorDirty(e.editor);
break;
case GroupModelChangeKind.EDITOR_LABEL:
this.onDidChangeEditorLabel(e.editor);
break;
}
}
private onDidOpenEditor(editor: EditorInput, editorIndex: number): void {
/* __GDPR__
"editorOpened" : {
"owner": "bpasero",
"${include}": [
"${EditorTelemetryDescriptor}"
]
}
*/
this.telemetryService.publicLog('editorOpened', this.toEditorTelemetryDescriptor(editor));
// Update container
this.updateContainer();
}
private handleOnDidCloseEditor(editor: EditorInput, editorIndex: number, context: EditorCloseContext, sticky: boolean): void {
// Before close
this._onWillCloseEditor.fire({ groupId: this.id, editor, context, index: editorIndex, sticky });
// Handle event
const editorsToClose: EditorInput[] = [editor];
// Include both sides of side by side editors when being closed
if (editor instanceof SideBySideEditorInput) {
editorsToClose.push(editor.primary, editor.secondary);
}
// For each editor to close, we call dispose() to free up any resources.
// However, certain editors might be shared across multiple editor groups
// (including being visible in side by side / diff editors) and as such we
// only dispose when they are not opened elsewhere.
for (const editor of editorsToClose) {
if (this.canDispose(editor)) {
editor.dispose();
}
}
/* __GDPR__
"editorClosed" : {
"owner": "bpasero",
"${include}": [
"${EditorTelemetryDescriptor}"
]
}
*/
this.telemetryService.publicLog('editorClosed', this.toEditorTelemetryDescriptor(editor));
// Update container
this.updateContainer();
// Event
this._onDidCloseEditor.fire({ groupId: this.id, editor, context, index: editorIndex, sticky });
}
private canDispose(editor: EditorInput): boolean {
for (const groupView of this.accessor.groups) {
if (groupView instanceof EditorGroupView && groupView.model.contains(editor, {
strictEquals: true, // only if this input is not shared across editor groups
supportSideBySide: SideBySideEditor.ANY // include any side of an opened side by side editor
})) {
return false;
}
}
return true;
}
private toEditorTelemetryDescriptor(editor: EditorInput): object {
const descriptor = editor.getTelemetryDescriptor();
const resource = EditorResourceAccessor.getOriginalUri(editor);
const path = resource ? resource.scheme === Schemas.file ? resource.fsPath : resource.path : undefined;
if (resource && path) {
let resourceExt = extname(resource);
// Remove query parameters from the resource extension
const queryStringLocation = resourceExt.indexOf('?');
resourceExt = queryStringLocation !== -1 ? resourceExt.substr(0, queryStringLocation) : resourceExt;
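// Only a hash of the path is recorded so raw file paths never reach telemetry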
descriptor['resource'] = { mimeType: new TelemetryTrustedValue(getMimeTypes(resource).join(', ')), scheme: resource.scheme, ext: resourceExt, path: hash(path) };
/* __GDPR__FRAGMENT__
"EditorTelemetryDescriptor" : {
"resource": { "${inline}": [ "${URIDescriptor}" ] }
}
*/
return descriptor;
}
return descriptor;
}
private onWillDisposeEditor(editor: EditorInput): void {
// To prevent race conditions, we handle disposed editors in our worker with a timeout
// because it can happen that an input is being disposed with the intent to replace
// it with some other input right after.
this.disposedEditorsWorker.work(editor);
}
private handleDisposedEditors(disposedEditors: EditorInput[]): void {
// Split between visible and hidden editors
let activeEditor: EditorInput | undefined;
const inactiveEditors: EditorInput[] = [];
for (const disposedEditor of disposedEditors) {
const editorFindResult = this.model.findEditor(disposedEditor);
if (!editorFindResult) {
continue; // not part of the model anymore
}
const editor = editorFindResult[0];
if (!editor.isDisposed()) {
continue; // editor got reopened meanwhile
}
if (this.model.isActive(editor)) {
activeEditor = editor;
} else {
inactiveEditors.push(editor);
}
}
// Close all inactive editors first to prevent UI flicker
for (const inactiveEditor of inactiveEditors) {
this.doCloseEditor(inactiveEditor, false);
}
// Close active one last
if (activeEditor) {
this.doCloseEditor(activeEditor, false);
}
}
private onDidChangeEditorPartOptions(event: IEditorPartOptionsChangeEvent): void {
// Title container
this.updateTitleContainer();
// Title control Switch between showing tabs <=> not showing tabs
if (event.oldPartOptions.showTabs !== event.newPartOptions.showTabs) {
// Recreate title control
this.createTitleAreaControl();
// Re-layout
this.relayout();
// Ensure to show active editor if any
if (this.model.activeEditor) {
this.titleAreaControl.openEditor(this.model.activeEditor);
}
}
// Just update title control
else {
this.titleAreaControl.updateOptions(event.oldPartOptions, event.newPartOptions);
}
// Styles
this.updateStyles();
// Pin preview editor once user disables preview
if (event.oldPartOptions.enablePreview && !event.newPartOptions.enablePreview) {
if (this.model.previewEditor) {
this.pinEditor(this.model.previewEditor);
}
}
}
private onDidChangeEditorDirty(editor: EditorInput): void {
// Always show dirty editors pinned
this.pinEditor(editor);
// Forward to title control
this.titleAreaControl.updateEditorDirty(editor);
}
private onDidChangeEditorLabel(editor: EditorInput): void {
// Forward to title control
this.titleAreaControl.updateEditorLabel(editor);
}
private onDidVisibilityChange(visible: boolean): void {
// Forward to active editor pane
this.editorPane.setVisible(visible);
}
//#endregion
//#region IEditorGroupView
get index(): number {
return this._index;
}
get label(): string {
return localize('groupLabel', "Group {0}", this._index + 1);
}
get ariaLabel(): string {
return localize('groupAriaLabel', "Editor Group {0}", this._index + 1);
}
private _disposed = false;
get disposed(): boolean {
return this._disposed;
}
get isEmpty(): boolean {
return this.count === 0;
}
get titleHeight(): IEditorGroupTitleHeight {
return this.titleAreaControl.getHeight();
}
notifyIndexChanged(newIndex: number): void {
if (this._index !== newIndex) {
this._index = newIndex;
this.model.setIndex(newIndex);
}
}
setActive(isActive: boolean): void {
this.active = isActive;
// Update container
this.element.classList.toggle('active', isActive);
this.element.classList.toggle('inactive', !isActive);
// Update title control
this.titleAreaControl.setActive(isActive);
// Update styles
this.updateStyles();
// Update model
this.model.setActive(undefined /* entire group got active */);
}
//#endregion
//#region IEditorGroup
//#region basics()
get id(): GroupIdentifier {
return this.model.id;
}
get editors(): EditorInput[] {
return this.model.getEditors(EditorsOrder.SEQUENTIAL);
}
get count(): number {
return this.model.count;
}
get stickyCount(): number {
return this.model.stickyCount;
}
get activeEditorPane(): IVisibleEditorPane | undefined {
return this.editorPane ? withNullAsUndefined(this.editorPane.activeEditorPane) : undefined;
}
get activeEditor(): EditorInput | null {
return this.model.activeEditor;
}
get previewEditor(): EditorInput | null {
return this.model.previewEditor;
}
isPinned(editorOrIndex: EditorInput | number): boolean {
return this.model.isPinned(editorOrIndex);
}
isSticky(editorOrIndex: EditorInput | number): boolean {
return this.model.isSticky(editorOrIndex);
}
isActive(editor: EditorInput | IUntypedEditorInput): boolean {
return this.model.isActive(editor);
}
contains(candidate: EditorInput | IUntypedEditorInput, options?: IMatchEditorOptions): boolean {
return this.model.contains(candidate, options);
}
getEditors(order: EditorsOrder, options?: { excludeSticky?: boolean }): EditorInput[] {
return this.model.getEditors(order, options);
}
findEditors(resource: URI, options?: IFindEditorOptions): EditorInput[] {
const canonicalResource = this.uriIdentityService.asCanonicalUri(resource);
return this.getEditors(EditorsOrder.SEQUENTIAL).filter(editor => {
if (editor.resource && isEqual(editor.resource, canonicalResource)) {
return true;
}
// Support side by side editor primary side if specified
if (options?.supportSideBySide === SideBySideEditor.PRIMARY || options?.supportSideBySide === SideBySideEditor.ANY) {
const primaryResource = EditorResourceAccessor.getCanonicalUri(editor, { supportSideBySide: SideBySideEditor.PRIMARY });
if (primaryResource && isEqual(primaryResource, canonicalResource)) {
return true;
}
}
// Support side by side editor secondary side if specified
if (options?.supportSideBySide === SideBySideEditor.SECONDARY || options?.supportSideBySide === SideBySideEditor.ANY) {
const secondaryResource = EditorResourceAccessor.getCanonicalUri(editor, { supportSideBySide: SideBySideEditor.SECONDARY });
if (secondaryResource && isEqual(secondaryResource, canonicalResource)) {
return true;
}
}
return false;
});
}
getEditorByIndex(index: number): EditorInput | undefined {
return this.model.getEditorByIndex(index);
}
getIndexOfEditor(editor: EditorInput): number {
return this.model.indexOf(editor);
}
isFirst(editor: EditorInput): boolean {
return this.model.isFirst(editor);
}
isLast(editor: EditorInput): boolean {
return this.model.isLast(editor);
}
focus(): void {
// Pass focus to editor panes
if (this.activeEditorPane) {
this.activeEditorPane.focus();
} else {
this.element.focus();
}
// Event
this._onDidFocus.fire();
}
pinEditor(candidate: EditorInput | undefined = this.activeEditor || undefined): void {
if (candidate && !this.model.isPinned(candidate)) {
// Update model
const editor = this.model.pin(candidate);
// Forward to title control
if (editor) {
this.titleAreaControl.pinEditor(editor);
}
}
}
stickEditor(candidate: EditorInput | undefined = this.activeEditor || undefined): void {
this.doStickEditor(candidate, true);
}
unstickEditor(candidate: EditorInput | undefined = this.activeEditor || undefined): void {
this.doStickEditor(candidate, false);
}
private doStickEditor(candidate: EditorInput | undefined, sticky: boolean): void {
if (candidate && this.model.isSticky(candidate) !== sticky) {
const oldIndexOfEditor = this.getIndexOfEditor(candidate);
// Update model
const editor = sticky ? this.model.stick(candidate) : this.model.unstick(candidate);
if (!editor) {
return;
}
// If the index of the editor changed, we need to forward this to
// title control and also make sure to emit this as an event
const newIndexOfEditor = this.getIndexOfEditor(editor);
if (newIndexOfEditor !== oldIndexOfEditor) {
this.titleAreaControl.moveEditor(editor, oldIndexOfEditor, newIndexOfEditor);
}
// Forward sticky state to title control
if (sticky) {
this.titleAreaControl.stickEditor(editor);
} else {
this.titleAreaControl.unstickEditor(editor);
}
}
}
//#endregion
//#region openEditor()
async openEditor(editor: EditorInput, options?: IEditorOptions): Promise<IEditorPane | undefined> {
return this.doOpenEditor(editor, options, {
// Allow to match on a side-by-side editor when same
// editor is opened on both sides. In that case we
// do not want to open a new editor but reuse that one.
supportSideBySide: SideBySideEditor.BOTH
});
}
private async doOpenEditor(editor: EditorInput, options?: IEditorOptions, internalOptions?: IInternalEditorOpenOptions): Promise<IEditorPane | undefined> {
// Guard against invalid editors. Disposed editors
// should never open because they emit no events
// e.g. to indicate dirty changes.
if (!editor || editor.isDisposed()) {
return;
}
// Fire the event letting everyone know we are about to open an editor
this._onWillOpenEditor.fire({ editor, groupId: this.id });
// Determine options
const pinned = options?.sticky
|| !this.accessor.partOptions.enablePreview
|| editor.isDirty()
|| (options?.pinned ?? typeof options?.index === 'number' /* unless specified, prefer to pin when opening with index */)
|| (typeof options?.index === 'number' && this.model.isSticky(options.index))
|| editor.hasCapability(EditorInputCapabilities.Scratchpad);
const openEditorOptions: IEditorOpenOptions = {
index: options ? options.index : undefined,
pinned,
sticky: options?.sticky || (typeof options?.index === 'number' && this.model.isSticky(options.index)),
active: this.count === 0 || !options || !options.inactive,
supportSideBySide: internalOptions?.supportSideBySide
};
if (options?.sticky && typeof options?.index === 'number' && !this.model.isSticky(options.index)) {
// Special case: we are to open an editor sticky but at an index that is not sticky
// In that case we prefer to open the editor at the index but not sticky. This enables
// to drag a sticky editor to an index that is not sticky to unstick it.
openEditorOptions.sticky = false;
}
if (!openEditorOptions.active && !openEditorOptions.pinned && this.model.activeEditor && !this.model.isPinned(this.model.activeEditor)) {
// Special case: we are to open an editor inactive and not pinned, but the current active
// editor is also not pinned, which means it will get replaced with this one. As such,
// the editor can only be active.
openEditorOptions.active = true;
}
let activateGroup = false;
let restoreGroup = false;
if (options?.activation === EditorActivation.ACTIVATE) {
// Respect option to force activate an editor group.
activateGroup = true;
} else if (options?.activation === EditorActivation.RESTORE) {
// Respect option to force restore an editor group.
restoreGroup = true;
} else if (options?.activation === EditorActivation.PRESERVE) {
// Respect option to preserve active editor group.
activateGroup = false;
restoreGroup = false;
} else if (openEditorOptions.active) {
// Finally, we only activate/restore an editor which is
// opening as active editor.
// If preserveFocus is enabled, we only restore but never
// activate the group.
activateGroup = !options || !options.preserveFocus;
restoreGroup = !activateGroup;
}
// Actually move the editor if a specific index is provided and we figure
// out that the editor is already opened at a different index. This
// ensures the right set of events are fired to the outside.
if (typeof openEditorOptions.index === 'number') {
const indexOfEditor = this.model.indexOf(editor);
if (indexOfEditor !== -1 && indexOfEditor !== openEditorOptions.index) {
this.doMoveEditorInsideGroup(editor, openEditorOptions);
}
}
// Update model and make sure to continue to use the editor we get from
// the model. It is possible that the editor was already opened and we
// want to ensure that we use the existing instance in that case.
const { editor: openedEditor, isNew } = this.model.openEditor(editor, openEditorOptions);
// Conditionally lock the group
if (
isNew && // only if this editor was new for the group
this.count === 1 && // only when this editor was the first editor in the group
this.accessor.groups.length > 1 // only when there are more than one groups open
) {
// only when the editor identifier is configured as such
if (openedEditor.editorId && this.accessor.partOptions.autoLockGroups?.has(openedEditor.editorId)) {
this.lock(true);
}
}
// Show editor
const showEditorResult = this.doShowEditor(openedEditor, { active: !!openEditorOptions.active, isNew }, options, internalOptions);
// Finally make sure the group is active or restored as instructed
if (activateGroup) {
this.accessor.activateGroup(this);
} else if (restoreGroup) {
this.accessor.restoreGroup(this);
}
return showEditorResult;
}
private doShowEditor(editor: EditorInput, context: { active: boolean; isNew: boolean }, options?: IEditorOptions, internalOptions?: IInternalEditorOpenOptions): Promise<IEditorPane | undefined> {
// Show in editor control if the active editor changed
let openEditorPromise: Promise<IEditorPane | undefined>;
if (context.active) {
openEditorPromise = (async () => {
const { pane, changed, cancelled, error } = await this.editorPane.openEditor(editor, options, { newInGroup: context.isNew });
// Return early if the operation was cancelled by another operation
if (cancelled) {
return undefined;
}
// Editor change event
if (changed) {
this._onDidActiveEditorChange.fire({ editor });
}
// Indicate error as an event but do not bubble them up
if (error) {
this._onDidOpenEditorFail.fire(editor);
}
// Without an editor pane, recover by closing the active editor
// (if the input is still the active one)
if (!pane && this.activeEditor === editor) {
const focusNext = !options || !options.preserveFocus;
this.doCloseEditor(editor, focusNext, { fromError: true });
}
return pane;
})();
} else {
openEditorPromise = Promise.resolve(undefined); // inactive: return undefined as result to signal this
}
// Show in title control after editor control because some actions depend on it
// but respect the internal options in case title control updates should skip.
if (!internalOptions?.skipTitleUpdate) {
this.titleAreaControl.openEditor(editor);
}
return openEditorPromise;
}
//#endregion
//#region openEditors()
async openEditors(editors: { editor: EditorInput; options?: IEditorOptions }[]): Promise<IEditorPane | undefined> {
// Guard against invalid editors. Disposed editors
// should never open because they emit no events
// e.g. to indicate dirty changes.
const editorsToOpen = coalesce(editors).filter(({ editor }) => !editor.isDisposed());
// Use the first editor as active editor
const firstEditor = firstOrDefault(editorsToOpen);
if (!firstEditor) {
return;
}
const openEditorsOptions: IInternalEditorOpenOptions = {
// Allow to match on a side-by-side editor when same
// editor is opened on both sides. In that case we
// do not want to open a new editor but reuse that one.
supportSideBySide: SideBySideEditor.BOTH
};
await this.doOpenEditor(firstEditor.editor, firstEditor.options, openEditorsOptions);
// Open the other ones inactive
const inactiveEditors = editorsToOpen.slice(1);
const startingIndex = this.getIndexOfEditor(firstEditor.editor) + 1;
await Promises.settled(inactiveEditors.map(({ editor, options }, index) => {
return this.doOpenEditor(editor, {
...options,
inactive: true,
pinned: true,
index: startingIndex + index
}, {
...openEditorsOptions,
// optimization: update the title control later
// https://github.com/microsoft/vscode/issues/130634
skipTitleUpdate: true
});
}));
// Update the title control all at once with all editors
this.titleAreaControl.openEditors(inactiveEditors.map(({ editor }) => editor));
// Opening many editors at once can make any of them
// the active one, depending on options. As such, we simply
// return the active editor pane after this operation.
return withNullAsUndefined(this.editorPane.activeEditorPane);
}
//#endregion
//#region moveEditor()
moveEditors(editors: { editor: EditorInput; options?: IEditorOptions }[], target: EditorGroupView): void {
// Optimization: knowing that we move many editors, we
// delay the title update to a later point for this group
// through a method that allows for bulk updates, but only
// when moving to a different group, where bulk moves are
// more likely to occur.
const internalOptions: IInternalMoveCopyOptions = {
skipTitleUpdate: this !== target
};
for (const { editor, options } of editors) {
this.moveEditor(editor, target, options, internalOptions);
}
// Update the title control all at once with all editors
// in source and target if the title update was skipped
if (internalOptions.skipTitleUpdate) {
const movedEditors = editors.map(({ editor }) => editor);
target.titleAreaControl.openEditors(movedEditors);
this.titleAreaControl.closeEditors(movedEditors);
}
}
moveEditor(editor: EditorInput, target: EditorGroupView, options?: IEditorOptions, internalOptions?: IInternalEditorTitleControlOptions): void {
// Move within same group
if (this === target) {
this.doMoveEditorInsideGroup(editor, options);
}
// Move across groups
else {
this.doMoveOrCopyEditorAcrossGroups(editor, target, options, { ...internalOptions, keepCopy: false });
}
}
private doMoveEditorInsideGroup(candidate: EditorInput, options?: IEditorOpenOptions): void {
const moveToIndex = options ? options.index : undefined;
if (typeof moveToIndex !== 'number') {
return; // do nothing if we move into same group without index
}
const currentIndex = this.model.indexOf(candidate);
if (currentIndex === -1 || currentIndex === moveToIndex) {
return; // do nothing if editor unknown in model or is already at the given index
}
// Update model and make sure to continue to use the editor we get from
// the model. It is possible that the editor was already opened and we
// want to ensure that we use the existing instance in that case.
const editor = this.model.getEditorByIndex(currentIndex);
if (!editor) {
return;
}
// Update model
this.model.moveEditor(editor, moveToIndex);
this.model.pin(editor);
// Forward to title area
this.titleAreaControl.moveEditor(editor, currentIndex, moveToIndex);
this.titleAreaControl.pinEditor(editor);
}
private doMoveOrCopyEditorAcrossGroups(editor: EditorInput, target: EditorGroupView, openOptions?: IEditorOpenOptions, internalOptions?: IInternalMoveCopyOptions): void {
const keepCopy = internalOptions?.keepCopy;
// When moving/copying an editor, try to preserve as much view state as possible
// by checking for the editor to be a text editor and creating the options accordingly
// if so
const options = fillActiveEditorViewState(this, editor, {
...openOptions,
pinned: true, // always pin moved editor
sticky: !keepCopy && this.model.isSticky(editor) // preserve sticky state only if editor is moved (https://github.com/microsoft/vscode/issues/99035)
});
// Indicate will move event
if (!keepCopy) {
this._onWillMoveEditor.fire({
groupId: this.id,
editor,
target: target.id
});
}
// A move to another group is an open first...
target.doOpenEditor(keepCopy ? editor.copy() : editor, options, internalOptions);
// ...and a close afterwards (unless we copy)
if (!keepCopy) {
this.doCloseEditor(editor, false /* do not focus next one behind if any */, { ...internalOptions, context: EditorCloseContext.MOVE });
}
}
//#endregion
//#region copyEditor()
copyEditors(editors: { editor: EditorInput; options?: IEditorOptions }[], target: EditorGroupView): void {
// Optimization: knowing that we copy many editors, we
// delay the title update to a later point for this group
// through a method that allows for bulk updates, but only
// when copying to a different group, where bulk copies are
// more likely to occur.
const internalOptions: IInternalMoveCopyOptions = {
skipTitleUpdate: this !== target
};
for (const { editor, options } of editors) {
this.copyEditor(editor, target, options, internalOptions);
}
// Update the title control all at once with all editors
// in target if the title update was skipped
if (internalOptions.skipTitleUpdate) {
const copiedEditors = editors.map(({ editor }) => editor);
target.titleAreaControl.openEditors(copiedEditors);
}
}
copyEditor(editor: EditorInput, target: EditorGroupView, options?: IEditorOptions, internalOptions?: IInternalEditorTitleControlOptions): void {
		// Move within the same group because we do not support showing the same
		// editor multiple times in the same group
if (this === target) {
this.doMoveEditorInsideGroup(editor, options);
}
// Copy across groups
else {
this.doMoveOrCopyEditorAcrossGroups(editor, target, options, { ...internalOptions, keepCopy: true });
}
}
//#endregion
//#region closeEditor()
async closeEditor(editor: EditorInput | undefined = this.activeEditor || undefined, options?: ICloseEditorOptions): Promise<boolean> {
return this.doCloseEditorWithConfirmationHandling(editor, options);
}
private async doCloseEditorWithConfirmationHandling(editor: EditorInput | undefined = this.activeEditor || undefined, options?: ICloseEditorOptions, internalOptions?: IInternalEditorCloseOptions): Promise<boolean> {
if (!editor) {
return false;
}
// Check for confirmation and veto
const veto = await this.handleCloseConfirmation([editor]);
if (veto) {
return false;
}
// Do close
this.doCloseEditor(editor, options?.preserveFocus ? false : undefined, internalOptions);
return true;
}
private doCloseEditor(editor: EditorInput, focusNext = (this.accessor.activeGroup === this), internalOptions?: IInternalEditorCloseOptions): void {
// Forward to title control unless skipped via internal options
if (!internalOptions?.skipTitleUpdate) {
this.titleAreaControl.beforeCloseEditor(editor);
}
// Closing the active editor of the group is a bit more work
if (this.model.isActive(editor)) {
this.doCloseActiveEditor(focusNext, internalOptions);
}
// Closing inactive editor is just a model update
else {
this.doCloseInactiveEditor(editor, internalOptions);
}
// Forward to title control unless skipped via internal options
if (!internalOptions?.skipTitleUpdate) {
this.titleAreaControl.closeEditor(editor);
}
}
private doCloseActiveEditor(focusNext = (this.accessor.activeGroup === this), internalOptions?: IInternalEditorCloseOptions): void {
const editorToClose = this.activeEditor;
const restoreFocus = this.shouldRestoreFocus(this.element);
// Optimization: if we are about to close the last editor in this group and settings
// are configured to close the group since it will be empty, we first set the last
// active group as empty before closing the editor. This reduces the amount of editor
// change events that this operation emits and will reduce flicker. Without this
		// optimization, this group (if active) would first trigger an active editor change
// event because it became empty, only to then trigger another one when the next
// group gets active.
const closeEmptyGroup = this.accessor.partOptions.closeEmptyGroups;
if (closeEmptyGroup && this.active && this.count === 1) {
const mostRecentlyActiveGroups = this.accessor.getGroups(GroupsOrder.MOST_RECENTLY_ACTIVE);
const nextActiveGroup = mostRecentlyActiveGroups[1]; // [0] will be the current one, so take [1]
if (nextActiveGroup) {
if (restoreFocus) {
nextActiveGroup.focus();
} else {
this.accessor.activateGroup(nextActiveGroup);
}
}
}
// Update model
if (editorToClose) {
this.model.closeEditor(editorToClose, internalOptions?.context);
}
// Open next active if there are more to show
const nextActiveEditor = this.model.activeEditor;
if (nextActiveEditor) {
const preserveFocus = !focusNext;
let activation: EditorActivation | undefined = undefined;
if (preserveFocus && this.accessor.activeGroup !== this) {
// If we are opening the next editor in an inactive group
// without focussing it, ensure we preserve the editor
// group sizes in case that group is minimized.
// https://github.com/microsoft/vscode/issues/117686
activation = EditorActivation.PRESERVE;
}
const options: IEditorOptions = {
preserveFocus,
activation,
// When closing an editor due to an error we can end up in a loop where we continue closing
// editors that fail to open (e.g. when the file no longer exists). We do not want to show
// repeated errors in this case to the user. As such, if we open the next editor and we are
// in a scope of a previous editor failing, we silence the input errors until the editor is
// opened by setting ignoreError: true.
ignoreError: internalOptions?.fromError
};
this.doOpenEditor(nextActiveEditor, options);
}
// Otherwise we are empty, so clear from editor control and send event
else {
// Forward to editor pane
if (editorToClose) {
this.editorPane.closeEditor(editorToClose);
}
// Restore focus to group container as needed unless group gets closed
if (restoreFocus && !closeEmptyGroup) {
this.focus();
}
// Events
this._onDidActiveEditorChange.fire({ editor: undefined });
// Remove empty group if we should
if (closeEmptyGroup) {
this.accessor.removeGroup(this);
}
}
}
private shouldRestoreFocus(target: Element): boolean {
const activeElement = document.activeElement;
if (activeElement === document.body) {
return true; // always restore focus if nothing is focused currently
}
// otherwise check for the active element being an ancestor of the target
return isAncestor(activeElement, target);
}
private doCloseInactiveEditor(editor: EditorInput, internalOptions?: IInternalEditorCloseOptions): void {
// Update model
this.model.closeEditor(editor, internalOptions?.context);
}
private async handleCloseConfirmation(editors: EditorInput[]): Promise<boolean /* veto */> {
if (!editors.length) {
return false; // no veto
}
const editor = editors.shift()!;
// To prevent multiple confirmation dialogs from showing up one after the other
// we check if a pending confirmation is currently showing and if so, join that
let handleCloseConfirmationPromise = this.mapEditorToPendingConfirmation.get(editor);
if (!handleCloseConfirmationPromise) {
handleCloseConfirmationPromise = this.doHandleCloseConfirmation(editor);
this.mapEditorToPendingConfirmation.set(editor, handleCloseConfirmationPromise);
}
let veto: boolean;
try {
veto = await handleCloseConfirmationPromise;
} finally {
this.mapEditorToPendingConfirmation.delete(editor);
}
// Return for the first veto we got
if (veto) {
return veto;
}
		// Otherwise continue with the remaining editors
return this.handleCloseConfirmation(editors);
}
private async doHandleCloseConfirmation(editor: EditorInput, options?: { skipAutoSave: boolean }): Promise<boolean /* veto */> {
if (!this.shouldConfirmClose(editor)) {
return false; // no veto
}
if (editor instanceof SideBySideEditorInput && this.model.contains(editor.primary)) {
return false; // primary-side of editor is still opened somewhere else
}
		// Note: we explicitly decide to ask for confirmation when closing a normal editor even
// if it is opened in a side-by-side editor in the group. This decision is made
// because it may be less obvious that one side of a side by side editor is dirty
// and can still be changed.
// The only exception is when the same editor is opened on both sides of a side
// by side editor (https://github.com/microsoft/vscode/issues/138442)
if (this.accessor.groups.some(groupView => {
if (groupView === this) {
return false; // skip (we already handled our group above)
}
const otherGroup = groupView;
if (otherGroup.contains(editor, { supportSideBySide: SideBySideEditor.BOTH })) {
return true; // exact editor still opened (either single, or split-in-group)
}
if (editor instanceof SideBySideEditorInput && otherGroup.contains(editor.primary)) {
return true; // primary side of side by side editor still opened
}
return false;
})) {
return false; // editor is still editable somewhere else
}
		// In some cases we trigger a save before opening the dialog, depending
		// on the auto-save configuration.
// However, make sure to respect `skipAutoSave` option in case the automated
// save fails which would result in the editor never closing.
// Also, we only do this if no custom confirmation handling is implemented.
let confirmation = ConfirmResult.CANCEL;
let saveReason = SaveReason.EXPLICIT;
let autoSave = false;
if (!editor.hasCapability(EditorInputCapabilities.Untitled) && !options?.skipAutoSave && !editor.closeHandler) {
// Auto-save on focus change: save, because a dialog would steal focus
// (see https://github.com/microsoft/vscode/issues/108752)
if (this.filesConfigurationService.getAutoSaveMode() === AutoSaveMode.ON_FOCUS_CHANGE) {
autoSave = true;
confirmation = ConfirmResult.SAVE;
saveReason = SaveReason.FOCUS_CHANGE;
}
// Auto-save on window change: save, because on Windows and Linux, a
// native dialog triggers the window focus change
// (see https://github.com/microsoft/vscode/issues/134250)
else if ((isNative && (isWindows || isLinux)) && this.filesConfigurationService.getAutoSaveMode() === AutoSaveMode.ON_WINDOW_CHANGE) {
autoSave = true;
confirmation = ConfirmResult.SAVE;
saveReason = SaveReason.WINDOW_CHANGE;
}
}
// No auto-save on focus change or custom confirmation handler: ask user
if (!autoSave) {
			// Switch to the editor that we want to handle confirmation for, unless it is already showing
if (!this.activeEditor || !this.activeEditor.matches(editor)) {
await this.doOpenEditor(editor);
}
// Let editor handle confirmation if implemented
if (typeof editor.closeHandler?.confirm === 'function') {
confirmation = await editor.closeHandler.confirm([{ editor, groupId: this.id }]);
}
// Show a file specific confirmation
else {
let name: string;
if (editor instanceof SideBySideEditorInput) {
name = editor.primary.getName(); // prefer shorter names by using primary's name in this case
} else {
name = editor.getName();
}
confirmation = await this.fileDialogService.showSaveConfirm([name]);
}
}
// It could be that the editor's choice of confirmation has changed
// given the check for confirmation is long running, so we check
// again to see if anything needs to happen before closing for good.
// This can happen for example if `autoSave: onFocusChange` is configured
// so that the save happens when the dialog opens.
		// However, we skip this if a custom confirm handler is installed
// that may not be fit to be asked a second time right after.
if (!editor.closeHandler && !this.shouldConfirmClose(editor)) {
return confirmation === ConfirmResult.CANCEL ? true : false;
}
// Otherwise, handle accordingly
switch (confirmation) {
case ConfirmResult.SAVE: {
const result = await editor.save(this.id, { reason: saveReason });
if (!result && autoSave) {
// Save failed and we need to signal this back to the user, so
// we handle the dirty editor again but this time ensuring to
// show the confirm dialog
// (see https://github.com/microsoft/vscode/issues/108752)
return this.doHandleCloseConfirmation(editor, { skipAutoSave: true });
}
return editor.isDirty(); // veto if still dirty
}
case ConfirmResult.DONT_SAVE:
try {
// first try a normal revert where the contents of the editor are restored
await editor.revert(this.id);
return editor.isDirty(); // veto if still dirty
} catch (error) {
this.logService.error(error);
// if that fails, since we are about to close the editor, we accept that
// the editor cannot be reverted and instead do a soft revert that just
// enables us to close the editor. With this, a user can always close a
// dirty editor even when reverting fails.
await editor.revert(this.id, { soft: true });
return editor.isDirty(); // veto if still dirty
}
case ConfirmResult.CANCEL:
return true; // veto
}
}
private shouldConfirmClose(editor: EditorInput): boolean {
if (editor.closeHandler) {
return editor.closeHandler.showConfirm(); // custom handling of confirmation on close
}
return editor.isDirty() && !editor.isSaving(); // editor must be dirty and not saving
}
//#endregion
//#region closeEditors()
async closeEditors(args: EditorInput[] | ICloseEditorsFilter, options?: ICloseEditorOptions): Promise<boolean> {
if (this.isEmpty) {
return true;
}
const editors = this.doGetEditorsToClose(args);
// Check for confirmation and veto
const veto = await this.handleCloseConfirmation(editors.slice(0));
if (veto) {
return false;
}
// Do close
this.doCloseEditors(editors, options);
return true;
}
private doGetEditorsToClose(args: EditorInput[] | ICloseEditorsFilter): EditorInput[] {
if (Array.isArray(args)) {
return args;
}
const filter = args;
const hasDirection = typeof filter.direction === 'number';
let editorsToClose = this.model.getEditors(hasDirection ? EditorsOrder.SEQUENTIAL : EditorsOrder.MOST_RECENTLY_ACTIVE, filter); // in MRU order only if direction is not specified
// Filter: saved or saving only
if (filter.savedOnly) {
editorsToClose = editorsToClose.filter(editor => !editor.isDirty() || editor.isSaving());
}
// Filter: direction (left / right)
else if (hasDirection && filter.except) {
editorsToClose = (filter.direction === CloseDirection.LEFT) ?
editorsToClose.slice(0, this.model.indexOf(filter.except, editorsToClose)) :
editorsToClose.slice(this.model.indexOf(filter.except, editorsToClose) + 1);
}
// Filter: except
else if (filter.except) {
editorsToClose = editorsToClose.filter(editor => filter.except && !editor.matches(filter.except));
}
return editorsToClose;
}
private doCloseEditors(editors: EditorInput[], options?: ICloseEditorOptions): void {
// Close all inactive editors first
let closeActiveEditor = false;
for (const editor of editors) {
if (!this.isActive(editor)) {
this.doCloseInactiveEditor(editor);
} else {
closeActiveEditor = true;
}
}
		// Close the active editor last if it is contained in the list of editors to close
if (closeActiveEditor) {
this.doCloseActiveEditor(options?.preserveFocus ? false : undefined);
}
// Forward to title control
if (editors.length) {
this.titleAreaControl.closeEditors(editors);
}
}
//#endregion
//#region closeAllEditors()
async closeAllEditors(options?: ICloseAllEditorsOptions): Promise<boolean> {
if (this.isEmpty) {
// If the group is empty and the request is to close all editors, we still close
			// the editor group if the related setting to close empty groups is enabled, for
// a convenient way of removing empty editor groups for the user.
if (this.accessor.partOptions.closeEmptyGroups) {
this.accessor.removeGroup(this);
}
return true;
}
// Check for confirmation and veto
const veto = await this.handleCloseConfirmation(this.model.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE, options));
if (veto) {
return false;
}
// Do close
this.doCloseAllEditors(options);
return true;
}
private doCloseAllEditors(options?: ICloseAllEditorsOptions): void {
// Close all inactive editors first
const editorsToClose: EditorInput[] = [];
for (const editor of this.model.getEditors(EditorsOrder.SEQUENTIAL, options)) {
if (!this.isActive(editor)) {
this.doCloseInactiveEditor(editor);
}
editorsToClose.push(editor);
}
// Close active editor last (unless we skip it, e.g. because it is sticky)
if (this.activeEditor && editorsToClose.includes(this.activeEditor)) {
this.doCloseActiveEditor();
}
// Forward to title control
if (editorsToClose.length) {
this.titleAreaControl.closeEditors(editorsToClose);
}
}
//#endregion
//#region replaceEditors()
async replaceEditors(editors: EditorReplacement[]): Promise<void> {
// Extract active vs. inactive replacements
let activeReplacement: EditorReplacement | undefined;
const inactiveReplacements: EditorReplacement[] = [];
for (let { editor, replacement, forceReplaceDirty, options } of editors) {
const index = this.getIndexOfEditor(editor);
if (index >= 0) {
const isActiveEditor = this.isActive(editor);
// make sure we respect the index of the editor to replace
if (options) {
options.index = index;
} else {
options = { index };
}
options.inactive = !isActiveEditor;
options.pinned = options.pinned ?? true; // unless specified, prefer to pin upon replace
const editorToReplace = { editor, replacement, forceReplaceDirty, options };
if (isActiveEditor) {
activeReplacement = editorToReplace;
} else {
inactiveReplacements.push(editorToReplace);
}
}
}
// Handle inactive first
for (const { editor, replacement, forceReplaceDirty, options } of inactiveReplacements) {
// Open inactive editor
await this.doOpenEditor(replacement, options);
// Close replaced inactive editor unless they match
if (!editor.matches(replacement)) {
let closed = false;
if (forceReplaceDirty) {
this.doCloseEditor(editor, false, { context: EditorCloseContext.REPLACE });
closed = true;
} else {
closed = await this.doCloseEditorWithConfirmationHandling(editor, { preserveFocus: true }, { context: EditorCloseContext.REPLACE });
}
if (!closed) {
return; // canceled
}
}
}
// Handle active last
if (activeReplacement) {
// Open replacement as active editor
const openEditorResult = this.doOpenEditor(activeReplacement.replacement, activeReplacement.options);
// Close replaced active editor unless they match
if (!activeReplacement.editor.matches(activeReplacement.replacement)) {
if (activeReplacement.forceReplaceDirty) {
this.doCloseEditor(activeReplacement.editor, false, { context: EditorCloseContext.REPLACE });
} else {
await this.doCloseEditorWithConfirmationHandling(activeReplacement.editor, { preserveFocus: true }, { context: EditorCloseContext.REPLACE });
}
}
await openEditorResult;
}
}
//#endregion
//#region Locking
get isLocked(): boolean {
if (this.accessor.groups.length === 1) {
// Special case: if only 1 group is opened, never report it as locked
// to ensure editors can always open in the "default" editor group
return false;
}
return this.model.isLocked;
}
lock(locked: boolean): void {
if (this.accessor.groups.length === 1) {
			// Special case: if only 1 group is opened, never allow locking
// to ensure editors can always open in the "default" editor group
locked = false;
}
this.model.lock(locked);
}
//#endregion
//#region Themable
override updateStyles(): void {
const isEmpty = this.isEmpty;
// Container
if (isEmpty) {
this.element.style.backgroundColor = this.getColor(EDITOR_GROUP_EMPTY_BACKGROUND) || '';
} else {
this.element.style.backgroundColor = '';
}
// Title control
const borderColor = this.getColor(EDITOR_GROUP_HEADER_BORDER) || this.getColor(contrastBorder);
if (!isEmpty && borderColor) {
this.titleContainer.classList.add('title-border-bottom');
this.titleContainer.style.setProperty('--title-border-bottom-color', borderColor);
} else {
this.titleContainer.classList.remove('title-border-bottom');
this.titleContainer.style.removeProperty('--title-border-bottom-color');
}
const { showTabs } = this.accessor.partOptions;
this.titleContainer.style.backgroundColor = this.getColor(showTabs ? EDITOR_GROUP_HEADER_TABS_BACKGROUND : EDITOR_GROUP_HEADER_NO_TABS_BACKGROUND) || '';
// Editor container
this.editorContainer.style.backgroundColor = this.getColor(editorBackground) || '';
}
//#endregion
//#region ISerializableView
readonly element: HTMLElement = document.createElement('div');
get minimumWidth(): number { return this.editorPane.minimumWidth; }
get minimumHeight(): number { return this.editorPane.minimumHeight; }
get maximumWidth(): number { return this.editorPane.maximumWidth; }
get maximumHeight(): number { return this.editorPane.maximumHeight; }
get proportionalLayout(): boolean {
if (!this.lastLayout) {
return true;
}
return !(this.lastLayout.width === this.minimumWidth || this.lastLayout.height === this.minimumHeight);
}
private _onDidChange = this._register(new Relay<{ width: number; height: number } | undefined>());
readonly onDidChange = this._onDidChange.event;
layout(width: number, height: number, top: number, left: number): void {
this.lastLayout = { width, height, top, left };
this.element.classList.toggle('max-height-478px', height <= 478);
// Layout the title area first to receive the size it occupies
const titleAreaSize = this.titleAreaControl.layout({
container: new Dimension(width, height),
available: new Dimension(width, height - this.editorPane.minimumHeight)
});
// Pass the container width and remaining height to the editor layout
const editorHeight = Math.max(0, height - titleAreaSize.height);
this.editorContainer.style.height = `${editorHeight}px`;
this.editorPane.layout({ width, height: editorHeight, top: top + titleAreaSize.height, left });
}
relayout(): void {
if (this.lastLayout) {
const { width, height, top, left } = this.lastLayout;
this.layout(width, height, top, left);
}
}
setBoundarySashes(sashes: IBoundarySashes): void {
this.editorPane.setBoundarySashes(sashes);
}
toJSON(): ISerializedEditorGroupModel {
return this.model.serialize();
}
//#endregion
override dispose(): void {
this._disposed = true;
this._onWillDispose.fire();
this.titleAreaControl.dispose();
super.dispose();
}
}
export interface EditorReplacement extends IEditorReplacement {
readonly editor: EditorInput;
readonly replacement: EditorInput;
readonly options?: IEditorOptions;
}
| src/vs/workbench/browser/parts/editor/editorGroupView.ts | 0 | https://github.com/microsoft/vscode/commit/4aa6972baf75b2096e1d7ff23491648bcf2ed527 | [
0.00019619202066678554,
0.00017115737136919051,
0.0001614219945622608,
0.00017145919264294207,
0.000003268571617809357
]
|
{
"id": 0,
"code_window": [
"\t\t\treturn;\n",
"\t\t}\n",
"\n",
"\t\tconst scrollPadding = this._notebookEditor.notebookOptions.computeTopInsertToolbarHeight(this._notebookEditor.textModel.viewType);\n",
"\n",
"\t\tconst cellExecutions = this._notebookExecutionStateService.getCellExecutionsForNotebook(this._notebookEditor.textModel?.uri)\n",
"\t\t\t.filter(exe => exe.state === NotebookCellExecutionState.Executing);\n",
"\t\tconst notebookExecution = this._notebookExecutionStateService.getExecution(this._notebookEditor.textModel?.uri);\n",
"\t\tconst executionIsVisible = (exe: INotebookCellExecution) => {\n",
"\t\t\tfor (const range of this._notebookEditor.visibleRanges) {\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "src/vs/workbench/contrib/notebook/browser/contrib/execute/executionEditorProgress.ts",
"type": "replace",
"edit_start_line_idx": 44
} | {
"original": {
"content": "/*---------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *--------------------------------------------------------------------------------------------*/\n\nimport { URI } from 'vs/base/common/uri';\nimport { IRange } from 'vs/editor/common/core/range';\nimport { IEditorWorkerService, IUnicodeHighlightsResult } from 'vs/editor/common/services/editorWorker';\nimport { TextEdit, IInplaceReplaceSupportResult } from 'vs/editor/common/languages';\nimport { IChange, IDiffComputationResult } from 'vs/editor/common/diff/diffComputer';\n\nexport class TestEditorWorkerService implements IEditorWorkerService {\n\n\tdeclare readonly _serviceBrand: undefined;\n\n\tcanComputeUnicodeHighlights(uri: URI): boolean { return false; }\n\tasync computedUnicodeHighlights(uri: URI): Promise<IUnicodeHighlightsResult> { return { ranges: [], hasMore: false, ambiguousCharacterCount: 0, invisibleCharacterCount: 0, nonBasicAsciiCharacterCount: 0 }; }\n\tasync computeDiff(original: URI, modified: URI, ignoreTrimWhitespace: boolean, maxComputationTime: number): Promise<IDiffComputationResult | null> { return null; }\n\tcanComputeDirtyDiff(original: URI, modified: URI): boolean { return false; }\n\tasync computeDirtyDiff(original: URI, modified: URI, ignoreTrimWhitespace: boolean): Promise<IChange[] | null> { return null; }\n\tasync computeMoreMinimalEdits(resource: URI, edits: TextEdit[] | null | undefined): Promise<TextEdit[] | undefined> { return undefined; }\n\tcanComputeWordRanges(resource: URI): boolean { return false; }\n\tasync computeWordRanges(resource: URI, range: IRange): Promise<{ [word: string]: IRange[] } | null> { return null; }\n\tcanNavigateValueSet(resource: URI): boolean { return false; }\n\tasync navigateValueSet(resource: URI, range: IRange, up: boolean): Promise<IInplaceReplaceSupportResult | null> { return null; }\n}\n",
"fileName": "./1.tst"
},
"modified": {
"content": "/*---------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *--------------------------------------------------------------------------------------------*/\n\nimport { URI } from 'vs/base/common/uri';\nimport { IRange } from 'vs/editor/common/core/range';\nimport { IDiffComputationResult, IEditorWorkerService, IUnicodeHighlightsResult } from 'vs/editor/common/services/editorWorker';\nimport { TextEdit, IInplaceReplaceSupportResult } from 'vs/editor/common/languages';\nimport { IDocumentDiffProviderOptions } from 'vs/editor/common/diff/documentDiffProvider';\nimport { IChange } from 'vs/editor/common/diff/smartLinesDiffComputer';\n\nexport class TestEditorWorkerService implements IEditorWorkerService {\n\n\tdeclare readonly _serviceBrand: undefined;\n\n\tcanComputeUnicodeHighlights(uri: URI): boolean { return false; }\n\tasync computedUnicodeHighlights(uri: URI): Promise<IUnicodeHighlightsResult> { return { ranges: [], hasMore: false, ambiguousCharacterCount: 0, invisibleCharacterCount: 0, nonBasicAsciiCharacterCount: 0 }; }\n\tasync computeDiff(original: URI, modified: URI, options: IDocumentDiffProviderOptions): Promise<IDiffComputationResult | null> { return null; }\n\tcanComputeDirtyDiff(original: URI, modified: URI): boolean { return false; }\n\tasync computeDirtyDiff(original: URI, modified: URI, ignoreTrimWhitespace: boolean): Promise<IChange[] | null> { return null; }\n\tasync computeMoreMinimalEdits(resource: URI, edits: TextEdit[] | null | undefined): Promise<TextEdit[] | undefined> { return undefined; }\n\tcanComputeWordRanges(resource: URI): boolean { return false; }\n\tasync computeWordRanges(resource: URI, range: IRange): Promise<{ [word: string]: IRange[] } | null> { return null; }\n\tcanNavigateValueSet(resource: URI): boolean { return false; }\n\tasync navigateValueSet(resource: URI, range: IRange, up: boolean): Promise<IInplaceReplaceSupportResult | null> { return null; }\n}\n",
"fileName": "./2.tst"
},
"diffs": [
{
"originalRange": "[8,9)",
"modifiedRange": "[8,9)",
"innerChanges": [
{
"originalRange": "[8,9 -> 8,9]",
"modifiedRange": "[8,9 -> 8,33]"
}
]
},
{
"originalRange": "[10,11)",
"modifiedRange": "[10,12)",
"innerChanges": [
{
"originalRange": "[10,1 -> 10,1]",
"modifiedRange": "[10,1 -> 11,1]"
},
{
"originalRange": "[10,17 -> 10,41]",
"modifiedRange": "[11,17 -> 11,17]"
},
{
"originalRange": "[10,72 -> 10,73]",
"modifiedRange": "[11,48 -> 11,59]"
}
]
},
{
"originalRange": "[18,19)",
"modifiedRange": "[19,20)",
"innerChanges": [
{
"originalRange": "[18,50 -> 18,107]",
"modifiedRange": "[19,50 -> 19,87]"
}
]
}
]
} | src/vs/editor/test/node/diffing/fixtures/ts-confusing/advanced.expected.diff.json | 0 | https://github.com/microsoft/vscode/commit/4aa6972baf75b2096e1d7ff23491648bcf2ed527 | [
0.0001746210764395073,
0.00017225068586412817,
0.0001671276259003207,
0.00017325951193924993,
0.000002587190920166904
]
|
{
"id": 1,
"code_window": [
"\t\t\t\tfor (const cell of this._notebookEditor.getCellsInRange(range)) {\n",
"\t\t\t\t\tif (cell.handle === exe.cellHandle) {\n",
"\t\t\t\t\t\tconst top = this._notebookEditor.getAbsoluteTopOfElement(cell);\n",
"\t\t\t\t\t\tif (this._notebookEditor.scrollTop < top + scrollPadding + 5) {\n",
"\t\t\t\t\t\t\treturn true;\n",
"\t\t\t\t\t\t}\n",
"\t\t\t\t\t}\n",
"\t\t\t\t}\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t\t\t\tif (this._notebookEditor.scrollTop < top + 5) {\n"
],
"file_path": "src/vs/workbench/contrib/notebook/browser/contrib/execute/executionEditorProgress.ts",
"type": "replace",
"edit_start_line_idx": 54
} | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { throttle } from 'vs/base/common/decorators';
import { Disposable, MutableDisposable } from 'vs/base/common/lifecycle';
import { INotebookEditor, INotebookEditorContribution } from 'vs/workbench/contrib/notebook/browser/notebookBrowser';
import { registerNotebookContribution } from 'vs/workbench/contrib/notebook/browser/notebookEditorExtensions';
import { NotebookCellExecutionState } from 'vs/workbench/contrib/notebook/common/notebookCommon';
import { INotebookCellExecution, INotebookExecutionStateService } from 'vs/workbench/contrib/notebook/common/notebookExecutionStateService';
import { IUserActivityService } from 'vs/workbench/services/userActivity/common/userActivityService';
export class ExecutionEditorProgressController extends Disposable implements INotebookEditorContribution {
static id: string = 'workbench.notebook.executionEditorProgress';
private readonly _activityMutex = this._register(new MutableDisposable());
constructor(
private readonly _notebookEditor: INotebookEditor,
@INotebookExecutionStateService private readonly _notebookExecutionStateService: INotebookExecutionStateService,
@IUserActivityService private readonly _userActivity: IUserActivityService,
) {
super();
this._register(_notebookEditor.onDidScroll(() => this._update()));
this._register(_notebookExecutionStateService.onDidChangeExecution(e => {
if (e.notebook.toString() !== this._notebookEditor.textModel?.uri.toString()) {
return;
}
this._update();
}));
this._register(_notebookEditor.onDidChangeModel(() => this._update()));
}
@throttle(100)
private _update() {
if (!this._notebookEditor.hasModel()) {
return;
}
const scrollPadding = this._notebookEditor.notebookOptions.computeTopInsertToolbarHeight(this._notebookEditor.textModel.viewType);
const cellExecutions = this._notebookExecutionStateService.getCellExecutionsForNotebook(this._notebookEditor.textModel?.uri)
.filter(exe => exe.state === NotebookCellExecutionState.Executing);
const notebookExecution = this._notebookExecutionStateService.getExecution(this._notebookEditor.textModel?.uri);
const executionIsVisible = (exe: INotebookCellExecution) => {
for (const range of this._notebookEditor.visibleRanges) {
for (const cell of this._notebookEditor.getCellsInRange(range)) {
if (cell.handle === exe.cellHandle) {
const top = this._notebookEditor.getAbsoluteTopOfElement(cell);
if (this._notebookEditor.scrollTop < top + scrollPadding + 5) {
return true;
}
}
}
}
return false;
};
const hasAnyExecution = cellExecutions.length || notebookExecution;
if (hasAnyExecution && !this._activityMutex.value) {
this._activityMutex.value = this._userActivity.markActive();
} else if (!hasAnyExecution && this._activityMutex.value) {
this._activityMutex.clear();
}
const shouldShowEditorProgressbarForCellExecutions = cellExecutions.length && !cellExecutions.some(executionIsVisible) && !cellExecutions.some(e => e.isPaused);
const showEditorProgressBar = !!notebookExecution || shouldShowEditorProgressbarForCellExecutions;
if (showEditorProgressBar) {
this._notebookEditor.showProgress();
} else {
this._notebookEditor.hideProgress();
}
}
}
registerNotebookContribution(ExecutionEditorProgressController.id, ExecutionEditorProgressController);
| src/vs/workbench/contrib/notebook/browser/contrib/execute/executionEditorProgress.ts | 1 | https://github.com/microsoft/vscode/commit/4aa6972baf75b2096e1d7ff23491648bcf2ed527 | [
0.9984344840049744,
0.11236217617988586,
0.00016410827811341733,
0.0003787272435147315,
0.31328219175338745
]
|
{
"id": 1,
"code_window": [
"\t\t\t\tfor (const cell of this._notebookEditor.getCellsInRange(range)) {\n",
"\t\t\t\t\tif (cell.handle === exe.cellHandle) {\n",
"\t\t\t\t\t\tconst top = this._notebookEditor.getAbsoluteTopOfElement(cell);\n",
"\t\t\t\t\t\tif (this._notebookEditor.scrollTop < top + scrollPadding + 5) {\n",
"\t\t\t\t\t\t\treturn true;\n",
"\t\t\t\t\t\t}\n",
"\t\t\t\t\t}\n",
"\t\t\t\t}\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t\t\t\tif (this._notebookEditor.scrollTop < top + 5) {\n"
],
"file_path": "src/vs/workbench/contrib/notebook/browser/contrib/execute/executionEditorProgress.ts",
"type": "replace",
"edit_start_line_idx": 54
} | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { ProxyIdentifier, IRPCProtocol, Proxied } from 'vs/workbench/services/extensions/common/proxyIdentifier';
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
export const IExtHostRpcService = createDecorator<IExtHostRpcService>('IExtHostRpcService');
export interface IExtHostRpcService extends IRPCProtocol {
readonly _serviceBrand: undefined;
}
export class ExtHostRpcService implements IExtHostRpcService {
readonly _serviceBrand: undefined;
readonly getProxy: <T>(identifier: ProxyIdentifier<T>) => Proxied<T>;
readonly set: <T, R extends T> (identifier: ProxyIdentifier<T>, instance: R) => R;
readonly dispose: () => void;
readonly assertRegistered: (identifiers: ProxyIdentifier<any>[]) => void;
readonly drain: () => Promise<void>;
constructor(rpcProtocol: IRPCProtocol) {
this.getProxy = rpcProtocol.getProxy.bind(rpcProtocol);
this.set = rpcProtocol.set.bind(rpcProtocol);
this.dispose = rpcProtocol.dispose.bind(rpcProtocol);
this.assertRegistered = rpcProtocol.assertRegistered.bind(rpcProtocol);
this.drain = rpcProtocol.drain.bind(rpcProtocol);
}
}
| src/vs/workbench/api/common/extHostRpcService.ts | 0 | https://github.com/microsoft/vscode/commit/4aa6972baf75b2096e1d7ff23491648bcf2ed527 | [
0.0001753859978634864,
0.00017228018259629607,
0.00016636001237202436,
0.00017368733824696392,
0.000003583529633033322
]
|
{
"id": 1,
"code_window": [
"\t\t\t\tfor (const cell of this._notebookEditor.getCellsInRange(range)) {\n",
"\t\t\t\t\tif (cell.handle === exe.cellHandle) {\n",
"\t\t\t\t\t\tconst top = this._notebookEditor.getAbsoluteTopOfElement(cell);\n",
"\t\t\t\t\t\tif (this._notebookEditor.scrollTop < top + scrollPadding + 5) {\n",
"\t\t\t\t\t\t\treturn true;\n",
"\t\t\t\t\t\t}\n",
"\t\t\t\t\t}\n",
"\t\t\t\t}\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t\t\t\tif (this._notebookEditor.scrollTop < top + 5) {\n"
],
"file_path": "src/vs/workbench/contrib/notebook/browser/contrib/execute/executionEditorProgress.ts",
"type": "replace",
"edit_start_line_idx": 54
} | {
"Region Start": {
"prefix": "#region",
"body": [
"#region $0"
],
"description": "Folding Region Start"
},
"Region End": {
"prefix": "#endregion",
"body": [
"#endregion"
],
"description": "Folding Region End"
}
}
| extensions/csharp/snippets/csharp.code-snippets | 0 | https://github.com/microsoft/vscode/commit/4aa6972baf75b2096e1d7ff23491648bcf2ed527 | [
0.00017486016440670937,
0.00017440559167880565,
0.00017395101895090193,
0.00017440559167880565,
4.545727279037237e-7
]
|
{
"id": 1,
"code_window": [
"\t\t\t\tfor (const cell of this._notebookEditor.getCellsInRange(range)) {\n",
"\t\t\t\t\tif (cell.handle === exe.cellHandle) {\n",
"\t\t\t\t\t\tconst top = this._notebookEditor.getAbsoluteTopOfElement(cell);\n",
"\t\t\t\t\t\tif (this._notebookEditor.scrollTop < top + scrollPadding + 5) {\n",
"\t\t\t\t\t\t\treturn true;\n",
"\t\t\t\t\t\t}\n",
"\t\t\t\t\t}\n",
"\t\t\t\t}\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t\t\t\tif (this._notebookEditor.scrollTop < top + 5) {\n"
],
"file_path": "src/vs/workbench/contrib/notebook/browser/contrib/execute/executionEditorProgress.ts",
"type": "replace",
"edit_start_line_idx": 54
} | {
"iconDefinitions": {
"_root_folder_dark": {
"iconPath": "./images/root-folder-dark.svg"
},
"_root_folder_open_dark": {
"iconPath": "./images/root-folder-open-dark.svg"
},
"_folder_dark": {
"iconPath": "./images/folder-dark.svg"
},
"_folder_open_dark": {
"iconPath": "./images/folder-open-dark.svg"
},
"_file_dark": {
"iconPath": "./images/document-dark.svg"
},
"_root_folder": {
"iconPath": "./images/root-folder-light.svg"
},
"_root_folder_open": {
"iconPath": "./images/root-folder-open-light.svg"
},
"_folder_light": {
"iconPath": "./images/folder-light.svg"
},
"_folder_open_light": {
"iconPath": "./images/folder-open-light.svg"
},
"_file_light": {
"iconPath": "./images/document-light.svg"
}
},
"folderExpanded": "_folder_open_dark",
"folder": "_folder_dark",
"file": "_file_dark",
"rootFolderExpanded": "_root_folder_open_dark",
"rootFolder": "_root_folder_dark",
"fileExtensions": {
// icons by file extension
},
"fileNames": {
// icons by file name
},
"languageIds": {
// icons by language id
},
"light": {
"folderExpanded": "_folder_open_light",
"folder": "_folder_light",
"rootFolderExpanded": "_root_folder_open",
"rootFolder": "_root_folder",
"file": "_file_light",
"fileExtensions": {
// icons by file extension
},
"fileNames": {
// icons by file name
},
"languageIds": {
// icons by language id
}
},
"highContrast": {
// overrides for high contrast
}
} | extensions/theme-defaults/fileicons/vs_minimal-icon-theme.json | 0 | https://github.com/microsoft/vscode/commit/4aa6972baf75b2096e1d7ff23491648bcf2ed527 | [
0.00017424534598831087,
0.00017335452139377594,
0.00017162995936814696,
0.00017351169663015753,
7.594537692057202e-7
]
|
{
"id": 2,
"code_window": [
"\tconst updateForScroll = () => {\n",
"\t\tif (cell.isInputCollapsed) {\n",
"\t\t\telement.style.top = '';\n",
"\t\t} else {\n",
"\t\t\tconst scrollPadding = notebookEditor.notebookOptions.computeTopInsertToolbarHeight(notebookEditor.textModel?.viewType);\n",
"\t\t\tconst scrollTop = notebookEditor.scrollTop - scrollPadding;\n",
"\t\t\tconst elementTop = notebookEditor.getAbsoluteTopOfElement(cell);\n",
"\t\t\tconst diff = scrollTop - elementTop + extraOffset;\n",
"\t\t\tconst maxTop = cell.layoutInfo.editorHeight + cell.layoutInfo.statusBarHeight - 45; // subtract roughly the height of the execution order label plus padding\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\tconst scrollTop = notebookEditor.scrollTop;\n"
],
"file_path": "src/vs/workbench/contrib/notebook/browser/view/cellParts/cellToolbarStickyScroll.ts",
"type": "replace",
"edit_start_line_idx": 17
} | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { throttle } from 'vs/base/common/decorators';
import { Disposable, MutableDisposable } from 'vs/base/common/lifecycle';
import { INotebookEditor, INotebookEditorContribution } from 'vs/workbench/contrib/notebook/browser/notebookBrowser';
import { registerNotebookContribution } from 'vs/workbench/contrib/notebook/browser/notebookEditorExtensions';
import { NotebookCellExecutionState } from 'vs/workbench/contrib/notebook/common/notebookCommon';
import { INotebookCellExecution, INotebookExecutionStateService } from 'vs/workbench/contrib/notebook/common/notebookExecutionStateService';
import { IUserActivityService } from 'vs/workbench/services/userActivity/common/userActivityService';
export class ExecutionEditorProgressController extends Disposable implements INotebookEditorContribution {
static id: string = 'workbench.notebook.executionEditorProgress';
private readonly _activityMutex = this._register(new MutableDisposable());
constructor(
private readonly _notebookEditor: INotebookEditor,
@INotebookExecutionStateService private readonly _notebookExecutionStateService: INotebookExecutionStateService,
@IUserActivityService private readonly _userActivity: IUserActivityService,
) {
super();
this._register(_notebookEditor.onDidScroll(() => this._update()));
this._register(_notebookExecutionStateService.onDidChangeExecution(e => {
if (e.notebook.toString() !== this._notebookEditor.textModel?.uri.toString()) {
return;
}
this._update();
}));
this._register(_notebookEditor.onDidChangeModel(() => this._update()));
}
@throttle(100)
private _update() {
if (!this._notebookEditor.hasModel()) {
return;
}
const scrollPadding = this._notebookEditor.notebookOptions.computeTopInsertToolbarHeight(this._notebookEditor.textModel.viewType);
const cellExecutions = this._notebookExecutionStateService.getCellExecutionsForNotebook(this._notebookEditor.textModel?.uri)
.filter(exe => exe.state === NotebookCellExecutionState.Executing);
const notebookExecution = this._notebookExecutionStateService.getExecution(this._notebookEditor.textModel?.uri);
const executionIsVisible = (exe: INotebookCellExecution) => {
for (const range of this._notebookEditor.visibleRanges) {
for (const cell of this._notebookEditor.getCellsInRange(range)) {
if (cell.handle === exe.cellHandle) {
const top = this._notebookEditor.getAbsoluteTopOfElement(cell);
if (this._notebookEditor.scrollTop < top + scrollPadding + 5) {
return true;
}
}
}
}
return false;
};
const hasAnyExecution = cellExecutions.length || notebookExecution;
if (hasAnyExecution && !this._activityMutex.value) {
this._activityMutex.value = this._userActivity.markActive();
} else if (!hasAnyExecution && this._activityMutex.value) {
this._activityMutex.clear();
}
const shouldShowEditorProgressbarForCellExecutions = cellExecutions.length && !cellExecutions.some(executionIsVisible) && !cellExecutions.some(e => e.isPaused);
const showEditorProgressBar = !!notebookExecution || shouldShowEditorProgressbarForCellExecutions;
if (showEditorProgressBar) {
this._notebookEditor.showProgress();
} else {
this._notebookEditor.hideProgress();
}
}
}
registerNotebookContribution(ExecutionEditorProgressController.id, ExecutionEditorProgressController);
| src/vs/workbench/contrib/notebook/browser/contrib/execute/executionEditorProgress.ts | 1 | https://github.com/microsoft/vscode/commit/4aa6972baf75b2096e1d7ff23491648bcf2ed527 | [
0.9949146509170532,
0.1252862811088562,
0.00016470917034894228,
0.00021699669014196843,
0.3101089298725128
]
|
{
"id": 2,
"code_window": [
"\tconst updateForScroll = () => {\n",
"\t\tif (cell.isInputCollapsed) {\n",
"\t\t\telement.style.top = '';\n",
"\t\t} else {\n",
"\t\t\tconst scrollPadding = notebookEditor.notebookOptions.computeTopInsertToolbarHeight(notebookEditor.textModel?.viewType);\n",
"\t\t\tconst scrollTop = notebookEditor.scrollTop - scrollPadding;\n",
"\t\t\tconst elementTop = notebookEditor.getAbsoluteTopOfElement(cell);\n",
"\t\t\tconst diff = scrollTop - elementTop + extraOffset;\n",
"\t\t\tconst maxTop = cell.layoutInfo.editorHeight + cell.layoutInfo.statusBarHeight - 45; // subtract roughly the height of the execution order label plus padding\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\tconst scrollTop = notebookEditor.scrollTop;\n"
],
"file_path": "src/vs/workbench/contrib/notebook/browser/view/cellParts/cellToolbarStickyScroll.ts",
"type": "replace",
"edit_start_line_idx": 17
} | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
export interface IPropertyData {
classification: 'SystemMetaData' | 'CallstackOrException' | 'CustomerContent' | 'PublicNonPersonalData' | 'EndUserPseudonymizedInformation';
purpose: 'PerformanceAndHealth' | 'FeatureInsight' | 'BusinessInsight';
comment: string;
expiration?: string;
endpoint?: string;
isMeasurement?: boolean;
}
export interface IGDPRProperty {
owner: string;
comment: string;
expiration?: string;
readonly [name: string]: IPropertyData | undefined | IGDPRProperty | string;
}
type IGDPRPropertyWithoutMetadata = Omit<IGDPRProperty, 'owner' | 'comment' | 'expiration'>;
export type OmitMetadata<T> = Omit<T, 'owner' | 'comment' | 'expiration'>;
export type ClassifiedEvent<T extends IGDPRPropertyWithoutMetadata> = {
[k in keyof T]: any
};
export type StrictPropertyChecker<TEvent, TClassification, TError> = keyof TEvent extends keyof OmitMetadata<TClassification> ? keyof OmitMetadata<TClassification> extends keyof TEvent ? TEvent : TError : TError;
export type StrictPropertyCheckError = { error: 'Type of classified event does not match event properties' };
export type StrictPropertyCheck<T extends IGDPRProperty, E> = StrictPropertyChecker<E, ClassifiedEvent<OmitMetadata<T>>, StrictPropertyCheckError>;
| src/vs/platform/telemetry/common/gdprTypings.ts | 0 | https://github.com/microsoft/vscode/commit/4aa6972baf75b2096e1d7ff23491648bcf2ed527 | [
0.00017598486738279462,
0.00017192278755828738,
0.00016715328092686832,
0.0001722765009617433,
0.000003951314283767715
]
|
{
"id": 2,
"code_window": [
"\tconst updateForScroll = () => {\n",
"\t\tif (cell.isInputCollapsed) {\n",
"\t\t\telement.style.top = '';\n",
"\t\t} else {\n",
"\t\t\tconst scrollPadding = notebookEditor.notebookOptions.computeTopInsertToolbarHeight(notebookEditor.textModel?.viewType);\n",
"\t\t\tconst scrollTop = notebookEditor.scrollTop - scrollPadding;\n",
"\t\t\tconst elementTop = notebookEditor.getAbsoluteTopOfElement(cell);\n",
"\t\t\tconst diff = scrollTop - elementTop + extraOffset;\n",
"\t\t\tconst maxTop = cell.layoutInfo.editorHeight + cell.layoutInfo.statusBarHeight - 45; // subtract roughly the height of the execution order label plus padding\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\tconst scrollTop = notebookEditor.scrollTop;\n"
],
"file_path": "src/vs/workbench/contrib/notebook/browser/view/cellParts/cellToolbarStickyScroll.ts",
"type": "replace",
"edit_start_line_idx": 17
} | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { Uri, workspace } from 'vscode';
import { RequestType, BaseLanguageClient } from 'vscode-languageclient';
import { Runtime } from './cssClient';
export namespace FsContentRequest {
export const type: RequestType<{ uri: string; encoding?: string }, string, any> = new RequestType('fs/content');
}
export namespace FsStatRequest {
export const type: RequestType<string, FileStat, any> = new RequestType('fs/stat');
}
export namespace FsReadDirRequest {
export const type: RequestType<string, [string, FileType][], any> = new RequestType('fs/readDir');
}
export function serveFileSystemRequests(client: BaseLanguageClient, runtime: Runtime) {
client.onRequest(FsContentRequest.type, (param: { uri: string; encoding?: string }) => {
const uri = Uri.parse(param.uri);
if (uri.scheme === 'file' && runtime.fs) {
return runtime.fs.getContent(param.uri);
}
return workspace.fs.readFile(uri).then(buffer => {
return new runtime.TextDecoder(param.encoding).decode(buffer);
});
});
client.onRequest(FsReadDirRequest.type, (uriString: string) => {
const uri = Uri.parse(uriString);
if (uri.scheme === 'file' && runtime.fs) {
return runtime.fs.readDirectory(uriString);
}
return workspace.fs.readDirectory(uri);
});
client.onRequest(FsStatRequest.type, (uriString: string) => {
const uri = Uri.parse(uriString);
if (uri.scheme === 'file' && runtime.fs) {
return runtime.fs.stat(uriString);
}
return workspace.fs.stat(uri);
});
}
export enum FileType {
/**
* The file type is unknown.
*/
Unknown = 0,
/**
* A regular file.
*/
File = 1,
/**
* A directory.
*/
Directory = 2,
/**
* A symbolic link to a file.
*/
SymbolicLink = 64
}
export interface FileStat {
/**
* The type of the file, e.g. is a regular file, a directory, or symbolic link
* to a file.
*/
type: FileType;
/**
* The creation timestamp in milliseconds elapsed since January 1, 1970 00:00:00 UTC.
*/
ctime: number;
/**
* The modification timestamp in milliseconds elapsed since January 1, 1970 00:00:00 UTC.
*/
mtime: number;
/**
* The size in bytes.
*/
size: number;
}
export interface RequestService {
getContent(uri: string, encoding?: string): Promise<string>;
stat(uri: string): Promise<FileStat>;
readDirectory(uri: string): Promise<[string, FileType][]>;
}
| extensions/css-language-features/client/src/requests.ts | 0 | https://github.com/microsoft/vscode/commit/4aa6972baf75b2096e1d7ff23491648bcf2ed527 | [
0.00017614889657124877,
0.00017400983779225498,
0.0001707590854493901,
0.0001738370774546638,
0.000001437701826034754
]
|
{
"id": 2,
"code_window": [
"\tconst updateForScroll = () => {\n",
"\t\tif (cell.isInputCollapsed) {\n",
"\t\t\telement.style.top = '';\n",
"\t\t} else {\n",
"\t\t\tconst scrollPadding = notebookEditor.notebookOptions.computeTopInsertToolbarHeight(notebookEditor.textModel?.viewType);\n",
"\t\t\tconst scrollTop = notebookEditor.scrollTop - scrollPadding;\n",
"\t\t\tconst elementTop = notebookEditor.getAbsoluteTopOfElement(cell);\n",
"\t\t\tconst diff = scrollTop - elementTop + extraOffset;\n",
"\t\t\tconst maxTop = cell.layoutInfo.editorHeight + cell.layoutInfo.statusBarHeight - 45; // subtract roughly the height of the execution order label plus padding\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\tconst scrollTop = notebookEditor.scrollTop;\n"
],
"file_path": "src/vs/workbench/contrib/notebook/browser/view/cellParts/cellToolbarStickyScroll.ts",
"type": "replace",
"edit_start_line_idx": 17
} | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as net from 'net';
import { ISocket } from 'vs/base/parts/ipc/common/ipc.net';
import { NodeSocket } from 'vs/base/parts/ipc/node/ipc.net';
import { makeRawSocketHeaders } from 'vs/platform/remote/common/managedSocket';
import { RemoteConnectionType, WebSocketRemoteConnection } from 'vs/platform/remote/common/remoteAuthorityResolver';
import { ISocketFactory } from 'vs/platform/remote/common/remoteSocketFactoryService';
export const nodeSocketFactory = new class implements ISocketFactory<RemoteConnectionType.WebSocket> {
supports(connectTo: WebSocketRemoteConnection): boolean {
return true;
}
connect({ host, port }: WebSocketRemoteConnection, path: string, query: string, debugLabel: string): Promise<ISocket> {
return new Promise<ISocket>((resolve, reject) => {
const socket = net.createConnection({ host: host, port: port }, () => {
socket.removeListener('error', reject);
socket.write(makeRawSocketHeaders(path, query, debugLabel));
const onData = (data: Buffer) => {
const strData = data.toString();
if (strData.indexOf('\r\n\r\n') >= 0) {
// headers received OK
socket.off('data', onData);
resolve(new NodeSocket(socket, debugLabel));
}
};
socket.on('data', onData);
});
// Disable Nagle's algorithm.
socket.setNoDelay(true);
socket.once('error', reject);
});
}
};
| src/vs/platform/remote/node/nodeSocketFactory.ts | 0 | https://github.com/microsoft/vscode/commit/4aa6972baf75b2096e1d7ff23491648bcf2ed527 | [
0.000174170927493833,
0.00017281979671679437,
0.00017073612252715975,
0.00017410135478712618,
0.0000016138970977408462
]
|
{
"id": 0,
"code_window": [
" * instantiated.\n",
" *\n",
" * @exportedAs angular2/di_errors\n",
" */\n",
"export class InstantiationError extends AbstractBindingError {\n",
" // TODO(tbosch): Can't do key:Key as this results in a circular dependency!\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep"
],
"after_edit": [
" cause;\n",
" causeKey;\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "add",
"edit_start_line_idx": 142
} | import {isBlank} from 'angular2/src/facade/lang';
import {describe, ddescribe, it, iit, expect, beforeEach} from 'angular2/test_lib';
import {Injector, Inject, InjectLazy, Optional, bind, ResolvedBinding} from 'angular2/di';
class Engine {
}
class BrokenEngine {
constructor() {
throw "Broken Engine";
}
}
class DashboardSoftware {
}
class Dashboard {
constructor(software: DashboardSoftware) {}
}
class TurboEngine extends Engine {
}
class Car {
engine:Engine;
constructor(engine:Engine) {
this.engine = engine;
}
}
class CarWithLazyEngine {
engineFactory;
constructor(@InjectLazy(Engine) engineFactory) {
this.engineFactory = engineFactory;
}
}
class CarWithOptionalEngine {
engine;
constructor(@Optional() engine:Engine) {
this.engine = engine;
}
}
class CarWithDashboard {
engine:Engine;
dashboard:Dashboard;
constructor(engine:Engine, dashboard:Dashboard) {
this.engine = engine;
this.dashboard = dashboard;
}
}
class SportsCar extends Car {
engine:Engine;
constructor(engine:Engine) {
super(engine);
}
}
class CarWithInject {
engine:Engine;
constructor(@Inject(TurboEngine) engine:Engine) {
this.engine = engine;
}
}
class CyclicEngine {
constructor(car:Car) {}
}
class NoAnnotations {
constructor(secretDependency) {}
}
export function main() {
describe('injector', function () {
it('should instantiate a class without dependencies', function () {
var injector = Injector.resolveAndCreate([Engine]);
var engine = injector.get(Engine);
expect(engine).toBeAnInstanceOf(Engine);
});
it('should resolve dependencies based on type information', function () {
var injector = Injector.resolveAndCreate([Engine, Car]);
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(Car);
expect(car.engine).toBeAnInstanceOf(Engine);
});
it('should resolve dependencies based on @Inject annotation', function () {
var injector = Injector.resolveAndCreate([TurboEngine, Engine, CarWithInject]);
var car = injector.get(CarWithInject);
expect(car).toBeAnInstanceOf(CarWithInject);
expect(car.engine).toBeAnInstanceOf(TurboEngine);
});
it('should throw when no type and not @Inject', function () {
expect(() => Injector.resolveAndCreate([NoAnnotations])).toThrowError(
'Cannot resolve all parameters for NoAnnotations. '+
'Make sure they all have valid type or annotations.');
});
it('should cache instances', function () {
var injector = Injector.resolveAndCreate([Engine]);
var e1 = injector.get(Engine);
var e2 = injector.get(Engine);
expect(e1).toBe(e2);
});
it('should bind to a value', function () {
var injector = Injector.resolveAndCreate([
bind(Engine).toValue("fake engine")
]);
var engine = injector.get(Engine);
expect(engine).toEqual("fake engine");
});
it('should bind to a factory', function () {
function sportsCarFactory(e:Engine) {
return new SportsCar(e);
}
var injector = Injector.resolveAndCreate([
Engine,
bind(Car).toFactory(sportsCarFactory)
]);
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(SportsCar);
expect(car.engine).toBeAnInstanceOf(Engine);
});
it('should bind to an alias', function() {
var injector = Injector.resolveAndCreate([
Engine,
bind(SportsCar).toClass(SportsCar),
bind(Car).toAlias(SportsCar)
]);
var car = injector.get(Car);
var sportsCar = injector.get(SportsCar);
expect(car).toBeAnInstanceOf(SportsCar);
expect(car).toBe(sportsCar);
});
it('should throw when the aliased binding does not exist', function () {
var injector = Injector.resolveAndCreate([
bind('car').toAlias(SportsCar)
]);
expect(() => injector.get('car')).toThrowError('No provider for SportsCar! (car -> SportsCar)');
});
it('should support overriding factory dependencies', function () {
var injector = Injector.resolveAndCreate([
Engine,
bind(Car).toFactory((e) => new SportsCar(e), [Engine])
]);
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(SportsCar);
expect(car.engine).toBeAnInstanceOf(Engine);
});
it('should support optional dependencies', function () {
var injector = Injector.resolveAndCreate([
CarWithOptionalEngine
]);
var car = injector.get(CarWithOptionalEngine);
expect(car.engine).toEqual(null);
});
it("should flatten passed-in bindings", function () {
var injector = Injector.resolveAndCreate([
[[Engine, Car]]
]);
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(Car);
});
it("should use the last binding "+
"when there are mutliple bindings for same token", function () {
var injector = Injector.resolveAndCreate([
bind(Engine).toClass(Engine),
bind(Engine).toClass(TurboEngine)
]);
expect(injector.get(Engine)).toBeAnInstanceOf(TurboEngine);
});
it('should use non-type tokens', function () {
var injector = Injector.resolveAndCreate([
bind('token').toValue('value')
]);
expect(injector.get('token')).toEqual('value');
});
it('should throw when given invalid bindings', function () {
expect(() => Injector.resolveAndCreate(["blah"])).toThrowError('Invalid binding blah');
expect(() => Injector.resolveAndCreate([bind("blah")])).toThrowError('Invalid binding blah');
});
it('should provide itself', function () {
var parent = Injector.resolveAndCreate([]);
var child = parent.resolveAndCreateChild([]);
expect(child.get(Injector)).toBe(child);
});
it('should throw when no provider defined', function () {
var injector = Injector.resolveAndCreate([]);
expect(() => injector.get('NonExisting')).toThrowError('No provider for NonExisting!');
});
it('should show the full path when no provider', function () {
var injector = Injector.resolveAndCreate([CarWithDashboard, Engine, Dashboard]);
expect(() => injector.get(CarWithDashboard)).
toThrowError('No provider for DashboardSoftware! (CarWithDashboard -> Dashboard -> DashboardSoftware)');
});
it('should throw when trying to instantiate a cyclic dependency', function () {
var injector = Injector.resolveAndCreate([
Car,
bind(Engine).toClass(CyclicEngine)
]);
expect(() => injector.get(Car))
.toThrowError('Cannot instantiate cyclic dependency! (Car -> Engine -> Car)');
expect(() => injector.asyncGet(Car))
.toThrowError('Cannot instantiate cyclic dependency! (Car -> Engine -> Car)');
});
it('should show the full path when error happens in a constructor', function () {
var injector = Injector.resolveAndCreate([
Car,
bind(Engine).toClass(BrokenEngine)
]);
try {
injector.get(Car);
throw "Must throw";
} catch (e) {
expect(e.message).toContain("Error during instantiation of Engine! (Car -> Engine)");
}
});
it('should instantiate an object after a failed attempt', function () {
var isBroken = true;
var injector = Injector.resolveAndCreate([
Car,
bind(Engine).toFactory(() => isBroken ? new BrokenEngine() : new Engine())
]);
expect(() => injector.get(Car)).toThrowError(new RegExp("Error"));
isBroken = false;
expect(injector.get(Car)).toBeAnInstanceOf(Car);
});
it('should support null values', () => {
var injector = Injector.resolveAndCreate([bind('null').toValue(null)]);
expect(injector.get('null')).toBe(null);
});
describe("default bindings", function () {
it("should be used when no matching binding found", function () {
var injector = Injector.resolveAndCreate([], {defaultBindings: true});
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(Car);
});
it("should use the matching binding when it is available", function () {
var injector = Injector.resolveAndCreate([
bind(Car).toClass(SportsCar)
], {defaultBindings: true});
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(SportsCar);
});
});
describe("child", function () {
it('should load instances from parent injector', function () {
var parent = Injector.resolveAndCreate([Engine]);
var child = parent.resolveAndCreateChild([]);
var engineFromParent = parent.get(Engine);
var engineFromChild = child.get(Engine);
expect(engineFromChild).toBe(engineFromParent);
});
it("should not use the child bindings when resolving the dependencies of a parent binding", function () {
var parent = Injector.resolveAndCreate([
Car, Engine
]);
var child = parent.resolveAndCreateChild([
bind(Engine).toClass(TurboEngine)
]);
var carFromChild = child.get(Car);
expect(carFromChild.engine).toBeAnInstanceOf(Engine);
});
it('should create new instance in a child injector', function () {
var parent = Injector.resolveAndCreate([Engine]);
var child = parent.resolveAndCreateChild([
bind(Engine).toClass(TurboEngine)
]);
var engineFromParent = parent.get(Engine);
var engineFromChild = child.get(Engine);
expect(engineFromParent).not.toBe(engineFromChild);
expect(engineFromChild).toBeAnInstanceOf(TurboEngine);
});
it("should create child injectors without default bindings", function () {
var parent = Injector.resolveAndCreate([], {defaultBindings: true});
var child = parent.resolveAndCreateChild([]);
      // child delegates the creation of Car to the parent
var childCar = child.get(Car);
var parentCar = parent.get(Car);
expect(childCar).toBe(parentCar);
});
});
describe("lazy", function () {
it("should create dependencies lazily", function () {
var injector = Injector.resolveAndCreate([
Engine,
CarWithLazyEngine
]);
var car = injector.get(CarWithLazyEngine);
expect(car.engineFactory()).toBeAnInstanceOf(Engine);
});
it("should cache instance created lazily", function () {
var injector = Injector.resolveAndCreate([
Engine,
CarWithLazyEngine
]);
var car = injector.get(CarWithLazyEngine);
var e1 = car.engineFactory();
var e2 = car.engineFactory();
expect(e1).toBe(e2);
});
});
describe('resolve', function() {
it('should resolve and flatten', function() {
var bindings = Injector.resolve([Engine, [BrokenEngine]]);
bindings.forEach(function(b) {
if (isBlank(b)) return; // the result is a sparse array
expect(b instanceof ResolvedBinding).toBe(true);
});
});
});
});
}
| modules/angular2/test/di/injector_spec.js | 1 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.9509792327880859,
0.04807266965508461,
0.0001674169470788911,
0.0001755733828758821,
0.2045021802186966
]
|
{
"id": 0,
"code_window": [
" * instantiated.\n",
" *\n",
" * @exportedAs angular2/di_errors\n",
" */\n",
"export class InstantiationError extends AbstractBindingError {\n",
" // TODO(tbosch): Can't do key:Key as this results in a circular dependency!\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep"
],
"after_edit": [
" cause;\n",
" causeKey;\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "add",
"edit_start_line_idx": 142
} | library web_foo;
import 'package:angular2/src/core/application.dart';
import 'package:angular2/src/reflection/reflection.dart';
import 'package:angular2/src/reflection/reflection_capabilities.dart';
void main() {
reflector.reflectionCapabilities = new ReflectionCapabilities();
bootstrap(MyComponent);
}
| modules/angular2/test/transform/reflection_remover/reflection_remover_files/index.dart | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00017341594502795488,
0.00017113990907091647,
0.00016886387311387807,
0.00017113990907091647,
0.0000022760359570384026
]
|
{
"id": 0,
"code_window": [
" * instantiated.\n",
" *\n",
" * @exportedAs angular2/di_errors\n",
" */\n",
"export class InstantiationError extends AbstractBindingError {\n",
" // TODO(tbosch): Can't do key:Key as this results in a circular dependency!\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep"
],
"after_edit": [
" cause;\n",
" causeKey;\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "add",
"edit_start_line_idx": 142
} | import {Injectable} from 'angular2/di';
import {XHR} from 'angular2/src/services/xhr';
import {ListWrapper} from 'angular2/src/facade/collection';
import {UrlResolver} from 'angular2/src/services/url_resolver';
import {StyleUrlResolver} from './style_url_resolver';
import {
isBlank,
isPresent,
RegExp,
RegExpWrapper,
StringWrapper,
normalizeBlank,
} from 'angular2/src/facade/lang';
import {
Promise,
PromiseWrapper,
} from 'angular2/src/facade/async';
/**
* Inline @import rules in the given CSS.
*
 * When an @import rule is inlined, its URLs are rewritten.
*/
@Injectable()
export class StyleInliner {
_xhr: XHR;
_urlResolver: UrlResolver;
_styleUrlResolver: StyleUrlResolver;
constructor(xhr: XHR, styleUrlResolver: StyleUrlResolver, urlResolver: UrlResolver) {
this._xhr = xhr;
this._urlResolver = urlResolver;
this._styleUrlResolver = styleUrlResolver;
}
/**
* Inline the @imports rules in the given CSS text.
*
* The baseUrl is required to rewrite URLs in the inlined content.
*
* @param {string} cssText
* @param {string} baseUrl
* @returns {*} a Promise<string> when @import rules are present, a string otherwise
*/
// TODO(vicb): Union types: returns either a Promise<string> or a string
// TODO(vicb): commented out @import rules should not be inlined
inlineImports(cssText: string, baseUrl: string) {
return this._inlineImports(cssText, baseUrl, []);
}
_inlineImports(cssText: string, baseUrl: string, inlinedUrls: List<string>) {
var partIndex = 0;
var parts = StringWrapper.split(cssText, _importRe);
if (parts.length === 1) {
// no @import rule found, return the original css
return cssText;
}
var promises = [];
while (partIndex < parts.length - 1) {
// prefix is the content before the @import rule
var prefix = parts[partIndex];
// rule is the parameter of the @import rule
var rule = parts[partIndex + 1];
var url = _extractUrl(rule);
if (isPresent(url)) {
url = this._urlResolver.resolve(baseUrl, url);
}
var mediaQuery = _extractMediaQuery(rule);
var promise;
if (isBlank(url)) {
promise = PromiseWrapper.resolve(`/* Invalid import rule: "@import ${rule};" */`);
} else if (ListWrapper.contains(inlinedUrls, url)) {
// The current import rule has already been inlined, return the prefix only
// Importing again might cause a circular dependency
promise = PromiseWrapper.resolve(prefix);
} else {
ListWrapper.push(inlinedUrls, url);
promise = PromiseWrapper.then(
this._xhr.get(url),
(css) => {
// resolve nested @import rules
css = this._inlineImports(css, url, inlinedUrls);
if (PromiseWrapper.isPromise(css)) {
// wait until nested @import are inlined
return css.then((css) => {
              return prefix + this._transformImportedCss(css, mediaQuery, url) + '\n';
            });
} else {
// there are no nested @import, return the css
return prefix + this._transformImportedCss(css, mediaQuery, url) + '\n';
}
},
(error) => `/* failed to import ${url} */\n`
);
}
ListWrapper.push(promises, promise);
partIndex += 2;
}
return PromiseWrapper.all(promises).then(function (cssParts) {
var cssText = cssParts.join('');
if (partIndex < parts.length) {
        // append the content located after the last @import rule
cssText += parts[partIndex];
}
return cssText;
});
}
_transformImportedCss(css: string, mediaQuery: string, url: string): string {
css = this._styleUrlResolver.resolveUrls(css, url);
return _wrapInMediaRule(css, mediaQuery);
}
}
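// A minimal usage sketch of the class above (the `inliner` instance, the css
// input and the `render` callback are illustrative, not part of this module).
// `inlineImports` returns a plain string when no @import rule is found and a
// Promise<string> otherwise, so callers must handle both shapes:
//
//   var result = inliner.inlineImports(cssText, 'http://example.com/app.css');
//   if (PromiseWrapper.isPromise(result)) {
//     result.then((css) => render(css));
//   } else {
//     render(result);
//   }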
// Extracts the url from an import rule, supported formats:
// - 'url' / "url",
// - url(url) / url('url') / url("url")
function _extractUrl(importRule: string): string {
var match = RegExpWrapper.firstMatch(_urlRe, importRule);
if (isBlank(match)) return null;
return isPresent(match[1]) ? match[1] : match[2];
}
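// Illustrative inputs and outputs for _extractUrl (hypothetical values):
//   _extractUrl("url('foo.css')")    returns 'foo.css' (first capture group)
//   _extractUrl('"foo.css" screen')  returns 'foo.css' (second capture group)
//   _extractUrl('no-quotes-no-url')  returns null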
// Extracts the media query from an import rule.
// Returns null when there is no media query.
function _extractMediaQuery(importRule: string): string {
var match = RegExpWrapper.firstMatch(_mediaQueryRe, importRule);
if (isBlank(match)) return null;
var mediaQuery = match[1].trim();
  return (mediaQuery.length > 0) ? mediaQuery : null;
}
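// Illustrative behavior (hypothetical values):
//   _extractMediaQuery("'foo.css' screen and (min-width: 100px)")
//     returns 'screen and (min-width: 100px)'
//   _extractMediaQuery("'foo.css'") returns null (empty query after trimming)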
// Wraps the css in a media rule when the media query is not null
function _wrapInMediaRule(css: string, query: string): string {
return (isBlank(query)) ? css : `@media ${query} {\n${css}\n}`;
}
var _importRe = RegExpWrapper.create('@import\\s+([^;]+);');
var _urlRe = RegExpWrapper.create(
'url\\(\\s*?[\'"]?([^\'")]+)[\'"]?|' + // url(url) or url('url') or url("url")
'[\'"]([^\'")]+)[\'"]' // "url" or 'url'
);
var _mediaQueryRe = RegExpWrapper.create('[\'"][^\'"]+[\'"]\\s*\\)?\\s*(.*)');
| modules/angular2/src/render/dom/shadow_dom/style_inliner.js | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.000573887606151402,
0.00019962401711381972,
0.00016666564624756575,
0.00017054149066098034,
0.00009710418089525774
]
|
{
"id": 0,
"code_window": [
" * instantiated.\n",
" *\n",
" * @exportedAs angular2/di_errors\n",
" */\n",
"export class InstantiationError extends AbstractBindingError {\n",
" // TODO(tbosch): Can't do key:Key as this results in a circular dependency!\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep"
],
"after_edit": [
" cause;\n",
" causeKey;\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "add",
"edit_start_line_idx": 142
} | import {
afterEach,
AsyncTestCompleter,
beforeEach,
ddescribe,
describe,
expect,
iit,
inject,
it,
xit,
} from 'angular2/test_lib';
import { DateWrapper, Json, RegExpWrapper, isPresent } from 'angular2/src/facade/lang';
import { PromiseWrapper } from 'angular2/src/facade/async';
import {
bind, Injector,
SampleDescription,
MeasureValues,
Options
} from 'benchpress/common';
import { JsonFileReporter } from 'benchpress/src/reporter/json_file_reporter';
export function main() {
describe('file reporter', () => {
var loggedFile;
function createReporter({sampleId, descriptions, metrics, path}) {
var bindings = [
JsonFileReporter.BINDINGS,
bind(SampleDescription).toValue(new SampleDescription(sampleId, descriptions, metrics)),
bind(JsonFileReporter.PATH).toValue(path),
bind(Options.NOW).toValue( () => DateWrapper.fromMillis(1234) ),
bind(Options.WRITE_FILE).toValue((filename, content) => {
loggedFile = {
'filename': filename,
'content': content
};
return PromiseWrapper.resolve(null);
})
];
return Injector.resolveAndCreate(bindings).get(JsonFileReporter);
}
it('should write all data into a file', inject([AsyncTestCompleter], (async) => {
createReporter({
sampleId: 'someId',
descriptions: [{ 'a': 2 }],
path: 'somePath',
metrics: {
'script': 'script time'
}
}).reportSample([
mv(0, 0, { 'a': 3, 'b': 6})
], [mv(0, 0, {
'a': 3, 'b': 6
}), mv(1, 1, {
'a': 5, 'b': 9
})]);
var regExp = RegExpWrapper.create('somePath/someId_\\d+\\.json');
expect(isPresent(RegExpWrapper.firstMatch(regExp, loggedFile['filename']))).toBe(true);
var parsedContent = Json.parse(loggedFile['content']);
expect(parsedContent).toEqual({
"description": {
"id": "someId",
"description": {
"a": 2
},
"metrics": {"script": "script time"}
},
"completeSample": [{
"timeStamp": "1970-01-01T00:00:00.000Z",
"runIndex": 0,
"values": {
"a": 3,
"b": 6
}
}],
"validSample": [
{
"timeStamp": "1970-01-01T00:00:00.000Z",
"runIndex": 0,
"values": {
"a": 3,
"b": 6
}
},
{
"timeStamp": "1970-01-01T00:00:00.001Z",
"runIndex": 1,
"values": {
"a": 5,
"b": 9
}
}
]
});
async.done();
}));
});
}
function mv(runIndex, time, values) {
return new MeasureValues(runIndex, DateWrapper.fromMillis(time), values);
}
| modules/benchpress/test/reporter/json_file_reporter_spec.js | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00017272877448704094,
0.00016948874690569937,
0.00016815384151414037,
0.000168783706612885,
0.000001449109959139605
]
|
{
"id": 1,
"code_window": [
" // TODO(tbosch): Can't do key:Key as this results in a circular dependency!\n",
" constructor(originalException, key) {\n",
" super(key, function (keys:List) {\n",
" var first = stringify(ListWrapper.first(keys).token);\n",
" return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +\n"
],
"labels": [
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" constructor(cause, key) {\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "replace",
"edit_start_line_idx": 143
} | import {isBlank} from 'angular2/src/facade/lang';
import {describe, ddescribe, it, iit, expect, beforeEach} from 'angular2/test_lib';
import {Injector, Inject, InjectLazy, Optional, bind, ResolvedBinding} from 'angular2/di';
class Engine {
}
class BrokenEngine {
constructor() {
throw "Broken Engine";
}
}
class DashboardSoftware {
}
class Dashboard {
constructor(software: DashboardSoftware) {}
}
class TurboEngine extends Engine {
}
class Car {
engine:Engine;
constructor(engine:Engine) {
this.engine = engine;
}
}
class CarWithLazyEngine {
engineFactory;
constructor(@InjectLazy(Engine) engineFactory) {
this.engineFactory = engineFactory;
}
}
class CarWithOptionalEngine {
engine;
constructor(@Optional() engine:Engine) {
this.engine = engine;
}
}
class CarWithDashboard {
engine:Engine;
dashboard:Dashboard;
constructor(engine:Engine, dashboard:Dashboard) {
this.engine = engine;
this.dashboard = dashboard;
}
}
class SportsCar extends Car {
engine:Engine;
constructor(engine:Engine) {
super(engine);
}
}
class CarWithInject {
engine:Engine;
constructor(@Inject(TurboEngine) engine:Engine) {
this.engine = engine;
}
}
class CyclicEngine {
constructor(car:Car) {}
}
class NoAnnotations {
constructor(secretDependency) {}
}
export function main() {
describe('injector', function () {
it('should instantiate a class without dependencies', function () {
var injector = Injector.resolveAndCreate([Engine]);
var engine = injector.get(Engine);
expect(engine).toBeAnInstanceOf(Engine);
});
it('should resolve dependencies based on type information', function () {
var injector = Injector.resolveAndCreate([Engine, Car]);
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(Car);
expect(car.engine).toBeAnInstanceOf(Engine);
});
it('should resolve dependencies based on @Inject annotation', function () {
var injector = Injector.resolveAndCreate([TurboEngine, Engine, CarWithInject]);
var car = injector.get(CarWithInject);
expect(car).toBeAnInstanceOf(CarWithInject);
expect(car.engine).toBeAnInstanceOf(TurboEngine);
});
it('should throw when no type and not @Inject', function () {
expect(() => Injector.resolveAndCreate([NoAnnotations])).toThrowError(
'Cannot resolve all parameters for NoAnnotations. '+
'Make sure they all have valid type or annotations.');
});
it('should cache instances', function () {
var injector = Injector.resolveAndCreate([Engine]);
var e1 = injector.get(Engine);
var e2 = injector.get(Engine);
expect(e1).toBe(e2);
});
it('should bind to a value', function () {
var injector = Injector.resolveAndCreate([
bind(Engine).toValue("fake engine")
]);
var engine = injector.get(Engine);
expect(engine).toEqual("fake engine");
});
it('should bind to a factory', function () {
function sportsCarFactory(e:Engine) {
return new SportsCar(e);
}
var injector = Injector.resolveAndCreate([
Engine,
bind(Car).toFactory(sportsCarFactory)
]);
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(SportsCar);
expect(car.engine).toBeAnInstanceOf(Engine);
});
it('should bind to an alias', function() {
var injector = Injector.resolveAndCreate([
Engine,
bind(SportsCar).toClass(SportsCar),
bind(Car).toAlias(SportsCar)
]);
var car = injector.get(Car);
var sportsCar = injector.get(SportsCar);
expect(car).toBeAnInstanceOf(SportsCar);
expect(car).toBe(sportsCar);
});
it('should throw when the aliased binding does not exist', function () {
var injector = Injector.resolveAndCreate([
bind('car').toAlias(SportsCar)
]);
expect(() => injector.get('car')).toThrowError('No provider for SportsCar! (car -> SportsCar)');
});
it('should support overriding factory dependencies', function () {
var injector = Injector.resolveAndCreate([
Engine,
bind(Car).toFactory((e) => new SportsCar(e), [Engine])
]);
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(SportsCar);
expect(car.engine).toBeAnInstanceOf(Engine);
});
it('should support optional dependencies', function () {
var injector = Injector.resolveAndCreate([
CarWithOptionalEngine
]);
var car = injector.get(CarWithOptionalEngine);
expect(car.engine).toEqual(null);
});
it("should flatten passed-in bindings", function () {
var injector = Injector.resolveAndCreate([
[[Engine, Car]]
]);
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(Car);
});
it("should use the last binding "+
"when there are mutliple bindings for same token", function () {
var injector = Injector.resolveAndCreate([
bind(Engine).toClass(Engine),
bind(Engine).toClass(TurboEngine)
]);
expect(injector.get(Engine)).toBeAnInstanceOf(TurboEngine);
});
it('should use non-type tokens', function () {
var injector = Injector.resolveAndCreate([
bind('token').toValue('value')
]);
expect(injector.get('token')).toEqual('value');
});
it('should throw when given invalid bindings', function () {
expect(() => Injector.resolveAndCreate(["blah"])).toThrowError('Invalid binding blah');
expect(() => Injector.resolveAndCreate([bind("blah")])).toThrowError('Invalid binding blah');
});
it('should provide itself', function () {
var parent = Injector.resolveAndCreate([]);
var child = parent.resolveAndCreateChild([]);
expect(child.get(Injector)).toBe(child);
});
it('should throw when no provider defined', function () {
var injector = Injector.resolveAndCreate([]);
expect(() => injector.get('NonExisting')).toThrowError('No provider for NonExisting!');
});
it('should show the full path when no provider', function () {
var injector = Injector.resolveAndCreate([CarWithDashboard, Engine, Dashboard]);
expect(() => injector.get(CarWithDashboard)).
toThrowError('No provider for DashboardSoftware! (CarWithDashboard -> Dashboard -> DashboardSoftware)');
});
it('should throw when trying to instantiate a cyclic dependency', function () {
var injector = Injector.resolveAndCreate([
Car,
bind(Engine).toClass(CyclicEngine)
]);
expect(() => injector.get(Car))
.toThrowError('Cannot instantiate cyclic dependency! (Car -> Engine -> Car)');
expect(() => injector.asyncGet(Car))
.toThrowError('Cannot instantiate cyclic dependency! (Car -> Engine -> Car)');
});
it('should show the full path when error happens in a constructor', function () {
var injector = Injector.resolveAndCreate([
Car,
bind(Engine).toClass(BrokenEngine)
]);
try {
injector.get(Car);
throw "Must throw";
} catch (e) {
expect(e.message).toContain("Error during instantiation of Engine! (Car -> Engine)");
}
});
it('should instantiate an object after a failed attempt', function () {
var isBroken = true;
var injector = Injector.resolveAndCreate([
Car,
bind(Engine).toFactory(() => isBroken ? new BrokenEngine() : new Engine())
]);
expect(() => injector.get(Car)).toThrowError(new RegExp("Error"));
isBroken = false;
expect(injector.get(Car)).toBeAnInstanceOf(Car);
});
it('should support null values', () => {
var injector = Injector.resolveAndCreate([bind('null').toValue(null)]);
expect(injector.get('null')).toBe(null);
});
describe("default bindings", function () {
it("should be used when no matching binding found", function () {
var injector = Injector.resolveAndCreate([], {defaultBindings: true});
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(Car);
});
it("should use the matching binding when it is available", function () {
var injector = Injector.resolveAndCreate([
bind(Car).toClass(SportsCar)
], {defaultBindings: true});
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(SportsCar);
});
});
describe("child", function () {
it('should load instances from parent injector', function () {
var parent = Injector.resolveAndCreate([Engine]);
var child = parent.resolveAndCreateChild([]);
var engineFromParent = parent.get(Engine);
var engineFromChild = child.get(Engine);
expect(engineFromChild).toBe(engineFromParent);
});
it("should not use the child bindings when resolving the dependencies of a parent binding", function () {
var parent = Injector.resolveAndCreate([
Car, Engine
]);
var child = parent.resolveAndCreateChild([
bind(Engine).toClass(TurboEngine)
]);
var carFromChild = child.get(Car);
expect(carFromChild.engine).toBeAnInstanceOf(Engine);
});
it('should create new instance in a child injector', function () {
var parent = Injector.resolveAndCreate([Engine]);
var child = parent.resolveAndCreateChild([
bind(Engine).toClass(TurboEngine)
]);
var engineFromParent = parent.get(Engine);
var engineFromChild = child.get(Engine);
expect(engineFromParent).not.toBe(engineFromChild);
expect(engineFromChild).toBeAnInstanceOf(TurboEngine);
});
it("should create child injectors without default bindings", function () {
var parent = Injector.resolveAndCreate([], {defaultBindings: true});
var child = parent.resolveAndCreateChild([]);
      // child delegates the creation of Car to the parent
var childCar = child.get(Car);
var parentCar = parent.get(Car);
expect(childCar).toBe(parentCar);
});
});
describe("lazy", function () {
it("should create dependencies lazily", function () {
var injector = Injector.resolveAndCreate([
Engine,
CarWithLazyEngine
]);
var car = injector.get(CarWithLazyEngine);
expect(car.engineFactory()).toBeAnInstanceOf(Engine);
});
it("should cache instance created lazily", function () {
var injector = Injector.resolveAndCreate([
Engine,
CarWithLazyEngine
]);
var car = injector.get(CarWithLazyEngine);
var e1 = car.engineFactory();
var e2 = car.engineFactory();
expect(e1).toBe(e2);
});
});
describe('resolve', function() {
it('should resolve and flatten', function() {
var bindings = Injector.resolve([Engine, [BrokenEngine]]);
bindings.forEach(function(b) {
if (isBlank(b)) return; // the result is a sparse array
expect(b instanceof ResolvedBinding).toBe(true);
});
});
});
});
}
| modules/angular2/test/di/injector_spec.js | 1 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.9516637921333313,
0.11868526041507721,
0.0001660248526604846,
0.00017101115372497588,
0.2935594916343689
]
|
{
"id": 1,
"code_window": [
" // TODO(tbosch): Can't do key:Key as this results in a circular dependency!\n",
" constructor(originalException, key) {\n",
" super(key, function (keys:List) {\n",
" var first = stringify(ListWrapper.first(keys).token);\n",
" return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +\n"
],
"labels": [
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" constructor(cause, key) {\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "replace",
"edit_start_line_idx": 143
} | import {describe, ddescribe, it, iit, xit, xdescribe, expect, beforeEach, el} from 'angular2/test_lib';
import {setterFactory} from 'angular2/src/render/dom/view/property_setter_factory';
import {DOM} from 'angular2/src/dom/dom_adapter';
export function main() {
var div;
beforeEach( () => {
div = el('<div></div>');
});
describe('property setter factory', () => {
it('should return a setter for a property', () => {
var setterFn = setterFactory('title');
setterFn(div, 'Hello');
expect(div.title).toEqual('Hello');
var otherSetterFn = setterFactory('title');
expect(setterFn).toBe(otherSetterFn);
});
it('should return a setter for an attribute', () => {
var setterFn = setterFactory('attr.role');
setterFn(div, 'button');
expect(DOM.getAttribute(div, 'role')).toEqual('button');
setterFn(div, null);
expect(DOM.getAttribute(div, 'role')).toEqual(null);
expect(() => {
setterFn(div, 4);
}).toThrowError("Invalid role attribute, only string values are allowed, got '4'");
var otherSetterFn = setterFactory('attr.role');
expect(setterFn).toBe(otherSetterFn);
});
it('should return a setter for a class', () => {
var setterFn = setterFactory('class.active');
setterFn(div, true);
expect(DOM.hasClass(div, 'active')).toEqual(true);
setterFn(div, false);
expect(DOM.hasClass(div, 'active')).toEqual(false);
var otherSetterFn = setterFactory('class.active');
expect(setterFn).toBe(otherSetterFn);
});
it('should return a setter for a style', () => {
var setterFn = setterFactory('style.width');
setterFn(div, '40px');
expect(DOM.getStyle(div, 'width')).toEqual('40px');
setterFn(div, null);
expect(DOM.getStyle(div, 'width')).toEqual('');
var otherSetterFn = setterFactory('style.width');
expect(setterFn).toBe(otherSetterFn);
});
it('should return a setter for a style with a unit', () => {
var setterFn = setterFactory('style.height.px');
setterFn(div, 40);
expect(DOM.getStyle(div, 'height')).toEqual('40px');
setterFn(div, null);
expect(DOM.getStyle(div, 'height')).toEqual('');
var otherSetterFn = setterFactory('style.height.px');
expect(setterFn).toBe(otherSetterFn);
});
it('should return a setter for innerHtml', () => {
var setterFn = setterFactory('innerHtml');
setterFn(div, '<span></span>');
expect(DOM.getInnerHTML(div)).toEqual('<span></span>');
var otherSetterFn = setterFactory('innerHtml');
expect(setterFn).toBe(otherSetterFn);
});
});
}
| modules/angular2/test/render/dom/view/property_setter_factory_spec.js | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00017552418285049498,
0.00017234614642802626,
0.000170123326824978,
0.00017197782290168107,
0.0000018149269180867122
]
|
{
"id": 1,
"code_window": [
" // TODO(tbosch): Can't do key:Key as this results in a circular dependency!\n",
" constructor(originalException, key) {\n",
" super(key, function (keys:List) {\n",
" var first = stringify(ListWrapper.first(keys).token);\n",
" return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +\n"
],
"labels": [
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" constructor(cause, key) {\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "replace",
"edit_start_line_idx": 143
} | modules/angular2_material/src/components/progress-circular/progress-circular.scss | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.0001710277865640819,
0.0001710277865640819,
0.0001710277865640819,
0.0001710277865640819,
0
]
|
|
{
"id": 1,
"code_window": [
" // TODO(tbosch): Can't do key:Key as this results in a circular dependency!\n",
" constructor(originalException, key) {\n",
" super(key, function (keys:List) {\n",
" var first = stringify(ListWrapper.first(keys).token);\n",
" return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +\n"
],
"labels": [
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" constructor(cause, key) {\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "replace",
"edit_start_line_idx": 143
} | import {describe, beforeEach, it, expect, iit, ddescribe, el} from 'angular2/test_lib';
import {ListWrapper, List, MapWrapper} from 'angular2/src/facade/collection';
import {DOM} from 'angular2/src/dom/dom_adapter';
import {isPresent, NumberWrapper, StringWrapper} from 'angular2/src/facade/lang';
import {CompilePipeline} from 'angular2/src/render/dom/compiler/compile_pipeline';
import {CompileElement} from 'angular2/src/render/dom/compiler/compile_element';
import {CompileStep} from 'angular2/src/render/dom/compiler/compile_step';
import {CompileControl} from 'angular2/src/render/dom/compiler/compile_control';
import {ProtoViewBuilder} from 'angular2/src/render/dom/view/proto_view_builder';
export function main() {
describe('compile_pipeline', () => {
describe('children compilation', () => {
it('should walk the tree in depth first order including template contents', () => {
var element = el('<div id="1"><template id="2"><span id="3"></span></template></div>');
var step0Log = [];
var results = new CompilePipeline([createLoggerStep(step0Log)]).process(element);
expect(step0Log).toEqual(['1', '1<2', '2<3']);
expect(resultIdLog(results)).toEqual(['1', '2', '3']);
});
it('should stop walking the tree when compileChildren is false', () => {
var element = el('<div id="1"><template id="2" ignore-children><span id="3"></span></template></div>');
var step0Log = [];
var pipeline = new CompilePipeline([new IgnoreChildrenStep(), createLoggerStep(step0Log)]);
var results = pipeline.process(element);
expect(step0Log).toEqual(['1', '1<2']);
expect(resultIdLog(results)).toEqual(['1', '2']);
});
});
it('should inherit protoViewBuilders to children', () => {
var element = el('<div><div><span viewroot><span></span></span></div></div>');
var pipeline = new CompilePipeline([new MockStep((parent, current, control) => {
if (isPresent(DOM.getAttribute(current.element, 'viewroot'))) {
current.inheritedProtoView = new ProtoViewBuilder(current.element);
}
})]);
var results = pipeline.process(element);
expect(results[0].inheritedProtoView).toBe(results[1].inheritedProtoView);
expect(results[2].inheritedProtoView).toBe(results[3].inheritedProtoView);
});
it('should inherit elementBinderBuilders to children', () => {
var element = el('<div bind><div><span bind><span></span></span></div></div>');
var pipeline = new CompilePipeline([new MockStep((parent, current, control) => {
if (isPresent(DOM.getAttribute(current.element, 'bind'))) {
current.bindElement();
}
})]);
var results = pipeline.process(element);
expect(results[0].inheritedElementBinder).toBe(results[1].inheritedElementBinder);
expect(results[2].inheritedElementBinder).toBe(results[3].inheritedElementBinder);
});
it('should mark root elements as viewRoot', () => {
var rootElement = el('<div></div>');
var results = new CompilePipeline([]).process(rootElement);
expect(results[0].isViewRoot).toBe(true);
});
it('should calculate distanceToParent / parent correctly', () => {
var element = el('<div bind><div bind></div><div><div bind></div></div></div>');
var pipeline = new CompilePipeline([new MockStep((parent, current, control) => {
if (isPresent(DOM.getAttribute(current.element, 'bind'))) {
current.bindElement();
}
})]);
var results = pipeline.process(element);
expect(results[0].inheritedElementBinder.distanceToParent).toBe(0);
expect(results[1].inheritedElementBinder.distanceToParent).toBe(1);
expect(results[3].inheritedElementBinder.distanceToParent).toBe(2);
expect(results[1].inheritedElementBinder.parent).toBe(results[0].inheritedElementBinder);
expect(results[3].inheritedElementBinder.parent).toBe(results[0].inheritedElementBinder);
});
it('should not execute further steps when ignoreCurrentElement has been called', () => {
var element = el('<div id="1"><span id="2" ignore-current></span><span id="3"></span></div>');
var logs = [];
var pipeline = new CompilePipeline([
new IgnoreCurrentElementStep(),
createLoggerStep(logs),
]);
var results = pipeline.process(element);
expect(results.length).toBe(2);
expect(logs).toEqual(['1', '1<3'])
});
describe('control.addParent', () => {
it('should report the new parent to the following processor and the result', () => {
var element = el('<div id="1"><span wrap0="1" id="2"><b id="3"></b></span></div>');
var step0Log = [];
var step1Log = [];
var pipeline = new CompilePipeline([
createWrapperStep('wrap0', step0Log),
createLoggerStep(step1Log)
]);
var result = pipeline.process(element);
expect(step0Log).toEqual(['1', '1<2', '2<3']);
expect(step1Log).toEqual(['1', '1<wrap0#0', 'wrap0#0<2', '2<3']);
expect(resultIdLog(result)).toEqual(['1', 'wrap0#0', '2', '3']);
});
it('should allow to add a parent by multiple processors to the same element', () => {
var element = el('<div id="1"><span wrap0="1" wrap1="1" id="2"><b id="3"></b></span></div>');
var step0Log = [];
var step1Log = [];
var step2Log = [];
var pipeline = new CompilePipeline([
createWrapperStep('wrap0', step0Log),
createWrapperStep('wrap1', step1Log),
createLoggerStep(step2Log)
]);
var result = pipeline.process(element);
expect(step0Log).toEqual(['1', '1<2', '2<3']);
expect(step1Log).toEqual(['1', '1<wrap0#0', 'wrap0#0<2', '2<3']);
expect(step2Log).toEqual(['1', '1<wrap0#0', 'wrap0#0<wrap1#0', 'wrap1#0<2', '2<3']);
expect(resultIdLog(result)).toEqual(['1', 'wrap0#0', 'wrap1#0', '2', '3']);
});
it('should allow to add a parent by multiple processors to different elements', () => {
var element = el('<div id="1"><span wrap0="1" id="2"><b id="3" wrap1="1"></b></span></div>');
var step0Log = [];
var step1Log = [];
var step2Log = [];
var pipeline = new CompilePipeline([
createWrapperStep('wrap0', step0Log),
createWrapperStep('wrap1', step1Log),
createLoggerStep(step2Log)
]);
var result = pipeline.process(element);
expect(step0Log).toEqual(['1', '1<2', '2<3']);
expect(step1Log).toEqual(['1', '1<wrap0#0', 'wrap0#0<2', '2<3']);
expect(step2Log).toEqual(['1', '1<wrap0#0', 'wrap0#0<2', '2<wrap1#0', 'wrap1#0<3']);
expect(resultIdLog(result)).toEqual(['1', 'wrap0#0', '2', 'wrap1#0', '3']);
});
it('should allow to add multiple parents by the same processor', () => {
var element = el('<div id="1"><span wrap0="2" id="2"><b id="3"></b></span></div>');
var step0Log = [];
var step1Log = [];
var pipeline = new CompilePipeline([
createWrapperStep('wrap0', step0Log),
createLoggerStep(step1Log)
]);
var result = pipeline.process(element);
expect(step0Log).toEqual(['1', '1<2', '2<3']);
expect(step1Log).toEqual(['1', '1<wrap0#0', 'wrap0#0<wrap0#1', 'wrap0#1<2', '2<3']);
expect(resultIdLog(result)).toEqual(['1', 'wrap0#0', 'wrap0#1', '2', '3']);
});
});
describe('control.addChild', () => {
it('should report the new child to all processors and the result', () => {
var element = el('<div id="1"><div id="2"></div></div>');
var resultLog = [];
var newChild = new CompileElement(el('<div id="3"></div>'));
var pipeline = new CompilePipeline([
new MockStep((parent, current, control) => {
if (StringWrapper.equals(DOM.getAttribute(current.element, 'id'), '1')) {
control.addChild(newChild);
}
}),
createLoggerStep(resultLog)
]);
var result = pipeline.process(element);
expect(result[2]).toBe(newChild);
expect(resultLog).toEqual(['1', '1<2', '1<3']);
expect(resultIdLog(result)).toEqual(['1', '2', '3']);
});
});
});
}
class MockStep extends CompileStep {
processClosure:Function;
constructor(process) {
super();
this.processClosure = process;
}
process(parent:CompileElement, current:CompileElement, control:CompileControl) {
this.processClosure(parent, current, control);
}
}
export class IgnoreChildrenStep extends CompileStep {
process(parent:CompileElement, current:CompileElement, control:CompileControl) {
var attributeMap = DOM.attributeMap(current.element);
if (MapWrapper.contains(attributeMap, 'ignore-children')) {
current.compileChildren = false;
}
}
}
class IgnoreCurrentElementStep extends CompileStep {
process(parent:CompileElement, current:CompileElement, control:CompileControl) {
var attributeMap = DOM.attributeMap(current.element);
if (MapWrapper.contains(attributeMap, 'ignore-current')) {
control.ignoreCurrentElement();
}
}
}
function logEntry(log, parent, current) {
var parentId = '';
if (isPresent(parent)) {
parentId = DOM.getAttribute(parent.element, 'id') + '<';
}
ListWrapper.push(log, parentId + DOM.getAttribute(current.element, 'id'));
}
function createLoggerStep(log) {
return new MockStep((parent, current, control) => {
logEntry(log, parent, current);
});
}
function createWrapperStep(wrapperId, log) {
var nextElementId = 0;
return new MockStep((parent, current, control) => {
var parentCountStr = DOM.getAttribute(current.element, wrapperId);
if (isPresent(parentCountStr)) {
var parentCount = NumberWrapper.parseInt(parentCountStr, 10);
while (parentCount > 0) {
control.addParent(new CompileElement(el(`<a id="${wrapperId}#${nextElementId++}"></a>`)));
parentCount--;
}
}
logEntry(log, parent, current);
});
}
function resultIdLog(result) {
var idLog = [];
ListWrapper.forEach(result, (current) => {
logEntry(idLog, null, current);
});
return idLog;
}
| modules/angular2/test/render/dom/compiler/pipeline_spec.js | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.977064847946167,
0.03928210213780403,
0.00016659397806506604,
0.000171972656971775,
0.19142413139343262
]
|
{
"id": 2,
"code_window": [
" super(key, function (keys:List) {\n",
" var first = stringify(ListWrapper.first(keys).token);\n",
" return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +\n",
" ` ORIGINAL ERROR: ${originalException}`;\n",
" });\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
" ` ORIGINAL ERROR: ${cause}`;\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "replace",
"edit_start_line_idx": 147
} | import {ListWrapper, List} from 'angular2/src/facade/collection';
import {stringify} from 'angular2/src/facade/lang';
function findFirstClosedCycle(keys:List) {
var res = [];
for(var i = 0; i < keys.length; ++i) {
if (ListWrapper.contains(res, keys[i])) {
ListWrapper.push(res, keys[i]);
return res;
} else {
ListWrapper.push(res, keys[i]);
}
}
return res;
}
function constructResolvingPath(keys:List) {
if (keys.length > 1) {
var reversed = findFirstClosedCycle(ListWrapper.reversed(keys));
var tokenStrs = ListWrapper.map(reversed, (k) => stringify(k.token));
return " (" + tokenStrs.join(' -> ') + ")";
} else {
return "";
}
}
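// Illustrative example (keys shown by their stringified tokens): if resolution
// fails for DashboardSoftware and the error bubbles up through Dashboard and
// CarWithDashboard, the error's keys list ends up as
// [DashboardSoftware, Dashboard, CarWithDashboard], and
// constructResolvingPath(keys) yields
// " (CarWithDashboard -> Dashboard -> DashboardSoftware)".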
/**
* Base class for all errors arising from misconfigured bindings.
*
* @exportedAs angular2/di_errors
*/
export class AbstractBindingError extends Error {
keys:List;
constructResolvingMessage:Function;
message;
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key, constructResolvingMessage:Function) {
super();
this.keys = [key];
this.constructResolvingMessage = constructResolvingMessage;
this.message = this.constructResolvingMessage(this.keys);
}
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
addKey(key) {
ListWrapper.push(this.keys, key);
this.message = this.constructResolvingMessage(this.keys);
}
toString() {
return this.message;
}
}
/**
* Thrown when trying to retrieve a dependency by `Key` from {@link Injector}, but the {@link Injector} does not have a
* {@link Binding} for {@link Key}.
*
* @exportedAs angular2/di_errors
*/
export class NoBindingError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key) {
super(key, function (keys:List) {
var first = stringify(ListWrapper.first(keys).token);
return `No provider for ${first}!${constructResolvingPath(keys)}`;
});
}
}
/**
* Thrown when trying to retrieve an async {@link Binding} using the sync API.
*
* ## Example
*
* ```javascript
* var injector = Injector.resolveAndCreate([
* bind(Number).toAsyncFactory(() => {
* return new Promise((resolve) => resolve(1 + 2));
* }),
 *   bind(String).toFactory((v) => { return "Value: " + v; }, [Number])
* ]);
*
* injector.asyncGet(String).then((v) => expect(v).toBe('Value: 3'));
* expect(() => {
* injector.get(String);
 * }).toThrowError(AsyncBindingError);
* ```
*
* The above example throws because `String` depends on `Number` which is async. If any binding in the dependency
* graph is async then the graph can only be retrieved using the `asyncGet` API.
*
* @exportedAs angular2/di_errors
*/
export class AsyncBindingError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key) {
super(key, function (keys:List) {
var first = stringify(ListWrapper.first(keys).token);
return `Cannot instantiate ${first} synchronously. ` +
`It is provided as a promise!${constructResolvingPath(keys)}`;
});
}
}
/**
* Thrown when dependencies form a cycle.
*
* ## Example:
*
* ```javascript
* class A {
* constructor(b:B) {}
* }
* class B {
* constructor(a:A) {}
* }
* ```
*
* Retrieving `A` or `B` throws a `CyclicDependencyError` as the graph above cannot be constructed.
*
* @exportedAs angular2/di_errors
*/
export class CyclicDependencyError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key) {
super(key, function (keys:List) {
return `Cannot instantiate cyclic dependency!${constructResolvingPath(keys)}`;
});
}
}
/**
* Thrown when a constructing type returns with an Error.
*
* The `InstantiationError` class contains the original error plus the dependency graph which caused this object to be
* instantiated.
*
* @exportedAs angular2/di_errors
*/
export class InstantiationError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(originalException, key) {
super(key, function (keys:List) {
var first = stringify(ListWrapper.first(keys).token);
return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +
` ORIGINAL ERROR: ${originalException}`;
});
}
}
/**
 * Thrown when an object other than {@link Binding} (or `Type`) is passed to {@link Injector} creation.
*
* @exportedAs angular2/di_errors
*/
export class InvalidBindingError extends Error {
message:string;
constructor(binding) {
super();
this.message = `Invalid binding ${binding}`;
}
toString() {
return this.message;
}
}
/**
* Thrown when the class has no annotation information.
*
* Lack of annotation information prevents the {@link Injector} from determining which dependencies need to be injected into
* the constructor.
*
* @exportedAs angular2/di_errors
*/
export class NoAnnotationError extends Error {
message:string;
constructor(typeOrFunc) {
super();
this.message = `Cannot resolve all parameters for ${stringify(typeOrFunc)}.` +
` Make sure they all have valid type or annotations.`;
}
toString() {
return this.message;
}
}
| modules/angular2/src/di/exceptions.js | 1 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.9982818365097046,
0.2121061235666275,
0.00016517045150976628,
0.001461552456021309,
0.40411749482154846
]
|
{
"id": 2,
"code_window": [
" super(key, function (keys:List) {\n",
" var first = stringify(ListWrapper.first(keys).token);\n",
" return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +\n",
" ` ORIGINAL ERROR: ${originalException}`;\n",
" });\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
" ` ORIGINAL ERROR: ${cause}`;\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "replace",
"edit_start_line_idx": 147
} | import {int, global, isPresent} from 'angular2/src/facade/lang';
import {List} from 'angular2/src/facade/collection';
import Rx from 'rx/dist/rx.all';
export var Promise = global.Promise;
export class PromiseWrapper {
static resolve(obj):Promise {
return Promise.resolve(obj);
}
static reject(obj):Promise {
return Promise.reject(obj);
}
  // Note: We can't rename this method to `catch`, as that is not a valid
// method name in Dart.
static catchError(promise:Promise, onError:Function):Promise {
return promise.catch(onError);
}
static all(promises:List):Promise {
if (promises.length == 0) return Promise.resolve([]);
return Promise.all(promises);
}
static then(promise:Promise, success:Function, rejection:Function):Promise {
return promise.then(success, rejection);
}
static completer() {
var resolve;
var reject;
var p = new Promise(function(res, rej) {
resolve = res;
reject = rej;
});
return {
promise: p,
resolve: resolve,
reject: reject
};
}
static setTimeout(fn:Function, millis:int) {
global.setTimeout(fn, millis);
}
static isPromise(maybePromise):boolean {
return maybePromise instanceof Promise;
}
}
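// Minimal usage sketch for the completer above (the logging callbacks are
// illustrative):
//
//   var completer = PromiseWrapper.completer();
//   PromiseWrapper.then(completer.promise,
//       (value) => console.log(value), (error) => console.error(error));
//   completer.resolve(42); // the success handler eventually receives 42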
export class ObservableWrapper {
static subscribe(emitter:EventEmitter, onNext, onThrow = null, onReturn = null) {
return emitter.observer({next: onNext, throw: onThrow, return: onReturn});
}
static dispose(subscription:any) {
subscription.dispose();
}
static isObservable(obs):boolean {
return obs instanceof Observable;
}
static callNext(emitter:EventEmitter, value:any) {
emitter.next(value);
}
static callThrow(emitter:EventEmitter, error:any) {
emitter.throw(error);
}
static callReturn(emitter:EventEmitter) {
emitter.return();
}
}
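// Illustrative wiring (EventEmitter is defined below; the handlers are
// hypothetical):
//
//   var emitter = new EventEmitter();
//   var subscription = ObservableWrapper.subscribe(emitter,
//       (value) => handleValue(value), (error) => handleError(error));
//   ObservableWrapper.callNext(emitter, 1); // delivered asynchronously
//   ObservableWrapper.dispose(subscription);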
//TODO: vsavkin change to interface
export class Observable {
observer(generator:Function){}
}
/**
* Use Rx.Observable but provides an adapter to make it work as specified here:
* https://github.com/jhusain/observable-spec
*
* Once a reference implementation of the spec is available, switch to it.
*/
export class EventEmitter extends Observable {
_subject:Rx.Subject;
constructor() {
super();
this._subject = new Rx.Subject();
}
observer(generator) {
    // Using Rx.Scheduler.immediate plus setTimeout is a workaround so that Rx works with zone.js.
// Once https://github.com/angular/zone.js/issues/51 is fixed, the hack should be removed.
return this._subject.observeOn(Rx.Scheduler.immediate).subscribe(
(value) => {setTimeout(() => generator.next(value));},
(error) => generator.throw ? generator.throw(error) : null,
() => generator.return ? generator.return() : null
);
}
toRx():Rx.Observable {
return this._subject;
}
next(value) {
this._subject.onNext(value);
}
throw(error) {
this._subject.onError(error);
}
return(value) {
this._subject.onCompleted();
}
} | modules/angular2/src/facade/async.es6 | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00021482606825884432,
0.000173978041857481,
0.00016639078967273235,
0.00017059585661627352,
0.000012064221664331853
]
|
{
"id": 2,
"code_window": [
" super(key, function (keys:List) {\n",
" var first = stringify(ListWrapper.first(keys).token);\n",
" return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +\n",
" ` ORIGINAL ERROR: ${originalException}`;\n",
" });\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
" ` ORIGINAL ERROR: ${cause}`;\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "replace",
"edit_start_line_idx": 147
} | library dinner.soup.ng_deps.dart;
import 'package:angular2/src/core/annotations/annotations.dart';
import 'soup.dart';
bool _visited = false;
void initReflector(reflector) {
if (_visited) return;
_visited = true;
reflector
..registerType(SoupComponent, {
'factory': () => new SoupComponent(),
'parameters': const [],
'annotations': const [
const Component(
componentServices: const [SaladComponent],
properties: const {'menu': 'menu'})
]
})
..registerType(SaladComponent, {
'factory': () => new SaladComponent(),
'parameters': const [],
'annotations': const [const Component(properties: const {'menu': 'menu'})]
})
..registerSetters({'menu': (o, v) => o.menu = v});
}
| modules/angular2/test/transform/bind_generator/duplicate_bind_name_files/expected/soup.ng_deps.dart | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00017380662029609084,
0.00016836826398503035,
0.00016518431948497891,
0.00016611383762210608,
0.00000386418014386436
]
|
{
"id": 2,
"code_window": [
" super(key, function (keys:List) {\n",
" var first = stringify(ListWrapper.first(keys).token);\n",
" return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +\n",
" ` ORIGINAL ERROR: ${originalException}`;\n",
" });\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
" ` ORIGINAL ERROR: ${cause}`;\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "replace",
"edit_start_line_idx": 147
} | import 'dart:html';
Rectangle createRectangle(left, top, width, height) {
return new Rectangle(left, top, width, height);
}
| modules/angular2/test/services/rectangle_mock.dart | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00017226724594365805,
0.00017226724594365805,
0.00017226724594365805,
0.00017226724594365805,
0
]
|
{
"id": 3,
"code_window": [
" });\n",
" }\n",
"}\n",
"\n",
"/**\n"
],
"labels": [
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" this.cause = cause;\n",
" this.causeKey = key;\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "add",
"edit_start_line_idx": 149
} | import {ListWrapper, List} from 'angular2/src/facade/collection';
import {stringify} from 'angular2/src/facade/lang';
function findFirstClosedCycle(keys:List) {
var res = [];
for(var i = 0; i < keys.length; ++i) {
if (ListWrapper.contains(res, keys[i])) {
ListWrapper.push(res, keys[i]);
return res;
} else {
ListWrapper.push(res, keys[i]);
}
}
return res;
}
function constructResolvingPath(keys:List) {
if (keys.length > 1) {
var reversed = findFirstClosedCycle(ListWrapper.reversed(keys));
var tokenStrs = ListWrapper.map(reversed, (k) => stringify(k.token));
return " (" + tokenStrs.join(' -> ') + ")";
} else {
return "";
}
}
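// Illustrative example (tokens are hypothetical): with keys accumulated as
// [DashboardSoftware, Dashboard, CarWithDashboard] while resolution unwinds,
// constructResolvingPath(keys) yields
// " (CarWithDashboard -> Dashboard -> DashboardSoftware)".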
/**
* Base class for all errors arising from misconfigured bindings.
*
* @exportedAs angular2/di_errors
*/
export class AbstractBindingError extends Error {
keys:List;
constructResolvingMessage:Function;
message;
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key, constructResolvingMessage:Function) {
super();
this.keys = [key];
this.constructResolvingMessage = constructResolvingMessage;
this.message = this.constructResolvingMessage(this.keys);
}
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
addKey(key) {
ListWrapper.push(this.keys, key);
this.message = this.constructResolvingMessage(this.keys);
}
toString() {
return this.message;
}
}
/**
* Thrown when trying to retrieve a dependency by `Key` from {@link Injector}, but the {@link Injector} does not have a
* {@link Binding} for {@link Key}.
*
* @exportedAs angular2/di_errors
*/
export class NoBindingError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key) {
super(key, function (keys:List) {
var first = stringify(ListWrapper.first(keys).token);
return `No provider for ${first}!${constructResolvingPath(keys)}`;
});
}
}
/**
* Thrown when trying to retrieve an async {@link Binding} using the sync API.
*
* ## Example
*
* ```javascript
* var injector = Injector.resolveAndCreate([
* bind(Number).toAsyncFactory(() => {
* return new Promise((resolve) => resolve(1 + 2));
* }),
 *   bind(String).toFactory((v) => { return "Value: " + v; }, [Number])
* ]);
*
* injector.asyncGet(String).then((v) => expect(v).toBe('Value: 3'));
* expect(() => {
* injector.get(String);
* }).toThrowError(AsycBindingError);
* ```
*
* The above example throws because `String` depends on `Number` which is async. If any binding in the dependency
* graph is async then the graph can only be retrieved using the `asyncGet` API.
*
* @exportedAs angular2/di_errors
*/
export class AsyncBindingError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key) {
super(key, function (keys:List) {
var first = stringify(ListWrapper.first(keys).token);
return `Cannot instantiate ${first} synchronously. ` +
`It is provided as a promise!${constructResolvingPath(keys)}`;
});
}
}
/**
* Thrown when dependencies form a cycle.
*
* ## Example:
*
* ```javascript
* class A {
* constructor(b:B) {}
* }
* class B {
* constructor(a:A) {}
* }
* ```
*
* Retrieving `A` or `B` throws a `CyclicDependencyError` as the graph above cannot be constructed.
*
* @exportedAs angular2/di_errors
*/
export class CyclicDependencyError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key) {
super(key, function (keys:List) {
return `Cannot instantiate cyclic dependency!${constructResolvingPath(keys)}`;
});
}
}
/**
* Thrown when a constructing type returns with an Error.
*
* The `InstantiationError` class contains the original error plus the dependency graph which caused this object to be
* instantiated.
*
* @exportedAs angular2/di_errors
*/
export class InstantiationError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(originalException, key) {
super(key, function (keys:List) {
var first = stringify(ListWrapper.first(keys).token);
return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +
` ORIGINAL ERROR: ${originalException}`;
});
}
}
/**
 * Thrown when an object other than {@link Binding} (or `Type`) is passed to {@link Injector} creation.
*
* @exportedAs angular2/di_errors
*/
export class InvalidBindingError extends Error {
message:string;
constructor(binding) {
super();
this.message = `Invalid binding ${binding}`;
}
toString() {
return this.message;
}
}
/**
* Thrown when the class has no annotation information.
*
* Lack of annotation information prevents the {@link Injector} from determining which dependencies need to be injected into
* the constructor.
*
* @exportedAs angular2/di_errors
*/
export class NoAnnotationError extends Error {
message:string;
constructor(typeOrFunc) {
super();
this.message = `Cannot resolve all parameters for ${stringify(typeOrFunc)}.` +
` Make sure they all have valid type or annotations.`;
}
toString() {
return this.message;
}
}
| modules/angular2/src/di/exceptions.js | 1 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00026580647681839764,
0.0001807595690479502,
0.00016472378047183156,
0.0001691928191576153,
0.000025031462428160012
]
|
{
"id": 3,
"code_window": [
" });\n",
" }\n",
"}\n",
"\n",
"/**\n"
],
"labels": [
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" this.cause = cause;\n",
" this.causeKey = key;\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "add",
"edit_start_line_idx": 149
} | Language: JavaScript
BasedOnStyle: Google
ColumnLimit: 100
| .clang-format | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.0001704249734757468,
0.0001704249734757468,
0.0001704249734757468,
0.0001704249734757468,
0
]
|
{
"id": 3,
"code_window": [
" });\n",
" }\n",
"}\n",
"\n",
"/**\n"
],
"labels": [
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" this.cause = cause;\n",
" this.causeKey = key;\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "add",
"edit_start_line_idx": 149
} | import {DOM} from 'angular2/src/dom/dom_adapter';
import {StringMapWrapper} from 'angular2/src/facade/collection';
import {bind} from 'angular2/di';
import {createTestInjector, FunctionWithParamTokens, inject} from './test_injector';
export {inject} from './test_injector';
export {proxy} from 'rtts_assert/rtts_assert';
var _global = typeof window === 'undefined' ? global : window;
export var afterEach = _global.afterEach;
export var expect = _global.expect;
export var IS_DARTIUM = false;
export class AsyncTestCompleter {
_done: Function;
constructor(done: Function) {
this._done = done;
}
done() {
this._done();
}
}
var jsmBeforeEach = _global.beforeEach;
var jsmDescribe = _global.describe;
var jsmDDescribe = _global.ddescribe;
var jsmXDescribe = _global.xdescribe;
var jsmIt = _global.it;
var jsmIIt = _global.iit;
var jsmXIt = _global.xit;
var runnerStack = [];
var inIt = false;
var testBindings;
class BeforeEachRunner {
constructor(parent: BeforeEachRunner) {
this._fns = [];
this._parent = parent;
}
beforeEach(fn: FunctionWithParamTokens) {
this._fns.push(fn);
}
run(injector) {
if (this._parent) this._parent.run(injector);
this._fns.forEach((fn) => fn.execute(injector));
}
}
// Reset the test bindings before each test
jsmBeforeEach(() => { testBindings = []; });
function _describe(jsmFn, ...args) {
var parentRunner = runnerStack.length === 0 ? null : runnerStack[runnerStack.length - 1];
var runner = new BeforeEachRunner(parentRunner);
runnerStack.push(runner);
var suite = jsmFn(...args);
runnerStack.pop();
return suite;
}
export function describe(...args) {
return _describe(jsmDescribe, ...args);
}
export function ddescribe(...args) {
return _describe(jsmDDescribe, ...args);
}
export function xdescribe(...args) {
return _describe(jsmXDescribe, ...args);
}
export function beforeEach(fn) {
if (runnerStack.length > 0) {
// Inside a describe block, beforeEach() uses a BeforeEachRunner
var runner = runnerStack[runnerStack.length - 1];
if (!(fn instanceof FunctionWithParamTokens)) {
fn = inject([], fn);
}
runner.beforeEach(fn);
} else {
// Top level beforeEach() are delegated to jasmine
jsmBeforeEach(fn);
}
}
/**
* Allows overriding default bindings defined in test_injector.js.
*
* The given function must return a list of DI bindings.
*
* Example:
*
* beforeEachBindings(() => [
* bind(Compiler).toClass(MockCompiler),
* bind(SomeToken).toValue(myValue),
* ]);
*/
export function beforeEachBindings(fn) {
jsmBeforeEach(() => {
var bindings = fn();
if (!bindings) return;
testBindings = [...testBindings, ...bindings];
});
}
function _it(jsmFn, name, fn) {
var runner = runnerStack[runnerStack.length - 1];
jsmFn(name, function(done) {
var async = false;
var completerBinding = bind(AsyncTestCompleter).toFactory(() => {
// Mark the test as async when an AsyncTestCompleter is injected in an it()
if (!inIt) throw new Error('AsyncTestCompleter can only be injected in an "it()"');
async = true;
return new AsyncTestCompleter(done);
});
var injector = createTestInjector([...testBindings, completerBinding]);
runner.run(injector);
if (!(fn instanceof FunctionWithParamTokens)) {
fn = inject([], fn);
}
inIt = true;
fn.execute(injector);
inIt = false;
if (!async) done();
});
}
export function it(name, fn) {
return _it(jsmIt, name, fn);
}
export function xit(name, fn) {
return _it(jsmXIt, name, fn);
}
export function iit(name, fn) {
return _it(jsmIIt, name, fn);
}
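// Usage sketch (mirrors the specs in this repo): injecting AsyncTestCompleter
// into an it() marks the test as async; call done() when the work finishes.
// `somePromise` is a hypothetical stand-in.
//
//   it('waits for async work', inject([AsyncTestCompleter], (async) => {
//     somePromise.then((_) => async.done());
//   }));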
// To make testing consistent between dart and js
_global.print = function(msg) {
if (_global.dump) {
_global.dump(msg);
} else {
_global.console.log(msg);
}
};
// Some Map polyfills don't polyfill Map.toString correctly, which
// gives us bad error messages in tests.
// The only way to do this in Jasmine is to monkey patch a method
// to the object :-(
_global.Map.prototype.jasmineToString = function() {
var m = this;
if (!m) {
return ''+m;
}
var res = [];
m.forEach( (v,k) => {
res.push(`${k}:${v}`);
});
return `{ ${res.join(',')} }`;
}
_global.beforeEach(function() {
jasmine.addMatchers({
// Custom handler for Map as Jasmine does not support it yet
toEqual: function(util, customEqualityTesters) {
return {
compare: function(actual, expected) {
return {
pass: util.equals(actual, expected, [compareMap])
};
}
};
function compareMap(actual, expected) {
if (actual instanceof Map) {
var pass = actual.size === expected.size;
if (pass) {
actual.forEach( (v,k) => {
pass = pass && util.equals(v, expected.get(k));
});
}
return pass;
} else {
return undefined;
}
}
},
toBePromise: function() {
return {
compare: function (actual, expectedClass) {
var pass = typeof actual === 'object' && typeof actual.then === 'function';
return {
pass: pass,
get message() {
return 'Expected ' + actual + ' to be a promise';
}
};
}
};
},
toBeAnInstanceOf: function() {
return {
compare: function(actual, expectedClass) {
var pass = typeof actual === 'object' && actual instanceof expectedClass;
return {
pass: pass,
get message() {
return 'Expected ' + actual + ' to be an instance of ' + expectedClass;
}
};
}
};
},
toHaveText: function() {
return {
compare: function(actual, expectedText) {
var actualText = elementText(actual);
return {
pass: actualText == expectedText,
get message() {
return 'Expected ' + actualText + ' to be equal to ' + expectedText;
}
};
}
};
},
toImplement: function() {
return {
compare: function(actualObject, expectedInterface) {
var objProps = Object.keys(actualObject.constructor.prototype);
var intProps = Object.keys(expectedInterface.prototype);
var missedMethods = [];
intProps.forEach((k) => {
if (!actualObject.constructor.prototype[k]) missedMethods.push(k);
});
return {
pass: missedMethods.length == 0,
get message() {
return 'Expected ' + actualObject + ' to have the following methods: ' + missedMethods.join(", ");
}
};
}
};
}
});
});
export class SpyObject {
constructor(type = null) {
if (type) {
for (var prop in type.prototype) {
var m = type.prototype[prop];
if (typeof m === 'function') {
this.spy(prop);
}
}
}
}
spy(name){
if (! this[name]) {
this[name] = this._createGuinnessCompatibleSpy();
}
return this[name];
}
static stub(object = null, config = null, overrides = null) {
if (!(object instanceof SpyObject)) {
overrides = config;
config = object;
object = new SpyObject();
}
var m = StringMapWrapper.merge(config, overrides);
StringMapWrapper.forEach(m, (value, key) => {
object.spy(key).andReturn(value);
});
return object;
}
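// Usage sketch (illustrative): stub() merges the config maps and registers one
// spy per key, so SpyObject.stub({'someMethod': 'value'}).someMethod() answers
// 'value'. The method name here is an assumption for the example.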
rttsAssert(value) {
return true;
}
_createGuinnessCompatibleSpy(){
var newSpy = jasmine.createSpy();
newSpy.andCallFake = newSpy.and.callFake;
newSpy.andReturn = newSpy.and.returnValue;
// return null by default to satisfy our rtts asserts
newSpy.and.returnValue(null);
return newSpy;
}
}
function elementText(n) {
var hasNodes = (n) => {var children = DOM.childNodes(n); return children && children.length > 0;}
if (n instanceof Array) {
return n.map((nn) => elementText(nn)).join("");
}
if (DOM.isCommentNode(n)) {
return '';
}
if (DOM.isElementNode(n) && DOM.tagName(n) == 'CONTENT') {
return elementText(Array.prototype.slice.apply(DOM.getDistributedNodes(n)));
}
if (DOM.hasShadowRoot(n)) {
return elementText(DOM.childNodesAsList(DOM.getShadowRoot(n)));
}
if (hasNodes(n)) {
return elementText(DOM.childNodesAsList(n));
}
return DOM.getText(n);
}
| modules/angular2/src/test_lib/test_lib.es6 | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.0002054832293651998,
0.00017000055231619626,
0.00016353829414583743,
0.0001686215546214953,
0.000006502843916678103
]
|
{
"id": 3,
"code_window": [
" });\n",
" }\n",
"}\n",
"\n",
"/**\n"
],
"labels": [
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" this.cause = cause;\n",
" this.causeKey = key;\n"
],
"file_path": "modules/angular2/src/di/exceptions.js",
"type": "add",
"edit_start_line_idx": 149
} | library examples.hello_world.index_common_dart.ng_deps.dart;
import 'hello.dart';
import 'package:angular2/angular2.dart';
bool _visited = false;
void initReflector(reflector) {
if (_visited) return;
_visited = true;
reflector
..registerType(HelloCmp, {
'factory': () => new HelloCmp(),
'parameters': const [const []],
'annotations': const [const Decorator(compileChildren: true)]
});
}
| modules/angular2/test/transform/template_compiler/directive_metadata_files/compile_children.ng_deps.dart | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00016966034309007227,
0.00016958531341515481,
0.00016951028374023736,
0.00016958531341515481,
7.502967491745949e-8
]
|
{
"id": 4,
"code_window": [
"import {isBlank} from 'angular2/src/facade/lang';\n",
"import {describe, ddescribe, it, iit, expect, beforeEach} from 'angular2/test_lib';\n",
"import {Injector, Inject, InjectLazy, Optional, bind, ResolvedBinding} from 'angular2/di';\n",
"\n"
],
"labels": [
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"import {isBlank, BaseException} from 'angular2/src/facade/lang';\n"
],
"file_path": "modules/angular2/test/di/injector_spec.js",
"type": "replace",
"edit_start_line_idx": 0
} | import {ListWrapper, List} from 'angular2/src/facade/collection';
import {stringify} from 'angular2/src/facade/lang';
function findFirstClosedCycle(keys:List) {
var res = [];
for(var i = 0; i < keys.length; ++i) {
if (ListWrapper.contains(res, keys[i])) {
ListWrapper.push(res, keys[i]);
return res;
} else {
ListWrapper.push(res, keys[i]);
}
}
return res;
}
function constructResolvingPath(keys:List) {
if (keys.length > 1) {
var reversed = findFirstClosedCycle(ListWrapper.reversed(keys));
var tokenStrs = ListWrapper.map(reversed, (k) => stringify(k.token));
return " (" + tokenStrs.join(' -> ') + ")";
} else {
return "";
}
}
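// For illustration: given the keys collected while resolving Car -> Engine -> Car,
// constructResolvingPath(keys) yields the suffix " (Car -> Engine -> Car)" that is
// appended to the error messages below; a single key yields "".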
/**
* Base class for all errors arising from misconfigured bindings.
*
* @exportedAs angular2/di_errors
*/
export class AbstractBindingError extends Error {
keys:List;
constructResolvingMessage:Function;
message;
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key, constructResolvingMessage:Function) {
super();
this.keys = [key];
this.constructResolvingMessage = constructResolvingMessage;
this.message = this.constructResolvingMessage(this.keys);
}
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
addKey(key) {
ListWrapper.push(this.keys, key);
this.message = this.constructResolvingMessage(this.keys);
}
toString() {
return this.message;
}
}
/**
* Thrown when trying to retrieve a dependency by `Key` from {@link Injector}, but the {@link Injector} does not have a
* {@link Binding} for {@link Key}.
*
* @exportedAs angular2/di_errors
*/
export class NoBindingError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key) {
super(key, function (keys:List) {
var first = stringify(ListWrapper.first(keys).token);
return `No provider for ${first}!${constructResolvingPath(keys)}`;
});
}
}
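// Example (illustrative, matching the injector specs):
//
//   Injector.resolveAndCreate([]).get('NonExisting');
//   // throws NoBindingError: "No provider for NonExisting!"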
/**
* Thrown when trying to retrieve an async {@link Binding} using the sync API.
*
* ## Example
*
* ```javascript
* var injector = Injector.resolveAndCreate([
* bind(Number).toAsyncFactory(() => {
* return new Promise((resolve) => resolve(1 + 2));
* }),
* bind(String).toFactory((v) => { return "Value: " + v; }, [String])
* ]);
*
* injector.asyncGet(String).then((v) => expect(v).toBe('Value: 3'));
* expect(() => {
* injector.get(String);
* }).toThrowError(AsyncBindingError);
* ```
*
* The above example throws because `String` depends on `Number`, which is async. If any binding in the dependency
* graph is async then the graph can only be retrieved using the `asyncGet` API.
*
* @exportedAs angular2/di_errors
*/
export class AsyncBindingError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key) {
super(key, function (keys:List) {
var first = stringify(ListWrapper.first(keys).token);
return `Cannot instantiate ${first} synchronously. ` +
`It is provided as a promise!${constructResolvingPath(keys)}`;
});
}
}
/**
* Thrown when dependencies form a cycle.
*
* ## Example:
*
* ```javascript
* class A {
* constructor(b:B) {}
* }
* class B {
* constructor(a:A) {}
* }
* ```
*
* Retrieving `A` or `B` throws a `CyclicDependencyError` as the graph above cannot be constructed.
*
* @exportedAs angular2/di_errors
*/
export class CyclicDependencyError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key) {
super(key, function (keys:List) {
return `Cannot instantiate cyclic dependency!${constructResolvingPath(keys)}`;
});
}
}
/**
* Thrown when constructing a type throws an error.
*
* The `InstantiationError` class contains the original error plus the dependency graph which caused this object to be
* instantiated.
*
* @exportedAs angular2/di_errors
*/
export class InstantiationError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(originalException, key) {
super(key, function (keys:List) {
var first = stringify(ListWrapper.first(keys).token);
return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +
` ORIGINAL ERROR: ${originalException}`;
});
}
}
/**
* Thrown when an object other than {@link Binding} (or `Type`) is passed to {@link Injector} creation.
*
* @exportedAs angular2/di_errors
*/
export class InvalidBindingError extends Error {
message:string;
constructor(binding) {
super();
this.message = `Invalid binding ${binding}`;
}
toString() {
return this.message;
}
}
/**
* Thrown when the class has no annotation information.
*
* Lack of annotation information prevents the {@link Injector} from determining which dependencies need to be injected into
* the constructor.
*
* @exportedAs angular2/di_errors
*/
export class NoAnnotationError extends Error {
message:string;
constructor(typeOrFunc) {
super();
this.message = `Cannot resolve all parameters for ${stringify(typeOrFunc)}.` +
` Make sure they all have valid type or annotations.`;
}
toString() {
return this.message;
}
}
| modules/angular2/src/di/exceptions.js | 1 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.0032412291038781404,
0.00038999508251436055,
0.00016289521590806544,
0.00018684826500248164,
0.0006795173976570368
]
|
{
"id": 4,
"code_window": [
"import {isBlank} from 'angular2/src/facade/lang';\n",
"import {describe, ddescribe, it, iit, expect, beforeEach} from 'angular2/test_lib';\n",
"import {Injector, Inject, InjectLazy, Optional, bind, ResolvedBinding} from 'angular2/di';\n",
"\n"
],
"labels": [
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"import {isBlank, BaseException} from 'angular2/src/facade/lang';\n"
],
"file_path": "modules/angular2/test/di/injector_spec.js",
"type": "replace",
"edit_start_line_idx": 0
} | import {Parser as TraceurParser} from 'traceur/src/syntax/Parser';
import {SyntaxErrorReporter} from 'traceur/src/util/SyntaxErrorReporter';
import {TypeName, ImportSpecifier, ImportedBinding, BindingIdentifier} from 'traceur/src/syntax/trees/ParseTrees';
import {PERIOD, IMPORT, STAR, AS, FROM, CLOSE_ANGLE, OPEN_ANGLE, COMMA, OPEN_CURLY, CLOSE_CURLY, COLON} from 'traceur/src/syntax/TokenType';
export class Parser extends TraceurParser {
constructor(file, errorReporter = new SyntaxErrorReporter(), options) {
super(file, errorReporter, options);
}
// TODO: add support for object type literals to traceur!
parseObjectType_() {
this.eat_(OPEN_CURLY);
do {
var identifier = this.eatId_();
this.eat_(COLON);
var type = this.parseNamedOrPredefinedType_();
var typeParameters = this.parseTypeParametersOpt_();
// TODO(misko): save the type information
} while (this.eatIf_(COMMA));
this.eat_(CLOSE_CURLY);
}
}
| tools/transpiler/src/parser.js | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00017224294424522668,
0.00016867202066350728,
0.0001658104738453403,
0.00016796265845187008,
0.0000026735206120065413
]
|
{
"id": 4,
"code_window": [
"import {isBlank} from 'angular2/src/facade/lang';\n",
"import {describe, ddescribe, it, iit, expect, beforeEach} from 'angular2/test_lib';\n",
"import {Injector, Inject, InjectLazy, Optional, bind, ResolvedBinding} from 'angular2/di';\n",
"\n"
],
"labels": [
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"import {isBlank, BaseException} from 'angular2/src/facade/lang';\n"
],
"file_path": "modules/angular2/test/di/injector_spec.js",
"type": "replace",
"edit_start_line_idx": 0
} | var perfUtil = require('angular2/src/test_lib/perf_util');
describe('ng2 largetable benchmark', function () {
var URL = 'benchmarks/src/largetable/largetable_benchmark.html';
afterEach(perfUtil.verifyNoBrowserErrors);
// Not yet implemented:
// 'ngBind',
// 'ngBindOnce',
// 'ngBindFn',
// 'ngBindFilter',
// 'interpolationFilter'
[
'interpolation',
'interpolationAttr',
'interpolationFn'
].forEach(function(benchmarkType) {
it('should log the ng stats with: ' + benchmarkType, function() {
console.log('executing for type', benchmarkType);
perfUtil.runClickBenchmark({
url: URL,
buttons: ['#ng2DestroyDom', '#ng2CreateDom'],
id: 'ng2.largetable.' + benchmarkType,
params: [{
name: 'rows',
value: 20,
scale: 'sqrt'
},{
name: 'columns',
value: 20,
scale: 'sqrt'
},{
name: 'benchmarkType',
value: benchmarkType
}]
});
});
});
it('should log the baseline stats', function() {
perfUtil.runClickBenchmark({
url: URL,
buttons: ['#baselineDestroyDom', '#baselineCreateDom'],
id: 'baseline.largetable',
params: [{
name: 'rows',
value: 100,
scale: 'sqrt'
},{
name: 'columns',
value: 20,
scale: 'sqrt'
},{
name: 'benchmarkType',
value: 'baseline'
}]
});
});
});
| modules/benchmarks/e2e_test/largetable_perf.es6 | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.0002458790841046721,
0.00018053618259727955,
0.00016471827984787524,
0.00017139526607934386,
0.00002691800909815356
]
|
{
"id": 4,
"code_window": [
"import {isBlank} from 'angular2/src/facade/lang';\n",
"import {describe, ddescribe, it, iit, expect, beforeEach} from 'angular2/test_lib';\n",
"import {Injector, Inject, InjectLazy, Optional, bind, ResolvedBinding} from 'angular2/di';\n",
"\n"
],
"labels": [
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"import {isBlank, BaseException} from 'angular2/src/facade/lang';\n"
],
"file_path": "modules/angular2/test/di/injector_spec.js",
"type": "replace",
"edit_start_line_idx": 0
} | export var Foo = 'FOO';
export var Bar = 'BAR';
| tools/transpiler/spec/foo.js | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00017352146096527576,
0.00017352146096527576,
0.00017352146096527576,
0.00017352146096527576,
0
]
|
{
"id": 5,
"code_window": [
"class Engine {\n",
"}\n",
"\n",
"class BrokenEngine {\n",
" constructor() {\n",
" throw \"Broken Engine\";\n",
" }\n",
"}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" throw new BaseException(\"Broken Engine\");\n"
],
"file_path": "modules/angular2/test/di/injector_spec.js",
"type": "replace",
"edit_start_line_idx": 9
} | import {isBlank} from 'angular2/src/facade/lang';
import {describe, ddescribe, it, iit, expect, beforeEach} from 'angular2/test_lib';
import {Injector, Inject, InjectLazy, Optional, bind, ResolvedBinding} from 'angular2/di';
class Engine {
}
class BrokenEngine {
constructor() {
throw "Broken Engine";
}
}
class DashboardSoftware {
}
class Dashboard {
constructor(software: DashboardSoftware) {}
}
class TurboEngine extends Engine {
}
class Car {
engine:Engine;
constructor(engine:Engine) {
this.engine = engine;
}
}
class CarWithLazyEngine {
engineFactory;
constructor(@InjectLazy(Engine) engineFactory) {
this.engineFactory = engineFactory;
}
}
class CarWithOptionalEngine {
engine;
constructor(@Optional() engine:Engine) {
this.engine = engine;
}
}
class CarWithDashboard {
engine:Engine;
dashboard:Dashboard;
constructor(engine:Engine, dashboard:Dashboard) {
this.engine = engine;
this.dashboard = dashboard;
}
}
class SportsCar extends Car {
engine:Engine;
constructor(engine:Engine) {
super(engine);
}
}
class CarWithInject {
engine:Engine;
constructor(@Inject(TurboEngine) engine:Engine) {
this.engine = engine;
}
}
class CyclicEngine {
constructor(car:Car) {}
}
class NoAnnotations {
constructor(secretDependency) {}
}
export function main() {
describe('injector', function () {
it('should instantiate a class without dependencies', function () {
var injector = Injector.resolveAndCreate([Engine]);
var engine = injector.get(Engine);
expect(engine).toBeAnInstanceOf(Engine);
});
it('should resolve dependencies based on type information', function () {
var injector = Injector.resolveAndCreate([Engine, Car]);
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(Car);
expect(car.engine).toBeAnInstanceOf(Engine);
});
it('should resolve dependencies based on @Inject annotation', function () {
var injector = Injector.resolveAndCreate([TurboEngine, Engine, CarWithInject]);
var car = injector.get(CarWithInject);
expect(car).toBeAnInstanceOf(CarWithInject);
expect(car.engine).toBeAnInstanceOf(TurboEngine);
});
it('should throw when no type and not @Inject', function () {
expect(() => Injector.resolveAndCreate([NoAnnotations])).toThrowError(
'Cannot resolve all parameters for NoAnnotations. '+
'Make sure they all have valid type or annotations.');
});
it('should cache instances', function () {
var injector = Injector.resolveAndCreate([Engine]);
var e1 = injector.get(Engine);
var e2 = injector.get(Engine);
expect(e1).toBe(e2);
});
it('should bind to a value', function () {
var injector = Injector.resolveAndCreate([
bind(Engine).toValue("fake engine")
]);
var engine = injector.get(Engine);
expect(engine).toEqual("fake engine");
});
it('should bind to a factory', function () {
function sportsCarFactory(e:Engine) {
return new SportsCar(e);
}
var injector = Injector.resolveAndCreate([
Engine,
bind(Car).toFactory(sportsCarFactory)
]);
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(SportsCar);
expect(car.engine).toBeAnInstanceOf(Engine);
});
it('should bind to an alias', function() {
var injector = Injector.resolveAndCreate([
Engine,
bind(SportsCar).toClass(SportsCar),
bind(Car).toAlias(SportsCar)
]);
var car = injector.get(Car);
var sportsCar = injector.get(SportsCar);
expect(car).toBeAnInstanceOf(SportsCar);
expect(car).toBe(sportsCar);
});
it('should throw when the aliased binding does not exist', function () {
var injector = Injector.resolveAndCreate([
bind('car').toAlias(SportsCar)
]);
expect(() => injector.get('car')).toThrowError('No provider for SportsCar! (car -> SportsCar)');
});
it('should support overriding factory dependencies', function () {
var injector = Injector.resolveAndCreate([
Engine,
bind(Car).toFactory((e) => new SportsCar(e), [Engine])
]);
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(SportsCar);
expect(car.engine).toBeAnInstanceOf(Engine);
});
it('should support optional dependencies', function () {
var injector = Injector.resolveAndCreate([
CarWithOptionalEngine
]);
var car = injector.get(CarWithOptionalEngine);
expect(car.engine).toEqual(null);
});
it("should flatten passed-in bindings", function () {
var injector = Injector.resolveAndCreate([
[[Engine, Car]]
]);
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(Car);
});
it("should use the last binding "+
"when there are mutliple bindings for same token", function () {
var injector = Injector.resolveAndCreate([
bind(Engine).toClass(Engine),
bind(Engine).toClass(TurboEngine)
]);
expect(injector.get(Engine)).toBeAnInstanceOf(TurboEngine);
});
it('should use non-type tokens', function () {
var injector = Injector.resolveAndCreate([
bind('token').toValue('value')
]);
expect(injector.get('token')).toEqual('value');
});
it('should throw when given invalid bindings', function () {
expect(() => Injector.resolveAndCreate(["blah"])).toThrowError('Invalid binding blah');
expect(() => Injector.resolveAndCreate([bind("blah")])).toThrowError('Invalid binding blah');
});
it('should provide itself', function () {
var parent = Injector.resolveAndCreate([]);
var child = parent.resolveAndCreateChild([]);
expect(child.get(Injector)).toBe(child);
});
it('should throw when no provider defined', function () {
var injector = Injector.resolveAndCreate([]);
expect(() => injector.get('NonExisting')).toThrowError('No provider for NonExisting!');
});
it('should show the full path when no provider', function () {
var injector = Injector.resolveAndCreate([CarWithDashboard, Engine, Dashboard]);
expect(() => injector.get(CarWithDashboard)).
toThrowError('No provider for DashboardSoftware! (CarWithDashboard -> Dashboard -> DashboardSoftware)');
});
it('should throw when trying to instantiate a cyclic dependency', function () {
var injector = Injector.resolveAndCreate([
Car,
bind(Engine).toClass(CyclicEngine)
]);
expect(() => injector.get(Car))
.toThrowError('Cannot instantiate cyclic dependency! (Car -> Engine -> Car)');
expect(() => injector.asyncGet(Car))
.toThrowError('Cannot instantiate cyclic dependency! (Car -> Engine -> Car)');
});
it('should show the full path when error happens in a constructor', function () {
var injector = Injector.resolveAndCreate([
Car,
bind(Engine).toClass(BrokenEngine)
]);
try {
injector.get(Car);
throw "Must throw";
} catch (e) {
expect(e.message).toContain("Error during instantiation of Engine! (Car -> Engine)");
}
});
it('should instantiate an object after a failed attempt', function () {
var isBroken = true;
var injector = Injector.resolveAndCreate([
Car,
bind(Engine).toFactory(() => isBroken ? new BrokenEngine() : new Engine())
]);
expect(() => injector.get(Car)).toThrowError(new RegExp("Error"));
isBroken = false;
expect(injector.get(Car)).toBeAnInstanceOf(Car);
});
it('should support null values', () => {
var injector = Injector.resolveAndCreate([bind('null').toValue(null)]);
expect(injector.get('null')).toBe(null);
});
describe("default bindings", function () {
it("should be used when no matching binding found", function () {
var injector = Injector.resolveAndCreate([], {defaultBindings: true});
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(Car);
});
it("should use the matching binding when it is available", function () {
var injector = Injector.resolveAndCreate([
bind(Car).toClass(SportsCar)
], {defaultBindings: true});
var car = injector.get(Car);
expect(car).toBeAnInstanceOf(SportsCar);
});
});
describe("child", function () {
it('should load instances from parent injector', function () {
var parent = Injector.resolveAndCreate([Engine]);
var child = parent.resolveAndCreateChild([]);
var engineFromParent = parent.get(Engine);
var engineFromChild = child.get(Engine);
expect(engineFromChild).toBe(engineFromParent);
});
it("should not use the child bindings when resolving the dependencies of a parent binding", function () {
var parent = Injector.resolveAndCreate([
Car, Engine
]);
var child = parent.resolveAndCreateChild([
bind(Engine).toClass(TurboEngine)
]);
var carFromChild = child.get(Car);
expect(carFromChild.engine).toBeAnInstanceOf(Engine);
});
it('should create new instance in a child injector', function () {
var parent = Injector.resolveAndCreate([Engine]);
var child = parent.resolveAndCreateChild([
bind(Engine).toClass(TurboEngine)
]);
var engineFromParent = parent.get(Engine);
var engineFromChild = child.get(Engine);
expect(engineFromParent).not.toBe(engineFromChild);
expect(engineFromChild).toBeAnInstanceOf(TurboEngine);
});
it("should create child injectors without default bindings", function () {
var parent = Injector.resolveAndCreate([], {defaultBindings: true});
var child = parent.resolveAndCreateChild([]);
// child delegates to parent the creation of Car
var childCar = child.get(Car);
var parentCar = parent.get(Car);
expect(childCar).toBe(parentCar);
});
});
describe("lazy", function () {
it("should create dependencies lazily", function () {
var injector = Injector.resolveAndCreate([
Engine,
CarWithLazyEngine
]);
var car = injector.get(CarWithLazyEngine);
expect(car.engineFactory()).toBeAnInstanceOf(Engine);
});
it("should cache instance created lazily", function () {
var injector = Injector.resolveAndCreate([
Engine,
CarWithLazyEngine
]);
var car = injector.get(CarWithLazyEngine);
var e1 = car.engineFactory();
var e2 = car.engineFactory();
expect(e1).toBe(e2);
});
});
describe('resolve', function() {
it('should resolve and flatten', function() {
var bindings = Injector.resolve([Engine, [BrokenEngine]]);
bindings.forEach(function(b) {
if (isBlank(b)) return; // the result is a sparse array
expect(b instanceof ResolvedBinding).toBe(true);
});
});
});
});
}
| modules/angular2/test/di/injector_spec.js | 1 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.9993002414703369,
0.7417587041854858,
0.0001705317699816078,
0.9945691823959351,
0.43175703287124634
]
|
{
"id": 5,
"code_window": [
"class Engine {\n",
"}\n",
"\n",
"class BrokenEngine {\n",
" constructor() {\n",
" throw \"Broken Engine\";\n",
" }\n",
"}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" throw new BaseException(\"Broken Engine\");\n"
],
"file_path": "modules/angular2/test/di/injector_spec.js",
"type": "replace",
"edit_start_line_idx": 9
} | #!/bin/bash
set -e
CPUPATH=/sys/devices/system/cpu
WAKE_LOCK_NAME=ngperf
set_governor() {
echo "Setting CPU frequency governor to \"$1\""
adb shell 'for f in '$CPUPATH'/cpu*/cpufreq/scaling_governor ; do echo '$1' > $f; done'
}
wake_lock() {
echo "Setting wake lock $WAKE_LOCK_NAME"
adb shell "echo $WAKE_LOCK_NAME > /sys/power/wake_lock"
}
wake_unlock() {
echo "Removing wake lock $WAKE_LOCK_NAME"
adb shell "echo $WAKE_LOCK_NAME > /sys/power/wake_unlock"
}
case "$1" in
(performance)
set_governor "performance"
;;
(powersave)
set_governor "powersave"
;;
(ondemand)
set_governor "ondemand"
;;
(wakelock)
wake_lock
;;
(wakeunlock)
wake_unlock
;;
(*)
echo "Usage: $0 performance|powersave|ondemand|wakelock|wakeunlock"
exit 1
;;
esac
| scripts/ci/android_cpu.sh | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00018687233387026936,
0.00017597066471353173,
0.00016987773415166885,
0.000174868997419253,
0.000005784513632534072
]
|
{
"id": 5,
"code_window": [
"class Engine {\n",
"}\n",
"\n",
"class BrokenEngine {\n",
" constructor() {\n",
" throw \"Broken Engine\";\n",
" }\n",
"}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" throw new BaseException(\"Broken Engine\");\n"
],
"file_path": "modules/angular2/test/di/injector_spec.js",
"type": "replace",
"edit_start_line_idx": 9
} | var perfUtil = require('angular2/src/test_lib/perf_util');
describe('ng2 compiler benchmark', function () {
var URL = 'benchmarks/src/compiler/compiler_benchmark.html';
afterEach(perfUtil.verifyNoBrowserErrors);
it('should log withBindings stats', function(done) {
perfUtil.runBenchmark({
url: URL,
id: 'ng2.compile.withBindings',
params: [{
name: 'elements', value: 150, scale: 'linear'
}],
work: function() {
browser.executeScript('document.querySelector("#compileWithBindings").click()');
browser.sleep(500);
}
}).then(done, done.fail);
});
it('should log noBindings stats', function(done) {
perfUtil.runBenchmark({
url: URL,
id: 'ng2.compile.noBindings',
params: [{
name: 'elements', value: 150, scale: 'linear'
}],
work: function() {
browser.executeScript('document.querySelector("#compileNoBindings").click()');
browser.sleep(500);
}
}).then(done, done.fail);
});
});
| modules/benchmarks/e2e_test/compiler_perf.es6 | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00017804316303227097,
0.00017199023568537086,
0.0001695686369203031,
0.0001701745786704123,
0.0000035071846014034236
]
|
{
"id": 5,
"code_window": [
"class Engine {\n",
"}\n",
"\n",
"class BrokenEngine {\n",
" constructor() {\n",
" throw \"Broken Engine\";\n",
" }\n",
"}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" throw new BaseException(\"Broken Engine\");\n"
],
"file_path": "modules/angular2/test/di/injector_spec.js",
"type": "replace",
"edit_start_line_idx": 9
} | import {
afterEach,
AsyncTestCompleter,
beforeEach,
ddescribe,
describe,
expect,
iit,
inject,
it,
xit,
} from 'angular2/test_lib';
import { ListWrapper } from 'angular2/src/facade/collection';
import { PromiseWrapper } from 'angular2/src/facade/async';
import { Json, isBlank, isPresent } from 'angular2/src/facade/lang';
import {
WebDriverExtension, IOsDriverExtension,
WebDriverAdapter, Injector, bind
} from 'benchpress/common';
import { TraceEventFactory } from '../trace_event_factory';
export function main() {
describe('ios driver extension', () => {
var log;
var extension;
var normEvents = new TraceEventFactory('timeline', 'pid0');
function createExtension(perfRecords = null) {
if (isBlank(perfRecords)) {
perfRecords = [];
}
log = [];
extension = Injector.resolveAndCreate([
IOsDriverExtension.BINDINGS,
bind(WebDriverAdapter).toValue(new MockDriverAdapter(log, perfRecords))
]).get(IOsDriverExtension);
return extension;
}
it('should throw on forcing gc', () => {
expect( () => createExtension().gc() ).toThrowError('Force GC is not supported on iOS');
});
it('should mark the timeline via console.time()', inject([AsyncTestCompleter], (async) => {
createExtension().timeBegin('someName').then( (_) => {
expect(log).toEqual([['executeScript', `console.time('someName');`]]);
async.done();
});
}));
it('should mark the timeline via console.timeEnd()', inject([AsyncTestCompleter], (async) => {
createExtension().timeEnd('someName').then( (_) => {
expect(log).toEqual([['executeScript', `console.timeEnd('someName');`]]);
async.done();
});
}));
it('should mark the timeline via console.time() and console.timeEnd()', inject([AsyncTestCompleter], (async) => {
createExtension().timeEnd('name1', 'name2').then( (_) => {
expect(log).toEqual([['executeScript', `console.timeEnd('name1');console.time('name2');`]]);
async.done();
});
}));
describe('readPerfLog', () => {
it('should execute a dummy script before reading them', inject([AsyncTestCompleter], (async) => {
// TODO(tbosch): This seems to be a bug in ChromeDriver:
// Sometimes it does not report the newest events of the performance log
// to the WebDriver client unless a script is executed...
createExtension([]).readPerfLog().then( (_) => {
expect(log).toEqual([ [ 'executeScript', '1+1' ], [ 'logs', 'performance' ] ]);
async.done();
});
}));
it('should report FunctionCall records as "script"', inject([AsyncTestCompleter], (async) => {
createExtension([
durationRecord('FunctionCall', 1, 5)
]).readPerfLog().then( (events) => {
expect(events).toEqual([
normEvents.start('script', 1),
normEvents.end('script', 5)
]);
async.done();
});
}));
it('should ignore FunctionCalls from webdriver', inject([AsyncTestCompleter], (async) => {
createExtension([
internalScriptRecord(1, 5)
]).readPerfLog().then( (events) => {
expect(events).toEqual([]);
async.done();
});
}));
it('should report begin time', inject([AsyncTestCompleter], (async) => {
createExtension([
timeBeginRecord('someName', 12)
]).readPerfLog().then( (events) => {
expect(events).toEqual([
normEvents.markStart('someName', 12)
]);
async.done();
});
}));
it('should report end timestamps', inject([AsyncTestCompleter], (async) => {
createExtension([
timeEndRecord('someName', 12)
]).readPerfLog().then( (events) => {
expect(events).toEqual([
normEvents.markEnd('someName', 12)
]);
async.done();
});
}));
['RecalculateStyles', 'Layout', 'UpdateLayerTree', 'Paint', 'Rasterize', 'CompositeLayers'].forEach( (recordType) => {
it(`should report ${recordType}`, inject([AsyncTestCompleter], (async) => {
createExtension([
durationRecord(recordType, 0, 1)
]).readPerfLog().then( (events) => {
expect(events).toEqual([
normEvents.start('render', 0),
normEvents.end('render', 1),
]);
async.done();
});
}));
});
it('should walk children', inject([AsyncTestCompleter], (async) => {
createExtension([
durationRecord('FunctionCall', 1, 5, [
timeBeginRecord('someName', 2)
])
]).readPerfLog().then( (events) => {
expect(events).toEqual([
normEvents.start('script', 1),
normEvents.markStart('someName', 2),
normEvents.end('script', 5)
]);
async.done();
});
}));
it('should match safari browsers', () => {
expect(createExtension().supports({
'browserName': 'safari'
})).toBe(true);
expect(createExtension().supports({
'browserName': 'Safari'
})).toBe(true);
});
});
});
}
function timeBeginRecord(name, time) {
return {
'type': 'Time',
'startTime': time,
'data': {
'message': name
}
};
}
function timeEndRecord(name, time) {
return {
'type': 'TimeEnd',
'startTime': time,
'data': {
'message': name
}
};
}
function durationRecord(type, startTime, endTime, children = null) {
if (isBlank(children)) {
children = [];
}
return {
'type': type,
'startTime': startTime,
'endTime': endTime,
'children': children
};
}
function internalScriptRecord(startTime, endTime) {
return {
'type': 'FunctionCall',
'startTime': startTime,
'endTime': endTime,
'data': {
'scriptName': 'InjectedScript'
}
};
}
class MockDriverAdapter extends WebDriverAdapter {
_log:List;
_perfRecords:List;
constructor(log, perfRecords) {
super();
this._log = log;
this._perfRecords = perfRecords;
}
executeScript(script) {
ListWrapper.push(this._log, ['executeScript', script]);
return PromiseWrapper.resolve(null);
}
logs(type) {
ListWrapper.push(this._log, ['logs', type]);
if (type === 'performance') {
return PromiseWrapper.resolve(this._perfRecords.map(function(record) {
return {
'message': Json.stringify({
'message': {
'method': 'Timeline.eventRecorded',
'params': {
'record': record
}
}
})
};
}));
} else {
return null;
}
}
}
| modules/benchpress/test/webdriver/ios_driver_extension_spec.js | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.005255567375570536,
0.0003764960856642574,
0.00016764426254667342,
0.00017201824812218547,
0.0009959455346688628
]
|
{
"id": 6,
"code_window": [
" throw \"Must throw\";\n",
" } catch (e) {\n",
" expect(e.message).toContain(\"Error during instantiation of Engine! (Car -> Engine)\");\n",
" }\n",
" });\n",
"\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
" expect(e.cause instanceof BaseException).toBeTruthy();\n",
" expect(e.causeKey.token).toEqual(Engine);\n"
],
"file_path": "modules/angular2/test/di/injector_spec.js",
"type": "add",
"edit_start_line_idx": 253
} | import {ListWrapper, List} from 'angular2/src/facade/collection';
import {stringify} from 'angular2/src/facade/lang';
function findFirstClosedCycle(keys:List) {
var res = [];
for(var i = 0; i < keys.length; ++i) {
if (ListWrapper.contains(res, keys[i])) {
ListWrapper.push(res, keys[i]);
return res;
} else {
ListWrapper.push(res, keys[i]);
}
}
return res;
}
function constructResolvingPath(keys:List) {
if (keys.length > 1) {
var reversed = findFirstClosedCycle(ListWrapper.reversed(keys));
var tokenStrs = ListWrapper.map(reversed, (k) => stringify(k.token));
return " (" + tokenStrs.join(' -> ') + ")";
} else {
return "";
}
}
/**
* Base class for all errors arising from misconfigured bindings.
*
* @exportedAs angular2/di_errors
*/
export class AbstractBindingError extends Error {
keys:List;
constructResolvingMessage:Function;
message;
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key, constructResolvingMessage:Function) {
super();
this.keys = [key];
this.constructResolvingMessage = constructResolvingMessage;
this.message = this.constructResolvingMessage(this.keys);
}
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
addKey(key) {
ListWrapper.push(this.keys, key);
this.message = this.constructResolvingMessage(this.keys);
}
toString() {
return this.message;
}
}
/**
* Thrown when trying to retrieve a dependency by `Key` from {@link Injector}, but the {@link Injector} does not have a
* {@link Binding} for {@link Key}.
*
* @exportedAs angular2/di_errors
*/
export class NoBindingError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key) {
super(key, function (keys:List) {
var first = stringify(ListWrapper.first(keys).token);
return `No provider for ${first}!${constructResolvingPath(keys)}`;
});
}
}
/**
* Thrown when trying to retrieve an async {@link Binding} using the sync API.
*
* ## Example
*
* ```javascript
* var injector = Injector.resolveAndCreate([
* bind(Number).toAsyncFactory(() => {
* return new Promise((resolve) => resolve(1 + 2));
* }),
* bind(String).toFactory((v) => { return "Value: " + v; }, [String])
* ]);
*
* injector.asyncGet(String).then((v) => expect(v).toBe('Value: 3'));
* expect(() => {
* injector.get(String);
* }).toThrowError(AsyncBindingError);
* ```
*
* The above example throws because `String` depends on `Number`, which is async. If any binding in the dependency
* graph is async then the graph can only be retrieved using the `asyncGet` API.
*
* @exportedAs angular2/di_errors
*/
export class AsyncBindingError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key) {
super(key, function (keys:List) {
var first = stringify(ListWrapper.first(keys).token);
return `Cannot instantiate ${first} synchronously. ` +
`It is provided as a promise!${constructResolvingPath(keys)}`;
});
}
}
/**
* Thrown when dependencies form a cycle.
*
* ## Example:
*
* ```javascript
* class A {
* constructor(b:B) {}
* }
* class B {
* constructor(a:A) {}
* }
* ```
*
* Retrieving `A` or `B` throws a `CyclicDependencyError` as the graph above cannot be constructed.
*
* @exportedAs angular2/di_errors
*/
export class CyclicDependencyError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(key) {
super(key, function (keys:List) {
return `Cannot instantiate cyclic dependency!${constructResolvingPath(keys)}`;
});
}
}
/**
* Thrown when constructing a type throws an error.
*
* The `InstantiationError` class contains the original error plus the dependency graph which caused this object to be
* instantiated.
*
* @exportedAs angular2/di_errors
*/
export class InstantiationError extends AbstractBindingError {
// TODO(tbosch): Can't do key:Key as this results in a circular dependency!
constructor(originalException, key) {
super(key, function (keys:List) {
var first = stringify(ListWrapper.first(keys).token);
return `Error during instantiation of ${first}!${constructResolvingPath(keys)}.` +
` ORIGINAL ERROR: ${originalException}`;
});
}
}
/**
* Thrown when an object other than {@link Binding} (or `Type`) is passed to {@link Injector} creation.
*
* @exportedAs angular2/di_errors
*/
export class InvalidBindingError extends Error {
message:string;
constructor(binding) {
super();
this.message = `Invalid binding ${binding}`;
}
toString() {
return this.message;
}
}
/**
* Thrown when the class has no annotation information.
*
* Lack of annotation information prevents the {@link Injector} from determining which dependencies need to be injected into
* the constructor.
*
* @exportedAs angular2/di_errors
*/
export class NoAnnotationError extends Error {
message:string;
constructor(typeOrFunc) {
super();
this.message = `Cannot resolve all parameters for ${stringify(typeOrFunc)}.` +
` Make sure they all have valid type or annotations.`;
}
toString() {
return this.message;
}
}
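// Example (illustrative, matching the injector specs): a constructor parameter
// with neither a type annotation nor @Inject cannot be resolved.
//
//   class NoAnnotations {
//     constructor(secretDependency) {}
//   }
//   Injector.resolveAndCreate([NoAnnotations]); // throws NoAnnotationError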
| modules/angular2/src/di/exceptions.js | 1 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00377853331156075,
0.0008055566577240825,
0.0001623435236979276,
0.00017742678755894303,
0.0010813478147611022
]
|
{
"id": 6,
"code_window": [
" throw \"Must throw\";\n",
" } catch (e) {\n",
" expect(e.message).toContain(\"Error during instantiation of Engine! (Car -> Engine)\");\n",
" }\n",
" });\n",
"\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
" expect(e.cause instanceof BaseException).toBeTruthy();\n",
" expect(e.causeKey.token).toEqual(Engine);\n"
],
"file_path": "modules/angular2/test/di/injector_spec.js",
"type": "add",
"edit_start_line_idx": 253
} | modules/angular2/docs/core/06_viewport_directive.md | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00017998336988966912,
0.00017998336988966912,
0.00017998336988966912,
0.00017998336988966912,
0
]
|
{
"id": 6,
"code_window": [
" throw \"Must throw\";\n",
" } catch (e) {\n",
" expect(e.message).toContain(\"Error during instantiation of Engine! (Car -> Engine)\");\n",
" }\n",
" });\n",
"\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
" expect(e.cause instanceof BaseException).toBeTruthy();\n",
" expect(e.causeKey.token).toEqual(Engine);\n"
],
"file_path": "modules/angular2/test/di/injector_spec.js",
"type": "add",
"edit_start_line_idx": 253
} | import {describe, it, expect, beforeEach, ddescribe, iit, xit, el} from 'angular2/test_lib';
import {
ComponentUrlMapper,
RuntimeComponentUrlMapper
} from 'angular2/src/core/compiler/component_url_mapper';
export function main() {
describe('RuntimeComponentUrlMapper', () => {
it('should return the registered URL', () => {
var url = 'http://path/to/component';
var mapper = new RuntimeComponentUrlMapper();
mapper.setComponentUrl(SomeComponent, url);
expect(mapper.getUrl(SomeComponent)).toEqual(url);
});
it('should fallback to ComponentUrlMapper', () => {
var mapper = new ComponentUrlMapper();
var runtimeMapper = new RuntimeComponentUrlMapper();
expect(runtimeMapper.getUrl(SomeComponent)).toEqual(mapper.getUrl(SomeComponent));
});
});
}
class SomeComponent {}
| modules/angular2/test/core/compiler/component_url_mapper_spec.js | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00017622414452489465,
0.00017180725990328938,
0.00016856110596563667,
0.0001706365292193368,
0.00000323609833685623
]
|
{
"id": 6,
"code_window": [
" throw \"Must throw\";\n",
" } catch (e) {\n",
" expect(e.message).toContain(\"Error during instantiation of Engine! (Car -> Engine)\");\n",
" }\n",
" });\n",
"\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
" expect(e.cause instanceof BaseException).toBeTruthy();\n",
" expect(e.causeKey.token).toEqual(Engine);\n"
],
"file_path": "modules/angular2/test/di/injector_spec.js",
"type": "add",
"edit_start_line_idx": 253
} | export * from './foo';
export {Bar1, Bar2} from './bar';
| tools/transpiler/spec/export.js | 0 | https://github.com/angular/angular/commit/e23004df52421a18628f1f45774781bd171f12a2 | [
0.00017285811190959066,
0.00017285811190959066,
0.00017285811190959066,
0.00017285811190959066,
0
]
|
{
"id": 0,
"code_window": [
" indexview=MyIndexView,\n",
" security_manager_class=app.config.get(\"CUSTOM_SECURITY_MANAGER\"))\n",
"\n",
"sm = appbuilder.sm\n",
"\n",
"get_session = appbuilder.get_session\n",
"results_backend = app.config.get(\"RESULTS_BACKEND\")\n",
"\n",
"# Registering sources\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/__init__.py",
"type": "replace",
"edit_start_line_idx": 156
} | # pylint: disable=invalid-unary-operand-type
from collections import OrderedDict
import json
import logging
from copy import deepcopy
from datetime import datetime, timedelta
from six import string_types
from multiprocessing import Pool
import requests
import sqlalchemy as sa
from sqlalchemy import (
Column, Integer, String, ForeignKey, Text, Boolean,
DateTime, or_, and_,
)
from sqlalchemy.orm import backref, relationship
from dateutil.parser import parse as dparse
from pydruid.client import PyDruid
from pydruid.utils.aggregators import count
from pydruid.utils.filters import Dimension, Filter
from pydruid.utils.postaggregator import (
Postaggregator, Quantile, Quantiles, Field, Const, HyperUniqueCardinality,
)
from pydruid.utils.having import Aggregation
from flask import Markup, escape
from flask_appbuilder.models.decorators import renders
from flask_appbuilder import Model
from flask_babel import lazy_gettext as _
from superset import conf, db, import_util, utils, sm, get_session
from superset.utils import (
flasher, MetricPermException, DimSelector, DTTM_ALIAS
)
from superset.connectors.base.models import BaseDatasource, BaseColumn, BaseMetric
from superset.models.helpers import AuditMixinNullable, QueryResult, set_perm
DRUID_TZ = conf.get("DRUID_TZ")
# Function wrapper because bound methods cannot
# be passed to processes
def _fetch_metadata_for(datasource):
return datasource.latest_metadata()
class JavascriptPostAggregator(Postaggregator):
def __init__(self, name, field_names, function):
self.post_aggregator = {
'type': 'javascript',
'fieldNames': field_names,
'name': name,
'function': function,
}
self.name = name
class CustomPostAggregator(Postaggregator):
"""A way to allow users to specify completely custom PostAggregators"""
def __init__(self, name, post_aggregator):
self.name = name
self.post_aggregator = post_aggregator
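# Illustrative sketch (not part of the original file): wrapping a raw Druid
# arithmetic post-aggregation spec. The metric names 'clicks' and 'views' are
# assumptions for the example.
#
#   ctr = CustomPostAggregator('ctr', {
#       'type': 'arithmetic',
#       'name': 'ctr',
#       'fn': '/',
#       'fields': [
#           {'type': 'fieldAccess', 'fieldName': 'clicks'},
#           {'type': 'fieldAccess', 'fieldName': 'views'},
#       ],
#   })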
class DruidCluster(Model, AuditMixinNullable):
"""ORM object referencing the Druid clusters"""
__tablename__ = 'clusters'
type = "druid"
id = Column(Integer, primary_key=True)
verbose_name = Column(String(250), unique=True)
# short unique name, used in permissions
cluster_name = Column(String(250), unique=True)
coordinator_host = Column(String(255))
coordinator_port = Column(Integer, default=8081)
coordinator_endpoint = Column(
String(255), default='druid/coordinator/v1/metadata')
broker_host = Column(String(255))
broker_port = Column(Integer, default=8082)
broker_endpoint = Column(String(255), default='druid/v2')
metadata_last_refreshed = Column(DateTime)
cache_timeout = Column(Integer)
def __repr__(self):
return self.verbose_name if self.verbose_name else self.cluster_name
def get_pydruid_client(self):
cli = PyDruid(
"http://{0}:{1}/".format(self.broker_host, self.broker_port),
self.broker_endpoint)
return cli
def get_datasources(self):
endpoint = (
"http://{obj.coordinator_host}:{obj.coordinator_port}/"
"{obj.coordinator_endpoint}/datasources"
).format(obj=self)
return json.loads(requests.get(endpoint).text)
def get_druid_version(self):
endpoint = (
"http://{obj.coordinator_host}:{obj.coordinator_port}/status"
).format(obj=self)
return json.loads(requests.get(endpoint).text)['version']
def refresh_datasources(
self,
datasource_name=None,
merge_flag=True,
refreshAll=True):
"""Refresh metadata of all datasources in the cluster
If ``datasource_name`` is specified, only that datasource is updated
"""
self.druid_version = self.get_druid_version()
ds_list = self.get_datasources()
blacklist = conf.get('DRUID_DATA_SOURCE_BLACKLIST', [])
ds_refresh = []
if not datasource_name:
ds_refresh = list(filter(lambda ds: ds not in blacklist, ds_list))
elif datasource_name not in blacklist and datasource_name in ds_list:
ds_refresh.append(datasource_name)
else:
return
self.refresh_async(ds_refresh, merge_flag, refreshAll)
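# Usage sketch (hypothetical cluster object and datasource name):
#   cluster.refresh_datasources(datasource_name='wikiticker', refreshAll=False)
# With refreshAll=False only datasources not yet known to Superset are added;
# existing ones are left untouched (see refresh_async below).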
def refresh_async(self, datasource_names, merge_flag, refreshAll):
"""
Fetches metadata for the specified datasources and
merges them into the Superset database
"""
session = db.session
ds_list = (
session.query(DruidDatasource)
.filter(or_(DruidDatasource.datasource_name == name
for name in datasource_names))
)
ds_map = {ds.name: ds for ds in ds_list}
for ds_name in datasource_names:
datasource = ds_map.get(ds_name, None)
if not datasource:
datasource = DruidDatasource(datasource_name=ds_name)
with session.no_autoflush:
session.add(datasource)
flasher(
"Adding new datasource [{}]".format(ds_name), 'success')
ds_map[ds_name] = datasource
elif refreshAll:
flasher(
"Refreshing datasource [{}]".format(ds_name), 'info')
else:
del ds_map[ds_name]
continue
datasource.cluster = self
datasource.merge_flag = merge_flag
session.flush()
# Prepare multithreaded execution
pool = Pool()
ds_refresh = list(ds_map.values())
metadata = pool.map(_fetch_metadata_for, ds_refresh)
pool.close()
pool.join()
for i in range(0, len(ds_refresh)):
datasource = ds_refresh[i]
cols = metadata[i]
col_objs_list = (
session.query(DruidColumn)
.filter(DruidColumn.datasource_name == datasource.datasource_name)
.filter(or_(DruidColumn.column_name == col for col in cols))
)
col_objs = {col.column_name: col for col in col_objs_list}
for col in cols:
if col == '__time': # skip the time column
continue
col_obj = col_objs.get(col, None)
if not col_obj:
col_obj = DruidColumn(
datasource_name=datasource.datasource_name,
column_name=col)
with session.no_autoflush:
session.add(col_obj)
datatype = cols[col]['type']
if datatype == 'STRING':
col_obj.groupby = True
col_obj.filterable = True
if datatype == 'hyperUnique' or datatype == 'thetaSketch':
col_obj.count_distinct = True
# Allow sum/min/max for long or double
if datatype == 'LONG' or datatype == 'DOUBLE':
col_obj.sum = True
col_obj.min = True
col_obj.max = True
col_obj.type = datatype
col_obj.datasource = datasource
datasource.generate_metrics_for(col_objs_list)
session.commit()
@property
def perm(self):
return "[{obj.cluster_name}].(id:{obj.id})".format(obj=self)
def get_perm(self):
return self.perm
@property
def name(self):
return self.verbose_name if self.verbose_name else self.cluster_name
@property
def unique_name(self):
return self.verbose_name if self.verbose_name else self.cluster_name
class DruidColumn(Model, BaseColumn):
"""ORM model for storing Druid datasource column metadata"""
__tablename__ = 'columns'
datasource_name = Column(
String(255),
ForeignKey('datasources.datasource_name'))
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('columns', cascade='all, delete-orphan'),
enable_typechecks=False)
dimension_spec_json = Column(Text)
export_fields = (
'datasource_name', 'column_name', 'is_active', 'type', 'groupby',
'count_distinct', 'sum', 'avg', 'max', 'min', 'filterable',
'description', 'dimension_spec_json'
)
def __repr__(self):
return self.column_name
@property
def expression(self):
return self.dimension_spec_json
@property
def dimension_spec(self):
if self.dimension_spec_json:
return json.loads(self.dimension_spec_json)
def get_metrics(self):
metrics = {}
metrics['count'] = DruidMetric(
metric_name='count',
verbose_name='COUNT(*)',
metric_type='count',
json=json.dumps({'type': 'count', 'name': 'count'})
)
# Somehow we need to reassign this for UDAFs
if self.type in ('DOUBLE', 'FLOAT'):
corrected_type = 'DOUBLE'
else:
corrected_type = self.type
if self.sum and self.is_num:
mt = corrected_type.lower() + 'Sum'
name = 'sum__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='sum',
verbose_name='SUM({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.avg and self.is_num:
mt = corrected_type.lower() + 'Avg'
name = 'avg__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='avg',
verbose_name='AVG({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.min and self.is_num:
mt = corrected_type.lower() + 'Min'
name = 'min__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='min',
verbose_name='MIN({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.max and self.is_num:
mt = corrected_type.lower() + 'Max'
name = 'max__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='max',
verbose_name='MAX({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.count_distinct:
name = 'count_distinct__' + self.column_name
if self.type == 'hyperUnique' or self.type == 'thetaSketch':
metrics[name] = DruidMetric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type=self.type,
json=json.dumps({
'type': self.type,
'name': name,
'fieldName': self.column_name
})
)
else:
metrics[name] = DruidMetric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type='count_distinct',
json=json.dumps({
'type': 'cardinality',
'name': name,
'fieldNames': [self.column_name]})
)
return metrics
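# For illustration: a numeric column named 'duration' with sum/min/max enabled
# would yield keys such as 'count', 'sum__duration', 'min__duration' and
# 'max__duration' (plus 'count_distinct__duration' when count_distinct is set).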
def generate_metrics(self):
"""Generate metrics based on the column metadata"""
metrics = self.get_metrics()
dbmetrics = (
db.session.query(DruidMetric)
.filter(DruidCluster.cluster_name == self.datasource.cluster_name)
.filter(DruidMetric.datasource_name == self.datasource_name)
.filter(or_(
DruidMetric.metric_name == m for m in metrics
))
)
dbmetrics = {metric.metric_name: metric for metric in dbmetrics}
for metric in metrics.values():
metric.datasource_name = self.datasource_name
if not dbmetrics.get(metric.metric_name, None):
db.session.add(metric)
@classmethod
def import_obj(cls, i_column):
def lookup_obj(lookup_column):
return db.session.query(DruidColumn).filter(
DruidColumn.datasource_name == lookup_column.datasource_name,
DruidColumn.column_name == lookup_column.column_name).first()
return import_util.import_simple_obj(db.session, i_column, lookup_obj)
class DruidMetric(Model, BaseMetric):
"""ORM object referencing Druid metrics for a datasource"""
__tablename__ = 'metrics'
datasource_name = Column(
String(255),
ForeignKey('datasources.datasource_name'))
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('metrics', cascade='all, delete-orphan'),
enable_typechecks=False)
json = Column(Text)
export_fields = (
'metric_name', 'verbose_name', 'metric_type', 'datasource_name',
'json', 'description', 'is_restricted', 'd3format'
)
@property
def expression(self):
return self.json
@property
def json_obj(self):
try:
obj = json.loads(self.json)
except Exception:
obj = {}
return obj
@property
def perm(self):
return (
"{parent_name}.[{obj.metric_name}](id:{obj.id})"
).format(obj=self,
parent_name=self.datasource.full_name
) if self.datasource else None
@classmethod
def import_obj(cls, i_metric):
def lookup_obj(lookup_metric):
return db.session.query(DruidMetric).filter(
DruidMetric.datasource_name == lookup_metric.datasource_name,
DruidMetric.metric_name == lookup_metric.metric_name).first()
return import_util.import_simple_obj(db.session, i_metric, lookup_obj)
class DruidDatasource(Model, BaseDatasource):
"""ORM object referencing Druid datasources (tables)"""
__tablename__ = 'datasources'
type = "druid"
query_langtage = "json"
cluster_class = DruidCluster
metric_class = DruidMetric
column_class = DruidColumn
baselink = "druiddatasourcemodelview"
# Columns
datasource_name = Column(String(255), unique=True)
is_hidden = Column(Boolean, default=False)
fetch_values_from = Column(String(100))
cluster_name = Column(
String(250), ForeignKey('clusters.cluster_name'))
cluster = relationship(
'DruidCluster', backref='datasources', foreign_keys=[cluster_name])
user_id = Column(Integer, ForeignKey('ab_user.id'))
owner = relationship(
sm.user_model,
backref=backref('datasources', cascade='all, delete-orphan'),
foreign_keys=[user_id])
export_fields = (
'datasource_name', 'is_hidden', 'description', 'default_endpoint',
'cluster_name', 'offset', 'cache_timeout', 'params'
)
@property
def database(self):
return self.cluster
@property
def connection(self):
return str(self.database)
@property
def num_cols(self):
return [c.column_name for c in self.columns if c.is_num]
@property
def name(self):
return self.datasource_name
@property
def schema(self):
ds_name = self.datasource_name or ''
name_pieces = ds_name.split('.')
if len(name_pieces) > 1:
return name_pieces[0]
else:
return None
@property
def schema_perm(self):
"""Returns schema permission if present, cluster one otherwise."""
return utils.get_schema_perm(self.cluster, self.schema)
def get_perm(self):
return (
"[{obj.cluster_name}].[{obj.datasource_name}]"
"(id:{obj.id})").format(obj=self)
@property
def link(self):
name = escape(self.datasource_name)
return Markup('<a href="{self.url}">{name}</a>').format(**locals())
@property
def full_name(self):
return utils.get_datasource_full_name(
self.cluster_name, self.datasource_name)
@property
def time_column_grains(self):
return {
"time_columns": [
'all', '5 seconds', '30 seconds', '1 minute',
'5 minutes', '1 hour', '6 hour', '1 day', '7 days',
'week', 'week_starting_sunday', 'week_ending_saturday',
'month',
],
"time_grains": ['now']
}
def __repr__(self):
return self.datasource_name
@renders('datasource_name')
def datasource_link(self):
url = "/superset/explore/{obj.type}/{obj.id}/".format(obj=self)
name = escape(self.datasource_name)
return Markup('<a href="{url}">{name}</a>'.format(**locals()))
def get_metric_obj(self, metric_name):
return [
m.json_obj for m in self.metrics
if m.metric_name == metric_name
][0]
@classmethod
def import_obj(cls, i_datasource, import_time=None):
"""Imports the datasource from the object to the database.
        Metrics, columns and the datasource will be overridden if they exist.
        This function can be used to import/export dashboards between multiple
        superset instances. Audit metadata isn't copied over.
"""
def lookup_datasource(d):
return db.session.query(DruidDatasource).join(DruidCluster).filter(
DruidDatasource.datasource_name == d.datasource_name,
DruidCluster.cluster_name == d.cluster_name,
).first()
def lookup_cluster(d):
return db.session.query(DruidCluster).filter_by(
cluster_name=d.cluster_name).one()
return import_util.import_datasource(
db.session, i_datasource, lookup_cluster, lookup_datasource,
import_time)
@staticmethod
def version_higher(v1, v2):
"""is v1 higher than v2
>>> DruidDatasource.version_higher('0.8.2', '0.9.1')
False
>>> DruidDatasource.version_higher('0.8.2', '0.6.1')
True
>>> DruidDatasource.version_higher('0.8.2', '0.8.2')
False
>>> DruidDatasource.version_higher('0.8.2', '0.9.BETA')
False
>>> DruidDatasource.version_higher('0.8.2', '0.9')
False
"""
def int_or_0(v):
try:
v = int(v)
except (TypeError, ValueError):
v = 0
return v
v1nums = [int_or_0(n) for n in v1.split('.')]
v2nums = [int_or_0(n) for n in v2.split('.')]
v1nums = (v1nums + [0, 0, 0])[:3]
v2nums = (v2nums + [0, 0, 0])[:3]
        return v1nums > v2nums
def latest_metadata(self):
"""Returns segment metadata from the latest segment"""
logging.info("Syncing datasource [{}]".format(self.datasource_name))
client = self.cluster.get_pydruid_client()
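        # time_boundary returns the min/max timestamps of the data
        # available for this datasource.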
results = client.time_boundary(datasource=self.datasource_name)
if not results:
return
max_time = results[0]['result']['maxTime']
max_time = dparse(max_time)
        # Query segmentMetadata for the past 7 days. On Druid < 0.8.2 the
        # interval has to end more than 1 day ago to exclude realtime
        # segments, which triggered a bug (fixed in druid 0.8.2).
# https://groups.google.com/forum/#!topic/druid-user/gVCqqspHqOQ
lbound = (max_time - timedelta(days=7)).isoformat()
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
rbound = (max_time - timedelta(1)).isoformat()
else:
rbound = max_time.isoformat()
segment_metadata = None
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=lbound + '/' + rbound,
merge=self.merge_flag,
analysisTypes=[])
except Exception as e:
logging.warning("Failed first attempt to get latest segment")
logging.exception(e)
if not segment_metadata:
# if no segments in the past 7 days, look at all segments
lbound = datetime(1901, 1, 1).isoformat()[:10]
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
rbound = datetime.now().isoformat()
else:
rbound = datetime(2050, 1, 1).isoformat()[:10]
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=lbound + '/' + rbound,
merge=self.merge_flag,
analysisTypes=[])
except Exception as e:
logging.warning("Failed 2nd attempt to get latest segment")
logging.exception(e)
if segment_metadata:
return segment_metadata[-1]['columns']
def generate_metrics(self):
self.generate_metrics_for(self.columns)
def generate_metrics_for(self, columns):
metrics = {}
for col in columns:
metrics.update(col.get_metrics())
dbmetrics = (
db.session.query(DruidMetric)
.filter(DruidCluster.cluster_name == self.cluster_name)
.filter(DruidMetric.datasource_name == self.datasource_name)
.filter(or_(DruidMetric.metric_name == m for m in metrics))
)
dbmetrics = {metric.metric_name: metric for metric in dbmetrics}
for metric in metrics.values():
metric.datasource_name = self.datasource_name
if not dbmetrics.get(metric.metric_name, None):
with db.session.no_autoflush:
db.session.add(metric)
@classmethod
def sync_to_db_from_config(
cls,
druid_config,
user,
cluster,
refresh=True):
"""Merges the ds config from druid_config into one stored in the db."""
session = db.session
datasource = (
session.query(cls)
.filter_by(datasource_name=druid_config['name'])
.first()
)
# Create a new datasource.
if not datasource:
datasource = cls(
datasource_name=druid_config['name'],
cluster=cluster,
owner=user,
changed_by_fk=user.id,
created_by_fk=user.id,
)
session.add(datasource)
elif not refresh:
return
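        # Sync dimensions: create a column entry for any dimension in the
        # Druid config that isn't in the database yet.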
dimensions = druid_config['dimensions']
col_objs = (
session.query(DruidColumn)
.filter(DruidColumn.datasource_name == druid_config['name'])
.filter(or_(DruidColumn.column_name == dim for dim in dimensions))
)
col_objs = {col.column_name: col for col in col_objs}
for dim in dimensions:
col_obj = col_objs.get(dim, None)
if not col_obj:
col_obj = DruidColumn(
datasource_name=druid_config['name'],
column_name=dim,
groupby=True,
filterable=True,
# TODO: fetch type from Hive.
type="STRING",
datasource=datasource,
)
session.add(col_obj)
# Import Druid metrics
metric_objs = (
session.query(DruidMetric)
.filter(DruidMetric.datasource_name == druid_config['name'])
.filter(or_(DruidMetric.metric_name == spec['name']
for spec in druid_config["metrics_spec"]))
)
metric_objs = {metric.metric_name: metric for metric in metric_objs}
for metric_spec in druid_config["metrics_spec"]:
metric_name = metric_spec["name"]
metric_type = metric_spec["type"]
metric_json = json.dumps(metric_spec)
if metric_type == "count":
metric_type = "longSum"
metric_json = json.dumps({
"type": "longSum",
"name": metric_name,
"fieldName": metric_name,
})
metric_obj = metric_objs.get(metric_name, None)
if not metric_obj:
metric_obj = DruidMetric(
metric_name=metric_name,
metric_type=metric_type,
verbose_name="%s(%s)" % (metric_type, metric_name),
datasource=datasource,
json=metric_json,
description=(
"Imported from the airolap config dir for %s" %
druid_config['name']),
)
session.add(metric_obj)
session.commit()
@staticmethod
def time_offset(granularity):
if granularity == 'week_ending_saturday':
return 6 * 24 * 3600 * 1000 # 6 days
return 0
# uses https://en.wikipedia.org/wiki/ISO_8601
# http://druid.io/docs/0.8.0/querying/granularities.html
# TODO: pass origin from the UI
@staticmethod
def granularity(period_name, timezone=None, origin=None):
if not period_name or period_name == 'all':
return 'all'
iso_8601_dict = {
'5 seconds': 'PT5S',
'30 seconds': 'PT30S',
'1 minute': 'PT1M',
'5 minutes': 'PT5M',
'1 hour': 'PT1H',
'6 hour': 'PT6H',
'one day': 'P1D',
'1 day': 'P1D',
'7 days': 'P7D',
'week': 'P1W',
'week_starting_sunday': 'P1W',
'week_ending_saturday': 'P1W',
'month': 'P1M',
}
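        # Favor a 'period' granularity when the name maps to an ISO 8601
        # period; otherwise fall back to a 'duration' in milliseconds.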
granularity = {'type': 'period'}
if timezone:
granularity['timeZone'] = timezone
if origin:
dttm = utils.parse_human_datetime(origin)
granularity['origin'] = dttm.isoformat()
if period_name in iso_8601_dict:
granularity['period'] = iso_8601_dict[period_name]
if period_name in ('week_ending_saturday', 'week_starting_sunday'):
# use Sunday as start of the week
granularity['origin'] = '2016-01-03T00:00:00'
elif not isinstance(period_name, string_types):
granularity['type'] = 'duration'
granularity['duration'] = period_name
elif period_name.startswith('P'):
# identify if the string is the iso_8601 period
granularity['period'] = period_name
else:
granularity['type'] = 'duration'
granularity['duration'] = utils.parse_human_timedelta(
period_name).total_seconds() * 1000
return granularity
@staticmethod
def _metrics_and_post_aggs(metrics, metrics_dict):
all_metrics = []
post_aggs = {}
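        # Split requested metrics into plain aggregations and post-aggregations,
        # recursively pulling in the field names each post-agg depends on.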
def recursive_get_fields(_conf):
_type = _conf.get('type')
_field = _conf.get('field')
_fields = _conf.get('fields')
field_names = []
if _type in ['fieldAccess', 'hyperUniqueCardinality',
'quantile', 'quantiles']:
field_names.append(_conf.get('fieldName', ''))
if _field:
field_names += recursive_get_fields(_field)
if _fields:
for _f in _fields:
field_names += recursive_get_fields(_f)
return list(set(field_names))
for metric_name in metrics:
metric = metrics_dict[metric_name]
if metric.metric_type != 'postagg':
all_metrics.append(metric_name)
else:
mconf = metric.json_obj
all_metrics += recursive_get_fields(mconf)
all_metrics += mconf.get('fieldNames', [])
if mconf.get('type') == 'javascript':
post_aggs[metric_name] = JavascriptPostAggregator(
name=mconf.get('name', ''),
field_names=mconf.get('fieldNames', []),
function=mconf.get('function', ''))
elif mconf.get('type') == 'quantile':
post_aggs[metric_name] = Quantile(
mconf.get('name', ''),
mconf.get('probability', ''),
)
elif mconf.get('type') == 'quantiles':
post_aggs[metric_name] = Quantiles(
mconf.get('name', ''),
mconf.get('probabilities', ''),
)
elif mconf.get('type') == 'fieldAccess':
post_aggs[metric_name] = Field(mconf.get('name'))
elif mconf.get('type') == 'constant':
post_aggs[metric_name] = Const(
mconf.get('value'),
output_name=mconf.get('name', '')
)
elif mconf.get('type') == 'hyperUniqueCardinality':
post_aggs[metric_name] = HyperUniqueCardinality(
mconf.get('name')
)
elif mconf.get('type') == 'arithmetic':
post_aggs[metric_name] = Postaggregator(
mconf.get('fn', "/"),
mconf.get('fields', []),
mconf.get('name', ''))
else:
post_aggs[metric_name] = CustomPostAggregator(
mconf.get('name', ''),
mconf)
return all_metrics, post_aggs
def values_for_column(self,
column_name,
limit=10000):
"""Retrieve some values for the given column"""
# TODO: Use Lexicographic TopNMetricSpec once supported by PyDruid
if self.fetch_values_from:
from_dttm = utils.parse_human_datetime(self.fetch_values_from)
else:
from_dttm = datetime(1970, 1, 1)
qry = dict(
datasource=self.datasource_name,
granularity="all",
intervals=from_dttm.isoformat() + '/' + datetime.now().isoformat(),
aggregations=dict(count=count("count")),
dimension=column_name,
metric="count",
threshold=limit,
)
client = self.cluster.get_pydruid_client()
client.topn(**qry)
df = client.export_pandas()
return [row[column_name] for row in df.to_records(index=False)]
def get_query_str(self, query_obj, phase=1, client=None):
return self.run_query(client=client, phase=phase, **query_obj)
def _add_filter_from_pre_query_data(self, df, dimensions, dim_filter):
ret = dim_filter
if df is not None and not df.empty:
new_filters = []
for unused, row in df.iterrows():
fields = []
for dim in dimensions:
f = Dimension(dim) == row[dim]
fields.append(f)
if len(fields) > 1:
term = Filter(type="and", fields=fields)
new_filters.append(term)
elif fields:
new_filters.append(fields[0])
if new_filters:
ff = Filter(type="or", fields=new_filters)
if not dim_filter:
ret = ff
else:
ret = Filter(type="and", fields=[ff, dim_filter])
return ret
def run_query( # noqa / druid
self,
groupby, metrics,
granularity,
from_dttm, to_dttm,
filter=None, # noqa
is_timeseries=True,
timeseries_limit=None,
timeseries_limit_metric=None,
row_limit=None,
inner_from_dttm=None, inner_to_dttm=None,
orderby=None,
extras=None, # noqa
select=None, # noqa
columns=None, phase=2, client=None, form_data=None,
order_desc=True):
"""Runs a query against Druid and returns a dataframe.
"""
# TODO refactor into using a TBD Query object
client = client or self.cluster.get_pydruid_client()
if not is_timeseries:
granularity = 'all'
inner_from_dttm = inner_from_dttm or from_dttm
inner_to_dttm = inner_to_dttm or to_dttm
# add tzinfo to native datetime with config
from_dttm = from_dttm.replace(tzinfo=DRUID_TZ)
to_dttm = to_dttm.replace(tzinfo=DRUID_TZ)
timezone = from_dttm.tzname()
query_str = ""
metrics_dict = {m.metric_name: m for m in self.metrics}
columns_dict = {c.column_name: c for c in self.columns}
all_metrics, post_aggs = self._metrics_and_post_aggs(
metrics,
metrics_dict)
aggregations = OrderedDict()
for m in self.metrics:
if m.metric_name in all_metrics:
aggregations[m.metric_name] = m.json_obj
rejected_metrics = [
m.metric_name for m in self.metrics
if m.is_restricted and
m.metric_name in aggregations.keys() and
not sm.has_access('metric_access', m.perm)
]
if rejected_metrics:
raise MetricPermException(
"Access to the metrics denied: " + ', '.join(rejected_metrics)
)
# the dimensions list with dimensionSpecs expanded
dimensions = []
groupby = [gb for gb in groupby if gb in columns_dict]
for column_name in groupby:
col = columns_dict.get(column_name)
dim_spec = col.dimension_spec
if dim_spec:
dimensions.append(dim_spec)
else:
dimensions.append(column_name)
qry = dict(
datasource=self.datasource_name,
dimensions=dimensions,
aggregations=aggregations,
granularity=DruidDatasource.granularity(
granularity,
timezone=timezone,
origin=extras.get('druid_time_origin'),
),
post_aggregations=post_aggs,
intervals=from_dttm.isoformat() + '/' + to_dttm.isoformat(),
)
filters = self.get_filters(filter)
if filters:
qry['filter'] = filters
having_filters = self.get_having_filters(extras.get('having_druid'))
if having_filters:
qry['having'] = having_filters
order_direction = "descending" if order_desc else "ascending"
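        # Three query shapes: timeseries (no groupby), topn (single groupby,
        # ordered descending) and groupby (multiple dimensions, having
        # filters, or ascending order).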
if len(groupby) == 0 and not having_filters:
del qry['dimensions']
client.timeseries(**qry)
if not having_filters and len(groupby) == 1 and order_desc:
dim = list(qry.get('dimensions'))[0]
if timeseries_limit_metric:
order_by = timeseries_limit_metric
else:
order_by = list(qry['aggregations'].keys())[0]
# Limit on the number of timeseries, doing a two-phases query
pre_qry = deepcopy(qry)
pre_qry['granularity'] = "all"
pre_qry['threshold'] = min(row_limit,
timeseries_limit or row_limit)
pre_qry['metric'] = order_by
pre_qry['dimension'] = dim
del pre_qry['dimensions']
client.topn(**pre_qry)
query_str += "// Two phase query\n// Phase 1\n"
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
query_str += "\n"
if phase == 1:
return query_str
            query_str += (
                "// Phase 2 (built based on phase one's results)\n")
df = client.export_pandas()
qry['filter'] = self._add_filter_from_pre_query_data(
df,
qry['dimensions'], filters)
qry['threshold'] = timeseries_limit or 1000
if row_limit and granularity == 'all':
qry['threshold'] = row_limit
            qry['dimension'] = dim
del qry['dimensions']
qry['metric'] = list(qry['aggregations'].keys())[0]
client.topn(**qry)
elif len(groupby) > 1 or having_filters or not order_desc:
# If grouping on multiple fields or using a having filter
# we have to force a groupby query
if timeseries_limit and is_timeseries:
order_by = metrics[0] if metrics else self.metrics[0]
if timeseries_limit_metric:
order_by = timeseries_limit_metric
# Limit on the number of timeseries, doing a two-phases query
pre_qry = deepcopy(qry)
pre_qry['granularity'] = "all"
pre_qry['limit_spec'] = {
"type": "default",
"limit": min(timeseries_limit, row_limit),
'intervals': (
inner_from_dttm.isoformat() + '/' +
inner_to_dttm.isoformat()),
"columns": [{
"dimension": order_by,
"direction": order_direction,
}],
}
client.groupby(**pre_qry)
query_str += "// Two phase query\n// Phase 1\n"
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
query_str += "\n"
if phase == 1:
return query_str
                query_str += (
                    "// Phase 2 (built based on phase one's results)\n")
df = client.export_pandas()
qry['filter'] = self._add_filter_from_pre_query_data(
df,
qry['dimensions'], filters)
qry['limit_spec'] = None
if row_limit:
qry['limit_spec'] = {
"type": "default",
"limit": row_limit,
"columns": [{
"dimension": (
metrics[0] if metrics else self.metrics[0]),
"direction": order_direction,
}],
}
client.groupby(**qry)
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
return query_str
def query(self, query_obj):
qry_start_dttm = datetime.now()
client = self.cluster.get_pydruid_client()
query_str = self.get_query_str(
client=client, query_obj=query_obj, phase=2)
df = client.export_pandas()
if df is None or df.size == 0:
raise Exception(_("No data was returned."))
df.columns = [
DTTM_ALIAS if c == 'timestamp' else c for c in df.columns]
        is_timeseries = query_obj.get('is_timeseries', True)
if (
not is_timeseries and
DTTM_ALIAS in df.columns):
del df[DTTM_ALIAS]
# Reordering columns
cols = []
if DTTM_ALIAS in df.columns:
cols += [DTTM_ALIAS]
cols += [col for col in query_obj['groupby'] if col in df.columns]
cols += [col for col in query_obj['metrics'] if col in df.columns]
df = df[cols]
time_offset = DruidDatasource.time_offset(query_obj['granularity'])
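        # Shift timestamps so week_ending_saturday buckets are labeled by
        # their last day rather than their first.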
def increment_timestamp(ts):
dt = utils.parse_human_datetime(ts).replace(
tzinfo=DRUID_TZ)
return dt + timedelta(milliseconds=time_offset)
if DTTM_ALIAS in df.columns and time_offset:
df[DTTM_ALIAS] = df[DTTM_ALIAS].apply(increment_timestamp)
return QueryResult(
df=df,
query=query_str,
duration=datetime.now() - qry_start_dttm)
def get_filters(self, raw_filters): # noqa
filters = None
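        # Translate Superset filter dicts ({col, op, val}) into pydruid
        # Dimension/Filter objects, AND-ing them together.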
for flt in raw_filters:
if not all(f in flt for f in ['col', 'op', 'val']):
continue
col = flt['col']
op = flt['op']
eq = flt['val']
cond = None
if op in ('in', 'not in'):
eq = [
types.replace("'", '').strip()
if isinstance(types, string_types)
else types
for types in eq]
elif not isinstance(flt['val'], string_types):
eq = eq[0] if len(eq) > 0 else ''
if col in self.num_cols:
if op in ('in', 'not in'):
eq = [utils.string_to_num(v) for v in eq]
else:
eq = utils.string_to_num(eq)
if op == '==':
cond = Dimension(col) == eq
elif op == '!=':
cond = ~(Dimension(col) == eq)
elif op in ('in', 'not in'):
fields = []
if len(eq) > 1:
for s in eq:
fields.append(Dimension(col) == s)
cond = Filter(type="or", fields=fields)
elif len(eq) == 1:
cond = Dimension(col) == eq[0]
if op == 'not in':
cond = ~cond
elif op == 'regex':
cond = Filter(type="regex", pattern=eq, dimension=col)
elif op == '>=':
cond = Dimension(col) >= eq
elif op == '<=':
cond = Dimension(col) <= eq
elif op == '>':
cond = Dimension(col) > eq
elif op == '<':
cond = Dimension(col) < eq
if filters:
filters = Filter(type="and", fields=[
cond,
filters
])
else:
filters = cond
return filters
def _get_having_obj(self, col, op, eq):
cond = None
if op == '==':
if col in self.column_names:
cond = DimSelector(dimension=col, value=eq)
else:
cond = Aggregation(col) == eq
elif op == '>':
cond = Aggregation(col) > eq
elif op == '<':
cond = Aggregation(col) < eq
return cond
def get_having_filters(self, raw_filters):
filters = None
reversed_op_map = {
'!=': '==',
'>=': '<',
'<=': '>'
}
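        # Druid having clauses only support ==, > and <; express the other
        # operators by negating the reversed comparison.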
for flt in raw_filters:
if not all(f in flt for f in ['col', 'op', 'val']):
continue
col = flt['col']
op = flt['op']
eq = flt['val']
cond = None
if op in ['==', '>', '<']:
cond = self._get_having_obj(col, op, eq)
elif op in reversed_op_map:
cond = ~self._get_having_obj(col, reversed_op_map[op], eq)
if filters:
filters = filters & cond
else:
filters = cond
return filters
@classmethod
def query_datasources_by_name(
cls, session, database, datasource_name, schema=None):
return (
session.query(cls)
.filter_by(cluster_name=database.id)
.filter_by(datasource_name=datasource_name)
.all()
)
sa.event.listen(DruidDatasource, 'after_insert', set_perm)
sa.event.listen(DruidDatasource, 'after_update', set_perm)
| superset/connectors/druid/models.py | 1 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.9986228942871094,
0.024944866076111794,
0.00016393751138821244,
0.00017066526925191283,
0.15443648397922516
]
|
{
"id": 0,
"code_window": [
" indexview=MyIndexView,\n",
" security_manager_class=app.config.get(\"CUSTOM_SECURITY_MANAGER\"))\n",
"\n",
"sm = appbuilder.sm\n",
"\n",
"get_session = appbuilder.get_session\n",
"results_backend = app.config.get(\"RESULTS_BACKEND\")\n",
"\n",
"# Registering sources\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/__init__.py",
"type": "replace",
"edit_start_line_idx": 156
} | .country_map svg {
background-color: #feffff;
}
.country_map {
position: relative;
}
.country_map .background {
fill: rgba(255,255,255,0);
pointer-events: all;
}
.country_map .map-layer {
fill: #fff;
stroke: #aaa;
}
.country_map .effect-layer {
pointer-events: none;
}
.country_map text {
font-weight: 300;
color: #333333;
}
.country_map text.big-text {
font-size: 30px;
font-weight: 400;
color: #333333;
}
.country_map path.region {
cursor: pointer;
}
| superset/assets/visualizations/country_map.css | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.00017272564582526684,
0.0001708691124804318,
0.00016838971350807697,
0.0001711805525701493,
0.0000016451294868602417
]
|
{
"id": 0,
"code_window": [
" indexview=MyIndexView,\n",
" security_manager_class=app.config.get(\"CUSTOM_SECURITY_MANAGER\"))\n",
"\n",
"sm = appbuilder.sm\n",
"\n",
"get_session = appbuilder.get_session\n",
"results_backend = app.config.get(\"RESULTS_BACKEND\")\n",
"\n",
"# Registering sources\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/__init__.py",
"type": "replace",
"edit_start_line_idx": 156
} | #!/bin/bash
set -e
cd "$(dirname "$0")"
npm --version
node --version
npm install -g yarn
yarn
npm run lint
npm run test
npm run build
npm run cover
CODECLIMATE_REPO_TOKEN=ded6121d25d593a1c5aee9f26d85717b19df058f7408cef26910aa731aa7cc3f ./node_modules/.bin/codeclimate-test-reporter < ./coverage/lcov.info
| superset/assets/js_build.sh | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.00016609166050329804,
0.0001656284584896639,
0.00016516525647602975,
0.0001656284584896639,
4.6320201363414526e-7
]
|
{
"id": 0,
"code_window": [
" indexview=MyIndexView,\n",
" security_manager_class=app.config.get(\"CUSTOM_SECURITY_MANAGER\"))\n",
"\n",
"sm = appbuilder.sm\n",
"\n",
"get_session = appbuilder.get_session\n",
"results_backend = app.config.get(\"RESULTS_BACKEND\")\n",
"\n",
"# Registering sources\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/__init__.py",
"type": "replace",
"edit_start_line_idx": 156
} | g.superset path {
stroke-dasharray: 5, 5;
}
.nvtooltip tr.highlight td {
font-weight: bold;
font-size: 15px !important;
}
text.nv-axislabel {
font-size: 14px;
}
.dist_bar {
overflow-x: auto !important;
}
.dist_bar svg.nvd3-svg {
width: auto;
font-size: 14px;
}
.nv-x text{
font-size: 12px;
}
.bar {
overflow-x: auto !important;
}
.bar svg.nvd3-svg {
width: auto;
}
text.nv-axislabel {
font-size: 14px !important;
}
| superset/assets/visualizations/nvd3_vis.css | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.0001711555232759565,
0.0001707256305962801,
0.00017000120715238154,
0.00017087288142647594,
4.6854540869389893e-7
]
|
{
"id": 1,
"code_window": [
"from __future__ import print_function\n",
"from __future__ import unicode_literals\n",
"\n",
"import logging\n",
"from celery.bin import worker as celery_worker\n",
"from datetime import datetime\n",
"from subprocess import Popen\n",
"\n",
"from colorama import Fore, Style\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/cli.py",
"type": "replace",
"edit_start_line_idx": 7
} | """Views used by the SqlAlchemy connector"""
import logging
from past.builtins import basestring
from flask import Markup, flash, redirect
from flask_appbuilder import CompactCRUDMixin, expose
from flask_appbuilder.models.sqla.interface import SQLAInterface
import sqlalchemy as sa
from flask_babel import lazy_gettext as _
from flask_babel import gettext as __
from superset import appbuilder, db, utils, security, sm
from superset.utils import has_access
from superset.connectors.base.views import DatasourceModelView
from superset.views.base import (
SupersetModelView, ListWidgetWithCheckboxes, DeleteMixin, DatasourceFilter,
get_datasource_exist_error_mgs,
)
from . import models
class TableColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa
datamodel = SQLAInterface(models.TableColumn)
list_title = _('List Columns')
show_title = _('Show Column')
add_title = _('Add Column')
edit_title = _('Edit Column')
can_delete = False
list_widget = ListWidgetWithCheckboxes
edit_columns = [
'column_name', 'verbose_name', 'description',
'type', 'groupby', 'filterable',
'table', 'count_distinct', 'sum', 'min', 'max', 'expression',
'is_dttm', 'python_date_format', 'database_expression']
add_columns = edit_columns
list_columns = [
'column_name', 'verbose_name', 'type', 'groupby', 'filterable', 'count_distinct',
'sum', 'min', 'max', 'is_dttm']
page_size = 500
description_columns = {
'is_dttm': _(
"Whether to make this column available as a "
"[Time Granularity] option, column has to be DATETIME or "
"DATETIME-like"),
'filterable': _(
"Whether this column is exposed in the `Filters` section "
"of the explore view."),
'type': _(
"The data type that was inferred by the database. "
"It may be necessary to input a type manually for "
"expression-defined columns in some cases. In most case "
"users should not need to alter this."),
'expression': utils.markdown(
"a valid SQL expression as supported by the underlying backend. "
"Example: `substr(name, 1, 1)`", True),
'python_date_format': utils.markdown(Markup(
"The pattern of timestamp format, use "
"<a href='https://docs.python.org/2/library/"
"datetime.html#strftime-strptime-behavior'>"
"python datetime string pattern</a> "
"expression. If time is stored in epoch "
"format, put `epoch_s` or `epoch_ms`. Leave `Database Expression` "
"below empty if timestamp is stored in "
"String or Integer(epoch) type"), True),
'database_expression': utils.markdown(
"The database expression to cast internal datetime "
"constants to database date/timestamp type according to the DBAPI. "
"The expression should follow the pattern of "
"%Y-%m-%d %H:%M:%S, based on different DBAPI. "
"The string should be a python string formatter \n"
"`Ex: TO_DATE('{}', 'YYYY-MM-DD HH24:MI:SS')` for Oracle"
"Superset uses default expression based on DB URI if this "
"field is blank.", True),
}
label_columns = {
'column_name': _("Column"),
'verbose_name': _("Verbose Name"),
'description': _("Description"),
'groupby': _("Groupable"),
'filterable': _("Filterable"),
'table': _("Table"),
'count_distinct': _("Count Distinct"),
'sum': _("Sum"),
'min': _("Min"),
'max': _("Max"),
'expression': _("Expression"),
'is_dttm': _("Is temporal"),
'python_date_format': _("Datetime Format"),
'database_expression': _("Database Expression"),
'type': _('Type'),
}
appbuilder.add_view_no_menu(TableColumnInlineView)
class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
datamodel = SQLAInterface(models.SqlMetric)
list_title = _('List Metrics')
show_title = _('Show Metric')
add_title = _('Add Metric')
edit_title = _('Edit Metric')
list_columns = ['metric_name', 'verbose_name', 'metric_type']
edit_columns = [
'metric_name', 'description', 'verbose_name', 'metric_type',
'expression', 'table', 'd3format', 'is_restricted', 'warning_text']
description_columns = {
'expression': utils.markdown(
"a valid SQL expression as supported by the underlying backend. "
"Example: `count(DISTINCT userid)`", True),
'is_restricted': _("Whether the access to this metric is restricted "
"to certain roles. Only roles with the permission "
"'metric access on XXX (the name of this metric)' "
"are allowed to access this metric"),
'd3format': utils.markdown(
"d3 formatting string as defined [here]"
"(https://github.com/d3/d3-format/blob/master/README.md#format). "
"For instance, this default formatting applies in the Table "
"visualization and allow for different metric to use different "
"formats", True
),
}
add_columns = edit_columns
page_size = 500
label_columns = {
'metric_name': _("Metric"),
'description': _("Description"),
'verbose_name': _("Verbose Name"),
'metric_type': _("Type"),
'expression': _("SQL Expression"),
'table': _("Table"),
'd3format': _("D3 Format"),
'is_restricted': _('Is Restricted'),
'warning_text': _('Warning Message'),
}
def post_add(self, metric):
if metric.is_restricted:
security.merge_perm(sm, 'metric_access', metric.get_perm())
def post_update(self, metric):
if metric.is_restricted:
security.merge_perm(sm, 'metric_access', metric.get_perm())
appbuilder.add_view_no_menu(SqlMetricInlineView)
class TableModelView(DatasourceModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.SqlaTable)
list_title = _('List Tables')
show_title = _('Show Table')
add_title = _('Add Table')
edit_title = _('Edit Table')
list_columns = [
'link', 'database',
'changed_by_', 'modified']
add_columns = ['database', 'schema', 'table_name']
edit_columns = [
'table_name', 'sql', 'filter_select_enabled', 'slices',
'fetch_values_predicate', 'database', 'schema',
'description', 'owner',
'main_dttm_col', 'default_endpoint', 'offset', 'cache_timeout']
show_columns = edit_columns + ['perm']
related_views = [TableColumnInlineView, SqlMetricInlineView]
base_order = ('changed_on', 'desc')
search_columns = (
'database', 'schema', 'table_name', 'owner',
)
description_columns = {
'slices': _(
"The list of slices associated with this table. By "
"altering this datasource, you may change how these associated "
"slices behave. "
"Also note that slices need to point to a datasource, so "
"this form will fail at saving if removing slices from a "
"datasource. If you want to change the datasource for a slice, "
"overwrite the slice from the 'explore view'"),
'offset': _("Timezone offset (in hours) for this datasource"),
'table_name': _(
"Name of the table that exists in the source database"),
'schema': _(
"Schema, as used only in some databases like Postgres, Redshift "
"and DB2"),
'description': Markup(
"Supports <a href='https://daringfireball.net/projects/markdown/'>"
"markdown</a>"),
'sql': _(
"This fields acts a Superset view, meaning that Superset will "
"run a query against this string as a subquery."
),
'fetch_values_predicate': _(
"Predicate applied when fetching distinct value to "
"populate the filter control component. Supports "
"jinja template syntax. Applies only when "
"`Enable Filter Select` is on."
),
'default_endpoint': _(
"Redirects to this endpoint when clicking on the table "
"from the table list"),
'filter_select_enabled': _(
"Whether to populate the filter's dropdown in the explore "
"view's filter section with a list of distinct values fetched "
"from the backend on the fly"),
}
base_filters = [['id', DatasourceFilter, lambda: []]]
label_columns = {
'slices': _("Associated Slices"),
'link': _("Table"),
'changed_by_': _("Changed By"),
'database': _("Database"),
'changed_on_': _("Last Changed"),
'filter_select_enabled': _("Enable Filter Select"),
'schema': _("Schema"),
'default_endpoint': _('Default Endpoint'),
'offset': _("Offset"),
'cache_timeout': _("Cache Timeout"),
'table_name': _("Table Name"),
'fetch_values_predicate': _('Fetch Values Predicate'),
'owner': _("Owner"),
'main_dttm_col': _("Main Datetime Column"),
'description': _('Description'),
}
def pre_add(self, table):
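        # Reject duplicates (same name/schema/database) and make sure the
        # table actually exists before saving.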
with db.session.no_autoflush:
table_query = db.session.query(models.SqlaTable).filter(
models.SqlaTable.table_name == table.table_name,
models.SqlaTable.schema == table.schema,
models.SqlaTable.database_id == table.database.id)
if db.session.query(table_query.exists()).scalar():
raise Exception(
get_datasource_exist_error_mgs(table.full_name))
# Fail before adding if the table can't be found
if not table.database.has_table(table):
raise Exception(_(
"Table [{}] could not be found, "
"please double check your "
"database connection, schema, and "
"table name").format(table.name))
def post_add(self, table, flash_message=True):
table.fetch_metadata()
security.merge_perm(sm, 'datasource_access', table.get_perm())
if table.schema:
security.merge_perm(sm, 'schema_access', table.schema_perm)
if flash_message:
flash(_(
"The table was created. "
"As part of this two phase configuration "
"process, you should now click the edit button by "
"the new table to configure it."), "info")
def post_update(self, table):
self.post_add(table, flash_message=False)
def _delete(self, pk):
DeleteMixin._delete(self, pk)
@expose('/edit/<pk>', methods=['GET', 'POST'])
@has_access
def edit(self, pk):
"""Simple hack to redirect to explore view after saving"""
resp = super(TableModelView, self).edit(pk)
if isinstance(resp, basestring):
return resp
return redirect('/superset/explore/table/{}/'.format(pk))
appbuilder.add_view(
TableModelView,
"Tables",
label=__("Tables"),
category="Sources",
category_label=__("Sources"),
icon='fa-table',)
appbuilder.add_separator("Sources")
| superset/connectors/sqla/views.py | 1 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.00027809193124994636,
0.00017502970877103508,
0.00016344536561518908,
0.0001717494596960023,
0.00002019152816501446
]
|
{
"id": 1,
"code_window": [
"from __future__ import print_function\n",
"from __future__ import unicode_literals\n",
"\n",
"import logging\n",
"from celery.bin import worker as celery_worker\n",
"from datetime import datetime\n",
"from subprocess import Popen\n",
"\n",
"from colorama import Fore, Style\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/cli.py",
"type": "replace",
"edit_start_line_idx": 7
} | /* eslint-disable no-param-reassign */
/* eslint-disable react/no-multi-comp */
import d3 from 'd3';
import React from 'react';
import PropTypes from 'prop-types';
import ReactDOM from 'react-dom';
import MapGL from 'react-map-gl';
import Immutable from 'immutable';
import supercluster from 'supercluster';
import ViewportMercator from 'viewport-mercator-project';
import {
kmToPixels,
rgbLuminance,
isNumeric,
MILES_PER_KM,
DEFAULT_LONGITUDE,
DEFAULT_LATITUDE,
DEFAULT_ZOOM,
} from '../utils/common';
import './mapbox.css';
const NOOP = () => {};
class ScatterPlotGlowOverlay extends React.Component {
componentDidMount() {
this.redraw();
}
componentDidUpdate() {
this.redraw();
}
drawText(ctx, pixel, options = {}) {
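    // Draws a centered label on a cluster/point, shrinking the font to fit
    // and picking black or white text based on the background luminance.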
const IS_DARK_THRESHOLD = 110;
const { fontHeight = 0, label = '', radius = 0, rgb = [0, 0, 0], shadow = false } = options;
const maxWidth = radius * 1.8;
const luminance = rgbLuminance(rgb[1], rgb[2], rgb[3]);
ctx.globalCompositeOperation = 'source-over';
ctx.fillStyle = luminance <= IS_DARK_THRESHOLD ? 'white' : 'black';
ctx.font = fontHeight + 'px sans-serif';
ctx.textAlign = 'center';
ctx.textBaseline = 'middle';
if (shadow) {
ctx.shadowBlur = 15;
ctx.shadowColor = luminance <= IS_DARK_THRESHOLD ? 'black' : '';
}
const textWidth = ctx.measureText(label).width;
if (textWidth > maxWidth) {
const scale = fontHeight / textWidth;
ctx.font = scale * maxWidth + 'px sans-serif';
}
ctx.fillText(label, pixel[0], pixel[1]);
ctx.globalCompositeOperation = this.props.compositeOperation;
ctx.shadowBlur = 0;
ctx.shadowColor = '';
}
// Modified: https://github.com/uber/react-map-gl/blob/master/src/overlays/scatterplot.react.js
redraw() {
const props = this.props;
const pixelRatio = window.devicePixelRatio || 1;
const canvas = this.refs.overlay;
const ctx = canvas.getContext('2d');
const radius = props.dotRadius;
const mercator = ViewportMercator(props);
const rgb = props.rgb;
const clusterLabelMap = [];
let maxLabel = -1;
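    // First pass: compute each cluster's label with the configured
    // aggregator and track the maximum for radius scaling.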
props.locations.forEach(function (location, i) {
if (location.get('properties').get('cluster')) {
let clusterLabel = location.get('properties').get('metric')
? location.get('properties').get('metric')
: location.get('properties').get('point_count');
if (clusterLabel instanceof Immutable.List) {
clusterLabel = clusterLabel.toArray();
if (props.aggregatorName === 'mean') {
clusterLabel = d3.mean(clusterLabel);
} else if (props.aggregatorName === 'median') {
clusterLabel = d3.median(clusterLabel);
} else if (props.aggregatorName === 'stdev') {
clusterLabel = d3.deviation(clusterLabel);
} else {
clusterLabel = d3.variance(clusterLabel);
}
}
clusterLabel = isNumeric(clusterLabel)
? d3.round(clusterLabel, 2)
: location.get('properties').get('point_count');
maxLabel = Math.max(clusterLabel, maxLabel);
clusterLabelMap[i] = clusterLabel;
}
}, this);
ctx.save();
ctx.scale(pixelRatio, pixelRatio);
ctx.clearRect(0, 0, props.width, props.height);
ctx.globalCompositeOperation = props.compositeOperation;
if ((props.renderWhileDragging || !props.isDragging) && props.locations) {
props.locations.forEach(function _forEach(location, i) {
const pixel = mercator.project(props.lngLatAccessor(location));
const pixelRounded = [d3.round(pixel[0], 1), d3.round(pixel[1], 1)];
if (pixelRounded[0] + radius >= 0
&& pixelRounded[0] - radius < props.width
&& pixelRounded[1] + radius >= 0
&& pixelRounded[1] - radius < props.height) {
ctx.beginPath();
if (location.get('properties').get('cluster')) {
let clusterLabel = clusterLabelMap[i];
const scaledRadius = d3.round(Math.pow(clusterLabel / maxLabel, 0.5) * radius, 1);
const fontHeight = d3.round(scaledRadius * 0.5, 1);
const gradient = ctx.createRadialGradient(
pixelRounded[0], pixelRounded[1], scaledRadius,
pixelRounded[0], pixelRounded[1], 0,
);
gradient.addColorStop(1, 'rgba(' + rgb[1] + ', ' + rgb[2] + ', ' + rgb[3] + ', 0.8)');
gradient.addColorStop(0, 'rgba(' + rgb[1] + ', ' + rgb[2] + ', ' + rgb[3] + ', 0)');
ctx.arc(pixelRounded[0], pixelRounded[1], scaledRadius, 0, Math.PI * 2);
ctx.fillStyle = gradient;
ctx.fill();
if (isNumeric(clusterLabel)) {
if (clusterLabel >= 10000) {
clusterLabel = Math.round(clusterLabel / 1000) + 'k';
} else if (clusterLabel >= 1000) {
clusterLabel = (Math.round(clusterLabel / 100) / 10) + 'k';
}
this.drawText(ctx, pixelRounded, {
fontHeight,
label: clusterLabel,
radius: scaledRadius,
rgb,
shadow: true,
});
}
} else {
const defaultRadius = radius / 6;
const radiusProperty = location.get('properties').get('radius');
const pointMetric = location.get('properties').get('metric');
let pointRadius = radiusProperty === null ? defaultRadius : radiusProperty;
let pointLabel;
if (radiusProperty !== null) {
const pointLatitude = props.lngLatAccessor(location)[1];
if (props.pointRadiusUnit === 'Kilometers') {
pointLabel = d3.round(pointRadius, 2) + 'km';
pointRadius = kmToPixels(pointRadius, pointLatitude, props.zoom);
} else if (props.pointRadiusUnit === 'Miles') {
pointLabel = d3.round(pointRadius, 2) + 'mi';
pointRadius = kmToPixels(pointRadius * MILES_PER_KM, pointLatitude, props.zoom);
}
}
if (pointMetric !== null) {
pointLabel = isNumeric(pointMetric) ? d3.round(pointMetric, 2) : pointMetric;
}
// Fall back to default points if pointRadius wasn't a numerical column
if (!pointRadius) {
pointRadius = defaultRadius;
}
ctx.arc(pixelRounded[0], pixelRounded[1], d3.round(pointRadius, 1), 0, Math.PI * 2);
ctx.fillStyle = 'rgb(' + rgb[1] + ', ' + rgb[2] + ', ' + rgb[3] + ')';
ctx.fill();
if (pointLabel !== undefined) {
this.drawText(ctx, pixelRounded, {
fontHeight: d3.round(pointRadius, 1),
label: pointLabel,
radius: pointRadius,
rgb,
shadow: false,
});
}
}
}
}, this);
}
ctx.restore();
}
render() {
let width = 0;
let height = 0;
if (this.context.viewport) {
width = this.context.viewport.width;
height = this.context.viewport.height;
}
const { globalOpacity } = this.props;
const pixelRatio = window.devicePixelRatio || 1;
return (
React.createElement('canvas', {
ref: 'overlay',
width: width * pixelRatio,
height: height * pixelRatio,
style: {
width: `${width}px`,
height: `${height}px`,
position: 'absolute',
pointerEvents: 'none',
opacity: globalOpacity,
left: 0,
top: 0,
},
})
);
}
}
ScatterPlotGlowOverlay.propTypes = {
locations: PropTypes.instanceOf(Immutable.List).isRequired,
lngLatAccessor: PropTypes.func,
renderWhileDragging: PropTypes.bool,
globalOpacity: PropTypes.number,
dotRadius: PropTypes.number,
dotFill: PropTypes.string,
compositeOperation: PropTypes.string,
};
ScatterPlotGlowOverlay.defaultProps = {
lngLatAccessor: location => [location.get(0), location.get(1)],
renderWhileDragging: true,
dotRadius: 4,
dotFill: '#1FBAD6',
globalOpacity: 1,
// Same as browser default.
compositeOperation: 'source-over',
};
ScatterPlotGlowOverlay.contextTypes = {
viewport: PropTypes.object,
isDragging: PropTypes.bool,
};
class MapboxViz extends React.Component {
constructor(props) {
super(props);
const longitude = this.props.viewportLongitude || DEFAULT_LONGITUDE;
const latitude = this.props.viewportLatitude || DEFAULT_LATITUDE;
this.state = {
viewport: {
longitude,
latitude,
zoom: this.props.viewportZoom || DEFAULT_ZOOM,
startDragLngLat: [longitude, latitude],
},
};
this.onViewportChange = this.onViewportChange.bind(this);
}
onViewportChange(viewport) {
this.setState({ viewport });
this.props.setControlValue('viewport_longitude', viewport.longitude);
this.props.setControlValue('viewport_latitude', viewport.latitude);
this.props.setControlValue('viewport_zoom', viewport.zoom);
}
render() {
const mercator = ViewportMercator({
width: this.props.sliceWidth,
height: this.props.sliceHeight,
longitude: this.state.viewport.longitude,
latitude: this.state.viewport.latitude,
zoom: this.state.viewport.zoom,
});
const topLeft = mercator.unproject([0, 0]);
const bottomRight = mercator.unproject([this.props.sliceWidth, this.props.sliceHeight]);
const bbox = [topLeft[0], bottomRight[1], bottomRight[0], topLeft[1]];
const clusters = this.props.clusterer.getClusters(bbox, Math.round(this.state.viewport.zoom));
const isDragging = this.state.viewport.isDragging === undefined ? false :
this.state.viewport.isDragging;
return (
<MapGL
{...this.state.viewport}
mapStyle={this.props.mapStyle}
width={this.props.sliceWidth}
height={this.props.sliceHeight}
mapboxApiAccessToken={this.props.mapboxApiKey}
onViewportChange={this.onViewportChange}
>
<ScatterPlotGlowOverlay
{...this.state.viewport}
isDragging={isDragging}
width={this.props.sliceWidth}
height={this.props.sliceHeight}
locations={Immutable.fromJS(clusters)}
dotRadius={this.props.pointRadius}
pointRadiusUnit={this.props.pointRadiusUnit}
rgb={this.props.rgb}
globalOpacity={this.props.globalOpacity}
compositeOperation={'screen'}
renderWhileDragging={this.props.renderWhileDragging}
aggregatorName={this.props.aggregatorName}
lngLatAccessor={function (location) {
const coordinates = location.get('geometry').get('coordinates');
return [coordinates.get(0), coordinates.get(1)];
}}
/>
</MapGL>
);
}
}
MapboxViz.propTypes = {
aggregatorName: PropTypes.string,
clusterer: PropTypes.object,
setControlValue: PropTypes.func,
globalOpacity: PropTypes.number,
mapStyle: PropTypes.string,
mapboxApiKey: PropTypes.string,
pointRadius: PropTypes.number,
pointRadiusUnit: PropTypes.string,
renderWhileDragging: PropTypes.bool,
rgb: PropTypes.array,
sliceHeight: PropTypes.number,
sliceWidth: PropTypes.number,
viewportLatitude: PropTypes.number,
viewportLongitude: PropTypes.number,
viewportZoom: PropTypes.number,
};
function mapbox(slice, json, setControlValue) {
const div = d3.select(slice.selector);
const DEFAULT_POINT_RADIUS = 60;
const DEFAULT_MAX_ZOOM = 16;
// Validate mapbox color
const rgb = /^rgb\((\d{1,3}),\s*(\d{1,3}),\s*(\d{1,3})\)$/.exec(json.data.color);
if (rgb === null) {
slice.error('Color field must be of form \'rgb(%d, %d, %d)\'');
return;
}
const aggName = json.data.aggregatorName;
let reducer;
if (aggName === 'sum' || !json.data.customMetric) {
reducer = function (a, b) {
return a + b;
};
} else if (aggName === 'min') {
reducer = Math.min;
} else if (aggName === 'max') {
reducer = Math.max;
} else {
reducer = function (a, b) {
if (a instanceof Array) {
if (b instanceof Array) {
return a.concat(b);
}
a.push(b);
return a;
}
if (b instanceof Array) {
b.push(a);
return b;
}
return [a, b];
};
}
const clusterer = supercluster({
radius: json.data.clusteringRadius,
maxZoom: DEFAULT_MAX_ZOOM,
metricKey: 'metric',
metricReducer: reducer,
});
clusterer.load(json.data.geoJSON.features);
div.selectAll('*').remove();
ReactDOM.render(
<MapboxViz
{...json.data}
rgb={rgb}
sliceHeight={slice.height()}
sliceWidth={slice.width()}
clusterer={clusterer}
pointRadius={DEFAULT_POINT_RADIUS}
aggregatorName={aggName}
setControlValue={setControlValue || NOOP}
/>,
div.node(),
);
}
module.exports = mapbox;
| superset/assets/visualizations/mapbox.jsx | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.00017835017933975905,
0.00017382651276420802,
0.0001658251858316362,
0.00017487216973677278,
0.0000029120596991560888
]
|
{
"id": 1,
"code_window": [
"from __future__ import print_function\n",
"from __future__ import unicode_literals\n",
"\n",
"import logging\n",
"from celery.bin import worker as celery_worker\n",
"from datetime import datetime\n",
"from subprocess import Popen\n",
"\n",
"from colorama import Fore, Style\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/cli.py",
"type": "replace",
"edit_start_line_idx": 7
} | /* eslint-disable global-require, import/no-dynamic-require */
import React from 'react';
import { sprintf } from 'sprintf-js';
import i18n from './i18n';
function formatForReact(formatString, args) {
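  // sprintf-style formatting that preserves React elements: plain arguments
  // are formatted as strings while React elements are cloned into the output.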
const rv = [];
let cursor = 0;
sprintf.parse(formatString).forEach((match, idx) => {
    const copyMatch = match;
let copyIdx = idx;
if (typeof match === 'string') {
rv.push(match);
} else {
let arg = null;
if (match[2]) {
arg = args[0][match[2][0]];
} else if (match[1]) {
arg = args[parseInt(match[1], 10) - 1];
} else {
arg = args[cursor++];
}
if (React.isValidElement(arg)) {
rv.push(React.cloneElement(arg, { key: idx }));
} else {
        copyMatch[2] = null;
        copyMatch[1] = 1;
        rv.push(<span key={copyIdx++}>
          {sprintf.format([copyMatch], [null, arg])}
</span>);
}
}
});
return rv;
}
function argsInvolveReact(args) {
if (args.some(React.isValidElement)) {
return true;
}
if (args.length === 1 && typeof args[0] === 'object') {
return Object.keys(args[0]).some(function (key) {
return React.isValidElement(args[0][key]);
});
}
return false;
}
export function parseComponentTemplate(string) {
const rv = {};
function process(startPos, group, inGroup) {
const regex = /\[(.*?)(:|\])|\]/g;
let match;
const buf = [];
let satisfied = false;
let pos = regex.lastIndex = startPos;
match = regex.exec(string);
while (match !== null) {
const substr = string.substr(pos, match.index - pos);
if (substr !== '') {
buf.push(substr);
}
if (match[0] === ']') {
if (inGroup) {
satisfied = true;
break;
} else {
pos = regex.lastIndex;
continue;
}
}
if (match[2] === ']') {
pos = regex.lastIndex;
} else {
pos = regex.lastIndex = process(regex.lastIndex, match[1], true);
}
buf.push({ group: match[1] });
match = regex.exec(string);
}
let endPos = regex.lastIndex;
if (!satisfied) {
const rest = string.substr(pos);
if (rest) {
buf.push(rest);
}
endPos = string.length;
}
rv[group] = buf;
return endPos;
}
process(0, 'root', false);
return rv;
}
export function renderComponentTemplate(template, components) {
let idx = 0;
function renderGroup(group) {
const children = [];
(template[group] || []).forEach((item) => {
if (typeof item === 'string') {
children.push(<span key={idx++}>{item}</span>);
} else {
children.push(renderGroup(item.group));
}
});
let reference = components[group] || <span key={idx++} />;
if (!React.isValidElement(reference)) {
reference = <span key={idx++}>{reference}</span>;
}
if (children.length > 0) {
return React.cloneElement(reference, { key: idx++ }, children);
}
return React.cloneElement(reference, { key: idx++ });
}
return renderGroup('root');
}
export function format(formatString, args) {
if (argsInvolveReact(args)) {
return formatForReact(formatString, args);
}
return sprintf(formatString, ...args);
}
export function gettext(string, ...args) {
if (!string || !i18n) {
return string;
}
let rv = i18n.gettext(string);
if (args.length > 0) {
rv = format(rv, args);
}
return rv;
}
export function ngettext(singular, plural, ...args) {
return format(i18n.ngettext(singular, plural, args[0] || 0), args);
}
export function gettextComponentTemplate(template, components) {
const tmpl = parseComponentTemplate(i18n.gettext(template));
return renderComponentTemplate(tmpl, components);
}
export const t = gettext;
export const tn = ngettext;
export const tct = gettextComponentTemplate;
| superset/assets/javascripts/locales.jsx | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.00017621876031626016,
0.00017291336553171277,
0.0001608415477676317,
0.00017443185788579285,
0.000003682214583022869
]
|
{
"id": 1,
"code_window": [
"from __future__ import print_function\n",
"from __future__ import unicode_literals\n",
"\n",
"import logging\n",
"from celery.bin import worker as celery_worker\n",
"from datetime import datetime\n",
"from subprocess import Popen\n",
"\n",
"from colorama import Fore, Style\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/cli.py",
"type": "replace",
"edit_start_line_idx": 7
} | import d3 from 'd3';
import { getColorFromScheme } from '../javascripts/modules/colors';
require('./histogram.css');
function histogram(slice, payload) {
const div = d3.select(slice.selector);
const draw = function (data, numBins) {
// Set Margins
const margin = {
top: 50,
right: 10,
bottom: 20,
left: 50,
};
const navBarHeight = 36;
const navBarBuffer = 10;
const width = slice.width() - margin.left - margin.right;
const height = slice.height() - margin.top - margin.bottom - navBarHeight - navBarBuffer;
// Set Histogram objects
const formatNumber = d3.format(',.0f');
const formatTicks = d3.format(',.00f');
const x = d3.scale.ordinal();
const y = d3.scale.linear();
const xAxis = d3.svg.axis()
.scale(x)
.orient('bottom')
.ticks(numBins)
.tickFormat(formatTicks);
const yAxis = d3.svg.axis()
.scale(y)
.orient('left')
.ticks(numBins);
// Calculate bins for the data
const bins = d3.layout.histogram().bins(numBins)(data);
// Set the x-values
x.domain(bins.map(d => d.x))
.rangeRoundBands([0, width], 0.1);
// Set the y-values
y.domain([0, d3.max(bins, d => d.y)])
.range([height, 0]);
// Create the svg value with the bins
const svg = div.selectAll('svg')
.data([bins])
.enter()
.append('svg');
// Make a rectangular background fill
svg.append('rect')
.attr('width', '100%')
.attr('height', '100%')
.attr('fill', '#f6f6f6');
// Transform the svg to make space for the margins
const gEnter = svg
.append('g')
.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
// Add the bars and the x axis
gEnter.append('g').attr('class', 'bars');
gEnter.append('g').attr('class', 'x axis');
// Add width and height to the svg
svg.attr('width', slice.width())
.attr('height', slice.height());
// Create the bars in the svg
const bar = svg.select('.bars').selectAll('.bar').data(bins);
bar.enter().append('rect');
bar.exit().remove();
// Set the Height and Width for each bar
bar.attr('width', x.rangeBand())
.attr('x', d => x(d.x))
.attr('y', d => y(d.y))
.attr('height', d => y.range()[0] - y(d.y))
.style('fill', d => getColorFromScheme(d.length, slice.formData.color_scheme))
.order();
// Find maximum length to position the ticks on top of the bar correctly
const maxLength = d3.max(bins, d => d.length);
function textAboveBar(d) {
return d.length / maxLength < 0.1;
}
// Add a bar text to each bar in the histogram
svg.selectAll('.bartext')
.data(bins)
.enter()
.append('text')
.attr('dy', '.75em')
.attr('y', function (d) {
let padding = 0.0;
if (textAboveBar(d)) {
padding = 12.0;
} else {
padding = -8.0;
}
return y(d.y) - padding;
})
.attr('x', d => x(d.x) + (x.rangeBand() / 2))
.attr('text-anchor', 'middle')
.attr('font-weight', 'bold')
.attr('font-size', '15px')
.text(d => formatNumber(d.y))
.attr('fill', d => textAboveBar(d) ? 'black' : 'white')
.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
// Update the x-axis
svg.append('g')
.attr('class', 'axis')
.attr('transform', 'translate(' + margin.left + ',' + (height + margin.top) + ')')
.text('values')
.call(xAxis);
// Update the Y Axis and add minor lines
svg.append('g')
.attr('class', 'axis')
.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')')
.text('count')
.call(yAxis)
.selectAll('g')
.filter(function (d) { return d; })
.classed('minor', true);
};
const numBins = Number(slice.formData.link_length) || 10;
div.selectAll('*').remove();
draw(payload.data, numBins);
}
module.exports = histogram;
| superset/assets/visualizations/histogram.js | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.0001790166861610487,
0.0001743875618558377,
0.00016970207798294723,
0.00017499862588010728,
0.0000024872472295101034
]
|
{
"id": 2,
"code_window": [
"import requests\n",
"import sqlalchemy as sa\n",
"from sqlalchemy import (\n",
" Column, Integer, String, ForeignKey, Text, Boolean,\n",
" DateTime, or_, and_,\n",
")\n",
"from sqlalchemy.orm import backref, relationship\n",
"from dateutil.parser import parse as dparse\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" DateTime, or_,\n"
],
"file_path": "superset/connectors/druid/models.py",
"type": "replace",
"edit_start_line_idx": 13
} | from datetime import datetime
import logging
import sqlalchemy as sqla
from flask import Markup, flash, redirect
from flask_appbuilder import CompactCRUDMixin, expose
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import lazy_gettext as _
from flask_babel import gettext as __
from superset import db, utils, appbuilder, sm, security
from superset.connectors.connector_registry import ConnectorRegistry
from superset.utils import has_access
from superset.connectors.base.views import DatasourceModelView
from superset.views.base import (
BaseSupersetView,
SupersetModelView, validate_json, DeleteMixin, ListWidgetWithCheckboxes,
DatasourceFilter, get_datasource_exist_error_mgs)
from . import models
class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa
datamodel = SQLAInterface(models.DruidColumn)
list_title = _('List Druid Column')
show_title = _('Show Druid Column')
add_title = _('Add Druid Column')
edit_title = _('Edit Druid Column')
edit_columns = [
'column_name', 'description', 'dimension_spec_json', 'datasource',
'groupby', 'filterable', 'count_distinct', 'sum', 'min', 'max']
add_columns = edit_columns
list_columns = [
'column_name', 'verbose_name', 'type', 'groupby', 'filterable', 'count_distinct',
'sum', 'min', 'max']
can_delete = False
page_size = 500
label_columns = {
'column_name': _("Column"),
'type': _("Type"),
'datasource': _("Datasource"),
'groupby': _("Groupable"),
'filterable': _("Filterable"),
'count_distinct': _("Count Distinct"),
'sum': _("Sum"),
'min': _("Min"),
'max': _("Max"),
}
description_columns = {
'filterable': _(
"Whether this column is exposed in the `Filters` section "
"of the explore view."),
'dimension_spec_json': utils.markdown(
"this field can be used to specify "
"a `dimensionSpec` as documented [here]"
"(http://druid.io/docs/latest/querying/dimensionspecs.html). "
"Make sure to input valid JSON and that the "
"`outputName` matches the `column_name` defined "
"above.",
True),
}
def post_update(self, col):
col.generate_metrics()
utils.validate_json(col.dimension_spec_json)
def post_add(self, col):
self.post_update(col)
appbuilder.add_view_no_menu(DruidColumnInlineView)
class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
datamodel = SQLAInterface(models.DruidMetric)
list_title = _('List Druid Metric')
show_title = _('Show Druid Metric')
add_title = _('Add Druid Metric')
edit_title = _('Edit Druid Metric')
list_columns = ['metric_name', 'verbose_name', 'metric_type']
edit_columns = [
'metric_name', 'description', 'verbose_name', 'metric_type', 'json',
'datasource', 'd3format', 'is_restricted', 'warning_text']
add_columns = edit_columns
page_size = 500
validators_columns = {
'json': [validate_json],
}
description_columns = {
'metric_type': utils.markdown(
"use `postagg` as the metric type if you are defining a "
"[Druid Post Aggregation]"
"(http://druid.io/docs/latest/querying/post-aggregations.html)",
True),
'is_restricted': _("Whether the access to this metric is restricted "
"to certain roles. Only roles with the permission "
"'metric access on XXX (the name of this metric)' "
"are allowed to access this metric"),
}
label_columns = {
'metric_name': _("Metric"),
'description': _("Description"),
'verbose_name': _("Verbose Name"),
'metric_type': _("Type"),
'json': _("JSON"),
'datasource': _("Druid Datasource"),
'warning_text': _("Warning Message"),
}
def post_add(self, metric):
if metric.is_restricted:
security.merge_perm(sm, 'metric_access', metric.get_perm())
def post_update(self, metric):
if metric.is_restricted:
security.merge_perm(sm, 'metric_access', metric.get_perm())
appbuilder.add_view_no_menu(DruidMetricInlineView)
class DruidClusterModelView(SupersetModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.DruidCluster)
list_title = _('List Druid Cluster')
show_title = _('Show Druid Cluster')
add_title = _('Add Druid Cluster')
edit_title = _('Edit Druid Cluster')
add_columns = [
'verbose_name', 'coordinator_host', 'coordinator_port',
'coordinator_endpoint', 'broker_host', 'broker_port',
'broker_endpoint', 'cache_timeout', 'cluster_name',
]
edit_columns = add_columns
list_columns = ['cluster_name', 'metadata_last_refreshed']
search_columns = ('cluster_name',)
label_columns = {
'cluster_name': _("Cluster"),
'coordinator_host': _("Coordinator Host"),
'coordinator_port': _("Coordinator Port"),
'coordinator_endpoint': _("Coordinator Endpoint"),
'broker_host': _("Broker Host"),
'broker_port': _("Broker Port"),
'broker_endpoint': _("Broker Endpoint"),
}
def pre_add(self, cluster):
security.merge_perm(sm, 'database_access', cluster.perm)
def pre_update(self, cluster):
self.pre_add(cluster)
def _delete(self, pk):
DeleteMixin._delete(self, pk)
appbuilder.add_view(
DruidClusterModelView,
name="Druid Clusters",
label=__("Druid Clusters"),
icon="fa-cubes",
category="Sources",
category_label=__("Sources"),
category_icon='fa-database',)
class DruidDatasourceModelView(DatasourceModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.DruidDatasource)
list_title = _('List Druid Datasource')
show_title = _('Show Druid Datasource')
add_title = _('Add Druid Datasource')
edit_title = _('Edit Druid Datasource')
list_widget = ListWidgetWithCheckboxes
list_columns = [
'datasource_link', 'cluster', 'changed_by_', 'modified']
related_views = [DruidColumnInlineView, DruidMetricInlineView]
edit_columns = [
'datasource_name', 'cluster', 'slices', 'description', 'owner',
'is_hidden',
'filter_select_enabled', 'fetch_values_from',
'default_endpoint', 'offset', 'cache_timeout']
search_columns = (
'datasource_name', 'cluster', 'description', 'owner'
)
add_columns = edit_columns
show_columns = add_columns + ['perm']
page_size = 500
base_order = ('datasource_name', 'asc')
description_columns = {
'slices': _(
"The list of slices associated with this table. By "
"altering this datasource, you may change how these associated "
"slices behave. "
"Also note that slices need to point to a datasource, so "
"this form will fail at saving if removing slices from a "
"datasource. If you want to change the datasource for a slice, "
"overwrite the slice from the 'explore view'"),
'offset': _("Timezone offset (in hours) for this datasource"),
'description': Markup(
"Supports <a href='"
"https://daringfireball.net/projects/markdown/'>markdown</a>"),
'fetch_values_from': _(
"Time expression to use as a predicate when retrieving "
"distinct values to populate the filter component. "
"Only applies when `Enable Filter Select` is on. If "
"you enter `7 days ago`, the distinct list of values in "
"the filter will be populated based on the distinct value over "
"the past week"),
'filter_select_enabled': _(
"Whether to populate the filter's dropdown in the explore "
"view's filter section with a list of distinct values fetched "
"from the backend on the fly"),
'default_endpoint': _(
"Redirects to this endpoint when clicking on the datasource "
"from the datasource list"),
}
base_filters = [['id', DatasourceFilter, lambda: []]]
label_columns = {
'slices': _("Associated Slices"),
'datasource_link': _("Data Source"),
'cluster': _("Cluster"),
'description': _("Description"),
'owner': _("Owner"),
'is_hidden': _("Is Hidden"),
'filter_select_enabled': _("Enable Filter Select"),
'default_endpoint': _("Default Endpoint"),
'offset': _("Time Offset"),
'cache_timeout': _("Cache Timeout"),
}
def pre_add(self, datasource):
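# Reject the insert if a datasource with the same name already exists
# on this cluster.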
with db.session.no_autoflush:
query = (
db.session.query(models.DruidDatasource)
.filter(models.DruidDatasource.datasource_name ==
datasource.datasource_name,
models.DruidDatasource.cluster_name ==
datasource.cluster.cluster_name)
)
if db.session.query(query.exists()).scalar():
raise Exception(get_datasource_exist_error_mgs(
datasource.full_name))
def post_add(self, datasource):
datasource.generate_metrics()
security.merge_perm(sm, 'datasource_access', datasource.get_perm())
if datasource.schema:
security.merge_perm(sm, 'schema_access', datasource.schema_perm)
def post_update(self, datasource):
self.post_add(datasource)
def _delete(self, pk):
DeleteMixin._delete(self, pk)
appbuilder.add_view(
DruidDatasourceModelView,
"Druid Datasources",
label=__("Druid Datasources"),
category="Sources",
category_label=__("Sources"),
icon="fa-cube")
class Druid(BaseSupersetView):
"""The base views for Superset!"""
@has_access
@expose("/refresh_datasources/")
def refresh_datasources(self, refreshAll=True):
"""endpoint that refreshes druid datasources metadata"""
session = db.session()
DruidCluster = ConnectorRegistry.sources['druid'].cluster_class
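# Refresh each cluster in turn; on the first failure, flash the error
# and bail out to the cluster list.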
for cluster in session.query(DruidCluster).all():
cluster_name = cluster.cluster_name
try:
cluster.refresh_datasources(refreshAll=refreshAll)
except Exception as e:
flash(
"Error while processing cluster '{}'\n{}".format(
cluster_name, utils.error_msg_from_exception(e)),
"danger")
logging.exception(e)
return redirect('/druidclustermodelview/list/')
cluster.metadata_last_refreshed = datetime.now()
flash(
"Refreshed metadata from cluster "
"[" + cluster.cluster_name + "]",
'info')
session.commit()
return redirect("/druiddatasourcemodelview/list/")
@has_access
@expose("/scan_new_datasources/")
def scan_new_datasources(self):
"""
Calling this endpoint triggers a scan for new
datasources only and adds them.
"""
return self.refresh_datasources(refreshAll=False)
appbuilder.add_view_no_menu(Druid)
appbuilder.add_link(
"Scan New Datasources",
label=__("Scan New Datasources"),
href='/druid/scan_new_datasources/',
category='Sources',
category_label=__("Sources"),
category_icon='fa-database',
icon="fa-refresh")
appbuilder.add_link(
"Refresh Druid Metadata",
label=__("Refresh Druid Metadata"),
href='/druid/refresh_datasources/',
category='Sources',
category_label=__("Sources"),
category_icon='fa-database',
icon="fa-cog")
appbuilder.add_separator("Sources", )
| superset/connectors/druid/views.py | 1 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.003738949541002512,
0.0003473651595413685,
0.0001647396566113457,
0.00017169868806377053,
0.0006360319093801081
]
|
{
"id": 2,
"code_window": [
"import requests\n",
"import sqlalchemy as sa\n",
"from sqlalchemy import (\n",
" Column, Integer, String, ForeignKey, Text, Boolean,\n",
" DateTime, or_, and_,\n",
")\n",
"from sqlalchemy.orm import backref, relationship\n",
"from dateutil.parser import parse as dparse\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" DateTime, or_,\n"
],
"file_path": "superset/connectors/druid/models.py",
"type": "replace",
"edit_start_line_idx": 13
} | import React from 'react';
import PropTypes from 'prop-types';
import { Button } from 'react-bootstrap';
import Select from 'react-select';
import QueryTable from './QueryTable';
import { now, epochTimeXHoursAgo,
epochTimeXDaysAgo, epochTimeXYearsAgo } from '../../modules/dates';
import { STATUS_OPTIONS, TIME_OPTIONS } from '../constants';
import AsyncSelect from '../../components/AsyncSelect';
import { t } from '../../locales';
const $ = window.$ = require('jquery');
const propTypes = {
actions: PropTypes.object.isRequired,
height: PropTypes.number.isRequired,
};
class QuerySearch extends React.PureComponent {
constructor(props) {
super(props);
this.state = {
userLoading: false,
userOptions: [],
databaseId: null,
userId: null,
searchText: null,
from: '28 days ago',
to: 'now',
status: 'success',
queriesArray: [],
queriesLoading: true,
};
}
componentDidMount() {
this.refreshQueries();
}
onUserClicked(userId) {
this.setState({ userId }, () => { this.refreshQueries(); });
}
onDbClicked(dbId) {
this.setState({ databaseId: dbId }, () => { this.refreshQueries(); });
}
onChange(db) {
const val = (db) ? db.value : null;
this.setState({ databaseId: val });
}
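// Convert a human-readable time-range option into a timestamp via the
// date helpers; returns null for unrecognized selections.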
getTimeFromSelection(selection) {
switch (selection) {
case 'now':
return now();
case '1 hour ago':
return epochTimeXHoursAgo(1);
case '1 day ago':
return epochTimeXDaysAgo(1);
case '7 days ago':
return epochTimeXDaysAgo(7);
case '28 days ago':
return epochTimeXDaysAgo(28);
case '90 days ago':
return epochTimeXDaysAgo(90);
case '1 year ago':
return epochTimeXYearsAgo(1);
default:
return null;
}
}
changeFrom(user) {
const val = (user) ? user.value : null;
this.setState({ from: val });
}
changeTo(status) {
const val = (status) ? status.value : null;
this.setState({ to: val });
}
changeUser(user) {
const val = (user) ? user.value : null;
this.setState({ userId: val });
}
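// Join the non-empty entries of `params` onto the base URL as a query
// string, e.g. ['user_id=1', ''] -> baseUrl + '?user_id=1'.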
insertParams(baseUrl, params) {
const validParams = params.filter(
function (p) { return p !== ''; },
);
return baseUrl + '?' + validParams.join('&');
}
changeStatus(status) {
const val = (status) ? status.value : null;
this.setState({ status: val });
}
changeSearch(event) {
this.setState({ searchText: event.target.value });
}
userMutator(data) {
const options = [];
for (let i = 0; i < data.pks.length; i++) {
options.push({ value: data.pks[i], label: data.result[i].username });
}
return options;
}
dbMutator(data) {
const options = data.result.map(db => ({ value: db.id, label: db.database_name }));
this.props.actions.setDatabases(data.result);
if (data.result.length === 0) {
this.props.actions.addAlert({
bsStyle: 'danger',
msg: t('It seems you don\'t have access to any database'),
});
}
return options;
}
refreshQueries() {
this.setState({ queriesLoading: true });
const params = [
this.state.userId ? `user_id=${this.state.userId}` : '',
this.state.databaseId ? `database_id=${this.state.databaseId}` : '',
this.state.searchText ? `search_text=${this.state.searchText}` : '',
this.state.status ? `status=${this.state.status}` : '',
this.state.from ? `from=${this.getTimeFromSelection(this.state.from)}` : '',
this.state.to ? `to=${this.getTimeFromSelection(this.state.to)}` : '',
];
const url = this.insertParams('/superset/search_queries', params);
$.getJSON(url, (data, status) => {
if (status === 'success') {
this.setState({ queriesArray: data, queriesLoading: false });
}
});
}
render() {
return (
<div>
<div id="search-header" className="row space-1">
<div className="col-sm-2">
<AsyncSelect
dataEndpoint="/users/api/read"
mutator={this.userMutator}
value={this.state.userId}
onChange={this.changeUser.bind(this)}
/>
</div>
<div className="col-sm-2">
<AsyncSelect
onChange={this.onChange.bind(this)}
dataEndpoint="/databaseasync/api/read?_flt_0_expose_in_sqllab=1"
value={this.state.databaseId}
mutator={this.dbMutator.bind(this)}
/>
</div>
<div className="col-sm-4">
<input
type="text"
onChange={this.changeSearch.bind(this)}
className="form-control input-sm"
placeholder={t('Search Results')}
/>
</div>
<div className="col-sm-4 search-date-filter-container">
<Select
name="select-from"
placeholder={t('[From]-')}
options={TIME_OPTIONS
.slice(1, TIME_OPTIONS.length).map(xt => ({ value: xt, label: xt }))}
value={this.state.from}
autosize={false}
onChange={this.changeFrom.bind(this)}
/>
<Select
name="select-to"
placeholder={t('[To]-')}
options={TIME_OPTIONS.map(xt => ({ value: xt, label: xt }))}
value={this.state.to}
autosize={false}
onChange={this.changeTo.bind(this)}
/>
<Select
name="select-status"
placeholder={t('[Query Status]')}
options={STATUS_OPTIONS.map(s => ({ value: s, label: s }))}
value={this.state.status}
isLoading={false}
autosize={false}
onChange={this.changeStatus.bind(this)}
/>
<Button bsSize="small" bsStyle="success" onClick={this.refreshQueries.bind(this)}>
{t('Search')}
</Button>
</div>
</div>
{this.state.queriesLoading ?
(<img className="loading" alt="Loading..." src="/static/assets/images/loading.gif" />)
:
(
<div className="scrollbar-container">
<div
className="scrollbar-content"
style={{ height: this.props.height }}
>
<QueryTable
columns={[
'state', 'db', 'user', 'time',
'progress', 'rows', 'sql', 'querylink',
]}
onUserClicked={this.onUserClicked.bind(this)}
onDbClicked={this.onDbClicked.bind(this)}
queries={this.state.queriesArray}
actions={this.props.actions}
/>
</div>
</div>
)
}
</div>
);
}
}
QuerySearch.propTypes = propTypes;
export default QuerySearch;
| superset/assets/javascripts/SqlLab/components/QuerySearch.jsx | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.0001764647604431957,
0.0001715882826829329,
0.00016524907550774515,
0.00017051698523573577,
0.000003116785364909447
]
|
{
"id": 2,
"code_window": [
"import requests\n",
"import sqlalchemy as sa\n",
"from sqlalchemy import (\n",
" Column, Integer, String, ForeignKey, Text, Boolean,\n",
" DateTime, or_, and_,\n",
")\n",
"from sqlalchemy.orm import backref, relationship\n",
"from dateutil.parser import parse as dparse\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" DateTime, or_,\n"
],
"file_path": "superset/connectors/druid/models.py",
"type": "replace",
"edit_start_line_idx": 13
} | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from time import sleep
from datetime import datetime
import json
import logging
import uuid
import pandas as pd
import sqlalchemy
from sqlalchemy.pool import NullPool
from sqlalchemy.orm import sessionmaker
from celery.exceptions import SoftTimeLimitExceeded
from superset import (app, db, utils, dataframe, results_backend)
from superset.models.sql_lab import Query
from superset.sql_parse import SupersetQuery
from superset.db_engine_specs import LimitMethod
from superset.jinja_context import get_template_processor
from superset.utils import QueryStatus, get_celery_app
config = app.config
celery_app = get_celery_app(config)
stats_logger = app.config.get('STATS_LOGGER')
SQLLAB_TIMEOUT = config.get('SQLLAB_ASYNC_TIME_LIMIT_SEC', 600)
class SqlLabException(Exception):
pass
def dedup(l, suffix='__'):
"""De-duplicates a list of string by suffixing a counter
Always returns the same number of entries as provided, and always returns
unique values.
>>> print(','.join(dedup(['foo', 'bar', 'bar', 'bar'])))
foo,bar,bar__1,bar__2
"""
new_l = []
seen = {}
for s in l:
if s in seen:
seen[s] += 1
s += suffix + str(seen[s])
else:
seen[s] = 0
new_l.append(s)
return new_l
def get_query(query_id, session, retry_count=5):
"""attemps to get the query and retry if it cannot"""
query = None
attempt = 0
while not query and attempt < retry_count:
try:
query = session.query(Query).filter_by(id=query_id).one()
except Exception:
attempt += 1
logging.error(
"Query with id `{}` could not be retrieved".format(query_id))
stats_logger.incr('error_attempting_orm_query_' + str(attempt))
logging.error("Sleeping for a sec before retrying...")
sleep(1)
if not query:
stats_logger.incr('error_failed_at_getting_orm_query')
raise SqlLabException("Failed at getting query")
return query
def get_session(nullpool):
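# Async (Celery) execution passes nullpool=True so each task gets a
# throwaway NullPool engine; direct calls reuse the Flask-scoped session.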
if nullpool:
engine = sqlalchemy.create_engine(
app.config.get('SQLALCHEMY_DATABASE_URI'), poolclass=NullPool)
session_class = sessionmaker()
session_class.configure(bind=engine)
return session_class()
session = db.session()
session.commit() # HACK
return session
@celery_app.task(bind=True, soft_time_limit=SQLLAB_TIMEOUT)
def get_sql_results(
ctask, query_id, return_results=True, store_results=False, user_name=None):
"""Executes the sql query returns the results."""
try:
return execute_sql(
ctask, query_id, return_results, store_results, user_name)
except Exception as e:
logging.exception(e)
stats_logger.incr('error_sqllab_unhandled')
sesh = get_session(not ctask.request.called_directly)
query = get_query(query_id, sesh)
query.error_message = str(e)
query.status = QueryStatus.FAILED
query.tmp_table_name = None
sesh.commit()
raise
def execute_sql(
ctask, query_id, return_results=True, store_results=False, user_name=None):
"""Executes the sql query returns the results."""
session = get_session(not ctask.request.called_directly)
query = get_query(query_id, session)
payload = dict(query_id=query_id)
database = query.database
db_engine_spec = database.db_engine_spec
db_engine_spec.patch()
def handle_error(msg):
"""Local method handling error while processing the SQL"""
query.error_message = msg
query.status = QueryStatus.FAILED
query.tmp_table_name = None
session.commit()
payload.update({
'status': query.status,
'error': msg,
})
return payload
if store_results and not results_backend:
return handle_error("Results backend isn't configured.")
# Limit enforced only for retrieving the data, not for the CTA queries.
superset_query = SupersetQuery(query.sql)
executed_sql = superset_query.stripped()
if not superset_query.is_select() and not database.allow_dml:
return handle_error(
"Only `SELECT` statements are allowed against this database")
if query.select_as_cta:
if not superset_query.is_select():
return handle_error(
"Only `SELECT` statements can be used with the CREATE TABLE "
"feature.")
if not query.tmp_table_name:
start_dttm = datetime.fromtimestamp(query.start_time)
query.tmp_table_name = 'tmp_{}_table_{}'.format(
query.user_id, start_dttm.strftime('%Y_%m_%d_%H_%M_%S'))
executed_sql = superset_query.as_create_table(query.tmp_table_name)
query.select_as_cta_used = True
elif (query.limit and superset_query.is_select()
and db_engine_spec.limit_method == LimitMethod.WRAP_SQL):
executed_sql = database.wrap_sql_limit(executed_sql, query.limit)
query.limit_used = True
try:
template_processor = get_template_processor(
database=database, query=query)
executed_sql = template_processor.process_template(executed_sql)
except Exception as e:
logging.exception(e)
msg = "Template rendering failed: " + utils.error_msg_from_exception(e)
return handle_error(msg)
query.executed_sql = executed_sql
query.status = QueryStatus.RUNNING
query.start_running_time = utils.now_as_float()
session.merge(query)
session.commit()
logging.info("Set query to 'running'")
try:
engine = database.get_sqla_engine(
schema=query.schema, nullpool=not ctask.request.called_directly, user_name=user_name)
conn = engine.raw_connection()
cursor = conn.cursor()
logging.info("Running query: \n{}".format(executed_sql))
logging.info(query.executed_sql)
cursor.execute(query.executed_sql,
**db_engine_spec.cursor_execute_kwargs)
logging.info("Handling cursor")
db_engine_spec.handle_cursor(cursor, query, session)
logging.info("Fetching data: {}".format(query.to_dict()))
data = db_engine_spec.fetch_data(cursor, query.limit)
except SoftTimeLimitExceeded as e:
logging.exception(e)
conn.close()
return handle_error(
"SQL Lab timeout. This environment's policy is to kill queries "
"after {} seconds.".format(SQLLAB_TIMEOUT))
except Exception as e:
logging.exception(e)
conn.close()
return handle_error(db_engine_spec.extract_error_message(e))
logging.info("Fetching cursor description")
cursor_description = cursor.description
conn.commit()
conn.close()
if query.status == utils.QueryStatus.STOPPED:
return json.dumps(
{
'query_id': query.id,
'status': query.status,
'query': query.to_dict(),
},
default=utils.json_iso_dttm_ser)
column_names = (
[col[0] for col in cursor_description] if cursor_description else [])
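# Suffix duplicate column labels (e.g. bar, bar__1) so each DataFrame
# column is uniquely addressable.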
column_names = dedup(column_names)
cdf = dataframe.SupersetDataFrame(
pd.DataFrame(list(data), columns=column_names))
query.rows = cdf.size
query.progress = 100
query.status = QueryStatus.SUCCESS
if query.select_as_cta:
query.select_sql = '{}'.format(
database.select_star(
query.tmp_table_name,
limit=query.limit,
schema=database.force_ctas_schema,
show_cols=False,
latest_partition=False, ))
query.end_time = utils.now_as_float()
session.merge(query)
session.flush()
payload.update({
'status': query.status,
'data': cdf.data if cdf.data else [],
'columns': cdf.columns if cdf.columns else [],
'query': query.to_dict(),
})
if store_results:
key = '{}'.format(uuid.uuid4())
logging.info("Storing results in results backend, key: {}".format(key))
json_payload = json.dumps(payload, default=utils.json_iso_dttm_ser)
results_backend.set(key, utils.zlib_compress(json_payload))
query.results_key = key
query.end_result_backend_time = utils.now_as_float()
session.merge(query)
session.commit()
if return_results:
return payload
| superset/sql_lab.py | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.000543419155292213,
0.00019461823103483766,
0.00016388107906095684,
0.00016816923744045198,
0.00008680171595187858
]
|
{
"id": 2,
"code_window": [
"import requests\n",
"import sqlalchemy as sa\n",
"from sqlalchemy import (\n",
" Column, Integer, String, ForeignKey, Text, Boolean,\n",
" DateTime, or_, and_,\n",
")\n",
"from sqlalchemy.orm import backref, relationship\n",
"from dateutil.parser import parse as dparse\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" DateTime, or_,\n"
],
"file_path": "superset/connectors/druid/models.py",
"type": "replace",
"edit_start_line_idx": 13
} | import React from 'react';
import { mount } from 'enzyme';
import { describe, it } from 'mocha';
import { expect } from 'chai';
import { user } from './fixtures';
import RecentActivity from '../../../javascripts/profile/components/RecentActivity';
import TableLoader from '../../../javascripts/profile/components/TableLoader';
describe('RecentActivity', () => {
const mockedProps = {
user,
};
it('is valid', () => {
expect(
React.isValidElement(<RecentActivity {...mockedProps} />),
).to.equal(true);
});
it('renders a TableLoader', () => {
const wrapper = mount(<RecentActivity {...mockedProps} />);
expect(wrapper.find(TableLoader)).to.have.length(1);
});
});
| superset/assets/spec/javascripts/profile/RecentActivity_spec.jsx | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.00017625051259528846,
0.00017512943304609507,
0.00017331494018435478,
0.00017582284635864198,
0.0000012948650010002893
]
|
{
"id": 3,
"code_window": [
"from flask_appbuilder.models.decorators import renders\n",
"from flask_appbuilder import Model\n",
"\n",
"from flask_babel import lazy_gettext as _\n",
"\n",
"from superset import conf, db, import_util, utils, sm, get_session\n",
"from superset.utils import (\n",
" flasher, MetricPermException, DimSelector, DTTM_ALIAS\n",
")\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"from superset import conf, db, import_util, utils, sm\n"
],
"file_path": "superset/connectors/druid/models.py",
"type": "replace",
"edit_start_line_idx": 32
} | # pylint: disable=invalid-unary-operand-type
from collections import OrderedDict
import json
import logging
from copy import deepcopy
from datetime import datetime, timedelta
from six import string_types
from multiprocessing import Pool
import requests
import sqlalchemy as sa
from sqlalchemy import (
Column, Integer, String, ForeignKey, Text, Boolean,
DateTime, or_, and_,
)
from sqlalchemy.orm import backref, relationship
from dateutil.parser import parse as dparse
from pydruid.client import PyDruid
from pydruid.utils.aggregators import count
from pydruid.utils.filters import Dimension, Filter
from pydruid.utils.postaggregator import (
Postaggregator, Quantile, Quantiles, Field, Const, HyperUniqueCardinality,
)
from pydruid.utils.having import Aggregation
from flask import Markup, escape
from flask_appbuilder.models.decorators import renders
from flask_appbuilder import Model
from flask_babel import lazy_gettext as _
from superset import conf, db, import_util, utils, sm, get_session
from superset.utils import (
flasher, MetricPermException, DimSelector, DTTM_ALIAS
)
from superset.connectors.base.models import BaseDatasource, BaseColumn, BaseMetric
from superset.models.helpers import AuditMixinNullable, QueryResult, set_perm
DRUID_TZ = conf.get("DRUID_TZ")
# Function wrapper because bound methods cannot
# be passed to processes
def _fetch_metadata_for(datasource):
return datasource.latest_metadata()
class JavascriptPostAggregator(Postaggregator):
def __init__(self, name, field_names, function):
self.post_aggregator = {
'type': 'javascript',
'fieldNames': field_names,
'name': name,
'function': function,
}
self.name = name
class CustomPostAggregator(Postaggregator):
"""A way to allow users to specify completely custom PostAggregators"""
def __init__(self, name, post_aggregator):
self.name = name
self.post_aggregator = post_aggregator
class DruidCluster(Model, AuditMixinNullable):
"""ORM object referencing the Druid clusters"""
__tablename__ = 'clusters'
type = "druid"
id = Column(Integer, primary_key=True)
verbose_name = Column(String(250), unique=True)
# short unique name, used in permissions
cluster_name = Column(String(250), unique=True)
coordinator_host = Column(String(255))
coordinator_port = Column(Integer, default=8081)
coordinator_endpoint = Column(
String(255), default='druid/coordinator/v1/metadata')
broker_host = Column(String(255))
broker_port = Column(Integer, default=8082)
broker_endpoint = Column(String(255), default='druid/v2')
metadata_last_refreshed = Column(DateTime)
cache_timeout = Column(Integer)
def __repr__(self):
return self.verbose_name if self.verbose_name else self.cluster_name
def get_pydruid_client(self):
cli = PyDruid(
"http://{0}:{1}/".format(self.broker_host, self.broker_port),
self.broker_endpoint)
return cli
def get_datasources(self):
endpoint = (
"http://{obj.coordinator_host}:{obj.coordinator_port}/"
"{obj.coordinator_endpoint}/datasources"
).format(obj=self)
return json.loads(requests.get(endpoint).text)
def get_druid_version(self):
endpoint = (
"http://{obj.coordinator_host}:{obj.coordinator_port}/status"
).format(obj=self)
return json.loads(requests.get(endpoint).text)['version']
def refresh_datasources(
self,
datasource_name=None,
merge_flag=True,
refreshAll=True):
"""Refresh metadata of all datasources in the cluster
If ``datasource_name`` is specified, only that datasource is updated
"""
self.druid_version = self.get_druid_version()
ds_list = self.get_datasources()
blacklist = conf.get('DRUID_DATA_SOURCE_BLACKLIST', [])
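# Respect the configured blacklist: refresh every non-blacklisted
# datasource, or only the one explicitly requested.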
ds_refresh = []
if not datasource_name:
ds_refresh = list(filter(lambda ds: ds not in blacklist, ds_list))
elif datasource_name not in blacklist and datasource_name in ds_list:
ds_refresh.append(datasource_name)
else:
return
self.refresh_async(ds_refresh, merge_flag, refreshAll)
def refresh_async(self, datasource_names, merge_flag, refreshAll):
"""
Fetches metadata for the specified datasources and
merges it into the Superset database
"""
session = db.session
ds_list = (
session.query(DruidDatasource)
.filter(or_(DruidDatasource.datasource_name == name
for name in datasource_names))
)
ds_map = {ds.name: ds for ds in ds_list}
for ds_name in datasource_names:
datasource = ds_map.get(ds_name, None)
if not datasource:
datasource = DruidDatasource(datasource_name=ds_name)
with session.no_autoflush:
session.add(datasource)
flasher(
"Adding new datasource [{}]".format(ds_name), 'success')
ds_map[ds_name] = datasource
elif refreshAll:
flasher(
"Refreshing datasource [{}]".format(ds_name), 'info')
else:
del ds_map[ds_name]
continue
datasource.cluster = self
datasource.merge_flag = merge_flag
session.flush()
# Prepare multithreaded execution: fetch segment metadata for all
# datasources in parallel
pool = Pool()
ds_refresh = list(ds_map.values())
metadata = pool.map(_fetch_metadata_for, ds_refresh)
pool.close()
pool.join()
for i in range(0, len(ds_refresh)):
datasource = ds_refresh[i]
cols = metadata[i]
col_objs_list = (
session.query(DruidColumn)
.filter(DruidColumn.datasource_name == datasource.datasource_name)
.filter(or_(DruidColumn.column_name == col for col in cols))
)
col_objs = {col.column_name: col for col in col_objs_list}
for col in cols:
if col == '__time': # skip the time column
continue
col_obj = col_objs.get(col, None)
if not col_obj:
col_obj = DruidColumn(
datasource_name=datasource.datasource_name,
column_name=col)
with session.no_autoflush:
session.add(col_obj)
datatype = cols[col]['type']
if datatype == 'STRING':
col_obj.groupby = True
col_obj.filterable = True
if datatype == 'hyperUnique' or datatype == 'thetaSketch':
col_obj.count_distinct = True
# Allow sum/min/max for long or double
if datatype == 'LONG' or datatype == 'DOUBLE':
col_obj.sum = True
col_obj.min = True
col_obj.max = True
col_obj.type = datatype
col_obj.datasource = datasource
datasource.generate_metrics_for(col_objs_list)
session.commit()
@property
def perm(self):
return "[{obj.cluster_name}].(id:{obj.id})".format(obj=self)
def get_perm(self):
return self.perm
@property
def name(self):
return self.verbose_name if self.verbose_name else self.cluster_name
@property
def unique_name(self):
return self.verbose_name if self.verbose_name else self.cluster_name
class DruidColumn(Model, BaseColumn):
"""ORM model for storing Druid datasource column metadata"""
__tablename__ = 'columns'
datasource_name = Column(
String(255),
ForeignKey('datasources.datasource_name'))
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('columns', cascade='all, delete-orphan'),
enable_typechecks=False)
dimension_spec_json = Column(Text)
export_fields = (
'datasource_name', 'column_name', 'is_active', 'type', 'groupby',
'count_distinct', 'sum', 'avg', 'max', 'min', 'filterable',
'description', 'dimension_spec_json'
)
def __repr__(self):
return self.column_name
@property
def expression(self):
return self.dimension_spec_json
@property
def dimension_spec(self):
if self.dimension_spec_json:
return json.loads(self.dimension_spec_json)
def get_metrics(self):
metrics = {}
metrics['count'] = DruidMetric(
metric_name='count',
verbose_name='COUNT(*)',
metric_type='count',
json=json.dumps({'type': 'count', 'name': 'count'})
)
# Somehow we need to reassign this for UDAFs
if self.type in ('DOUBLE', 'FLOAT'):
corrected_type = 'DOUBLE'
else:
corrected_type = self.type
if self.sum and self.is_num:
mt = corrected_type.lower() + 'Sum'
name = 'sum__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='sum',
verbose_name='SUM({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.avg and self.is_num:
mt = corrected_type.lower() + 'Avg'
name = 'avg__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='avg',
verbose_name='AVG({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.min and self.is_num:
mt = corrected_type.lower() + 'Min'
name = 'min__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='min',
verbose_name='MIN({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.max and self.is_num:
mt = corrected_type.lower() + 'Max'
name = 'max__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='max',
verbose_name='MAX({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.count_distinct:
name = 'count_distinct__' + self.column_name
if self.type == 'hyperUnique' or self.type == 'thetaSketch':
metrics[name] = DruidMetric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type=self.type,
json=json.dumps({
'type': self.type,
'name': name,
'fieldName': self.column_name
})
)
else:
metrics[name] = DruidMetric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type='count_distinct',
json=json.dumps({
'type': 'cardinality',
'name': name,
'fieldNames': [self.column_name]})
)
return metrics
def generate_metrics(self):
"""Generate metrics based on the column metadata"""
metrics = self.get_metrics()
dbmetrics = (
db.session.query(DruidMetric)
.filter(DruidCluster.cluster_name == self.datasource.cluster_name)
.filter(DruidMetric.datasource_name == self.datasource_name)
.filter(or_(
DruidMetric.metric_name == m for m in metrics
))
)
dbmetrics = {metric.metric_name: metric for metric in dbmetrics}
for metric in metrics.values():
metric.datasource_name = self.datasource_name
if not dbmetrics.get(metric.metric_name, None):
db.session.add(metric)
@classmethod
def import_obj(cls, i_column):
def lookup_obj(lookup_column):
return db.session.query(DruidColumn).filter(
DruidColumn.datasource_name == lookup_column.datasource_name,
DruidColumn.column_name == lookup_column.column_name).first()
return import_util.import_simple_obj(db.session, i_column, lookup_obj)
class DruidMetric(Model, BaseMetric):
"""ORM object referencing Druid metrics for a datasource"""
__tablename__ = 'metrics'
datasource_name = Column(
String(255),
ForeignKey('datasources.datasource_name'))
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('metrics', cascade='all, delete-orphan'),
enable_typechecks=False)
json = Column(Text)
export_fields = (
'metric_name', 'verbose_name', 'metric_type', 'datasource_name',
'json', 'description', 'is_restricted', 'd3format'
)
@property
def expression(self):
return self.json
@property
def json_obj(self):
try:
obj = json.loads(self.json)
except Exception:
obj = {}
return obj
@property
def perm(self):
return (
"{parent_name}.[{obj.metric_name}](id:{obj.id})"
).format(obj=self,
parent_name=self.datasource.full_name
) if self.datasource else None
@classmethod
def import_obj(cls, i_metric):
def lookup_obj(lookup_metric):
return db.session.query(DruidMetric).filter(
DruidMetric.datasource_name == lookup_metric.datasource_name,
DruidMetric.metric_name == lookup_metric.metric_name).first()
return import_util.import_simple_obj(db.session, i_metric, lookup_obj)
class DruidDatasource(Model, BaseDatasource):
"""ORM object referencing Druid datasources (tables)"""
__tablename__ = 'datasources'
type = "druid"
query_langtage = "json"
cluster_class = DruidCluster
metric_class = DruidMetric
column_class = DruidColumn
baselink = "druiddatasourcemodelview"
# Columns
datasource_name = Column(String(255), unique=True)
is_hidden = Column(Boolean, default=False)
fetch_values_from = Column(String(100))
cluster_name = Column(
String(250), ForeignKey('clusters.cluster_name'))
cluster = relationship(
'DruidCluster', backref='datasources', foreign_keys=[cluster_name])
user_id = Column(Integer, ForeignKey('ab_user.id'))
owner = relationship(
sm.user_model,
backref=backref('datasources', cascade='all, delete-orphan'),
foreign_keys=[user_id])
export_fields = (
'datasource_name', 'is_hidden', 'description', 'default_endpoint',
'cluster_name', 'offset', 'cache_timeout', 'params'
)
@property
def database(self):
return self.cluster
@property
def connection(self):
return str(self.database)
@property
def num_cols(self):
return [c.column_name for c in self.columns if c.is_num]
@property
def name(self):
return self.datasource_name
@property
def schema(self):
ds_name = self.datasource_name or ''
name_pieces = ds_name.split('.')
if len(name_pieces) > 1:
return name_pieces[0]
else:
return None
@property
def schema_perm(self):
"""Returns schema permission if present, cluster one otherwise."""
return utils.get_schema_perm(self.cluster, self.schema)
def get_perm(self):
return (
"[{obj.cluster_name}].[{obj.datasource_name}]"
"(id:{obj.id})").format(obj=self)
@property
def link(self):
name = escape(self.datasource_name)
return Markup('<a href="{self.url}">{name}</a>').format(**locals())
@property
def full_name(self):
return utils.get_datasource_full_name(
self.cluster_name, self.datasource_name)
@property
def time_column_grains(self):
return {
"time_columns": [
'all', '5 seconds', '30 seconds', '1 minute',
'5 minutes', '1 hour', '6 hour', '1 day', '7 days',
'week', 'week_starting_sunday', 'week_ending_saturday',
'month',
],
"time_grains": ['now']
}
def __repr__(self):
return self.datasource_name
@renders('datasource_name')
def datasource_link(self):
url = "/superset/explore/{obj.type}/{obj.id}/".format(obj=self)
name = escape(self.datasource_name)
return Markup('<a href="{url}">{name}</a>'.format(**locals()))
def get_metric_obj(self, metric_name):
return [
m.json_obj for m in self.metrics
if m.metric_name == metric_name
][0]
@classmethod
def import_obj(cls, i_datasource, import_time=None):
"""Imports the datasource from the object to the database.
Metrics, columns and the datasource will be overridden if they already exist.
This function can be used to import/export dashboards between multiple
superset instances. Audit metadata isn't copied over.
"""
def lookup_datasource(d):
return db.session.query(DruidDatasource).join(DruidCluster).filter(
DruidDatasource.datasource_name == d.datasource_name,
DruidCluster.cluster_name == d.cluster_name,
).first()
def lookup_cluster(d):
return db.session.query(DruidCluster).filter_by(
cluster_name=d.cluster_name).one()
return import_util.import_datasource(
db.session, i_datasource, lookup_cluster, lookup_datasource,
import_time)
@staticmethod
def version_higher(v1, v2):
"""is v1 higher than v2
>>> DruidDatasource.version_higher('0.8.2', '0.9.1')
False
>>> DruidDatasource.version_higher('0.8.2', '0.6.1')
True
>>> DruidDatasource.version_higher('0.8.2', '0.8.2')
False
>>> DruidDatasource.version_higher('0.8.2', '0.9.BETA')
False
>>> DruidDatasource.version_higher('0.8.2', '0.9')
False
"""
def int_or_0(v):
try:
v = int(v)
except (TypeError, ValueError):
v = 0
return v
v1nums = [int_or_0(n) for n in v1.split('.')]
v2nums = [int_or_0(n) for n in v2.split('.')]
v1nums = (v1nums + [0, 0, 0])[:3]
v2nums = (v2nums + [0, 0, 0])[:3]
return v1nums[0] > v2nums[0] or \
(v1nums[0] == v2nums[0] and v1nums[1] > v2nums[1]) or \
(v1nums[0] == v2nums[0] and v1nums[1] == v2nums[1] and v1nums[2] > v2nums[2])
def latest_metadata(self):
"""Returns segment metadata from the latest segment"""
logging.info("Syncing datasource [{}]".format(self.datasource_name))
client = self.cluster.get_pydruid_client()
results = client.time_boundary(datasource=self.datasource_name)
if not results:
return
max_time = results[0]['result']['maxTime']
max_time = dparse(max_time)
# Query segmentMetadata for 7 days back. However, due to a bug,
# we need to set this interval to more than 1 day ago to exclude
# realtime segments, which triggered a bug (fixed in druid 0.8.2).
# https://groups.google.com/forum/#!topic/druid-user/gVCqqspHqOQ
lbound = (max_time - timedelta(days=7)).isoformat()
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
rbound = (max_time - timedelta(1)).isoformat()
else:
rbound = max_time.isoformat()
segment_metadata = None
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=lbound + '/' + rbound,
merge=self.merge_flag,
analysisTypes=[])
except Exception as e:
logging.warning("Failed first attempt to get latest segment")
logging.exception(e)
if not segment_metadata:
# if no segments in the past 7 days, look at all segments
lbound = datetime(1901, 1, 1).isoformat()[:10]
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
rbound = datetime.now().isoformat()
else:
rbound = datetime(2050, 1, 1).isoformat()[:10]
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=lbound + '/' + rbound,
merge=self.merge_flag,
analysisTypes=[])
except Exception as e:
logging.warning("Failed 2nd attempt to get latest segment")
logging.exception(e)
if segment_metadata:
return segment_metadata[-1]['columns']
def generate_metrics(self):
self.generate_metrics_for(self.columns)
def generate_metrics_for(self, columns):
metrics = {}
for col in columns:
metrics.update(col.get_metrics())
dbmetrics = (
db.session.query(DruidMetric)
.filter(DruidCluster.cluster_name == self.cluster_name)
.filter(DruidMetric.datasource_name == self.datasource_name)
.filter(or_(DruidMetric.metric_name == m for m in metrics))
)
dbmetrics = {metric.metric_name: metric for metric in dbmetrics}
for metric in metrics.values():
metric.datasource_name = self.datasource_name
if not dbmetrics.get(metric.metric_name, None):
with db.session.no_autoflush:
db.session.add(metric)
@classmethod
def sync_to_db_from_config(
cls,
druid_config,
user,
cluster,
refresh=True):
"""Merges the ds config from druid_config into one stored in the db."""
session = db.session
datasource = (
session.query(cls)
.filter_by(datasource_name=druid_config['name'])
.first()
)
# Create a new datasource.
if not datasource:
datasource = cls(
datasource_name=druid_config['name'],
cluster=cluster,
owner=user,
changed_by_fk=user.id,
created_by_fk=user.id,
)
session.add(datasource)
elif not refresh:
return
dimensions = druid_config['dimensions']
col_objs = (
session.query(DruidColumn)
.filter(DruidColumn.datasource_name == druid_config['name'])
.filter(or_(DruidColumn.column_name == dim for dim in dimensions))
)
col_objs = {col.column_name: col for col in col_objs}
for dim in dimensions:
col_obj = col_objs.get(dim, None)
if not col_obj:
col_obj = DruidColumn(
datasource_name=druid_config['name'],
column_name=dim,
groupby=True,
filterable=True,
# TODO: fetch type from Hive.
type="STRING",
datasource=datasource,
)
session.add(col_obj)
# Import Druid metrics
metric_objs = (
session.query(DruidMetric)
.filter(DruidMetric.datasource_name == druid_config['name'])
.filter(or_(DruidMetric.metric_name == spec['name']
for spec in druid_config["metrics_spec"]))
)
metric_objs = {metric.metric_name: metric for metric in metric_objs}
for metric_spec in druid_config["metrics_spec"]:
metric_name = metric_spec["name"]
metric_type = metric_spec["type"]
metric_json = json.dumps(metric_spec)
if metric_type == "count":
metric_type = "longSum"
metric_json = json.dumps({
"type": "longSum",
"name": metric_name,
"fieldName": metric_name,
})
metric_obj = metric_objs.get(metric_name, None)
if not metric_obj:
metric_obj = DruidMetric(
metric_name=metric_name,
metric_type=metric_type,
verbose_name="%s(%s)" % (metric_type, metric_name),
datasource=datasource,
json=metric_json,
description=(
"Imported from the airolap config dir for %s" %
druid_config['name']),
)
session.add(metric_obj)
session.commit()
@staticmethod
def time_offset(granularity):
if granularity == 'week_ending_saturday':
return 6 * 24 * 3600 * 1000 # 6 days
return 0
# uses https://en.wikipedia.org/wiki/ISO_8601
# http://druid.io/docs/0.8.0/querying/granularities.html
# TODO: pass origin from the UI
@staticmethod
def granularity(period_name, timezone=None, origin=None):
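# Map a human-friendly period name onto a Druid granularity spec,
# falling back to an ISO 8601 period or a millisecond duration for
# free-form input.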
if not period_name or period_name == 'all':
return 'all'
iso_8601_dict = {
'5 seconds': 'PT5S',
'30 seconds': 'PT30S',
'1 minute': 'PT1M',
'5 minutes': 'PT5M',
'1 hour': 'PT1H',
'6 hour': 'PT6H',
'one day': 'P1D',
'1 day': 'P1D',
'7 days': 'P7D',
'week': 'P1W',
'week_starting_sunday': 'P1W',
'week_ending_saturday': 'P1W',
'month': 'P1M',
}
granularity = {'type': 'period'}
if timezone:
granularity['timeZone'] = timezone
if origin:
dttm = utils.parse_human_datetime(origin)
granularity['origin'] = dttm.isoformat()
if period_name in iso_8601_dict:
granularity['period'] = iso_8601_dict[period_name]
if period_name in ('week_ending_saturday', 'week_starting_sunday'):
# use Sunday as start of the week
granularity['origin'] = '2016-01-03T00:00:00'
elif not isinstance(period_name, string_types):
granularity['type'] = 'duration'
granularity['duration'] = period_name
elif period_name.startswith('P'):
# identify if the string is the iso_8601 period
granularity['period'] = period_name
else:
granularity['type'] = 'duration'
granularity['duration'] = utils.parse_human_timedelta(
period_name).total_seconds() * 1000
return granularity
@staticmethod
def _metrics_and_post_aggs(metrics, metrics_dict):
all_metrics = []
post_aggs = {}
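# Split the requested metrics into plain Druid aggregations and
# post-aggregations; for post-aggs, recursively collect every field
# they reference so those aggregations are shipped with the query too.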
def recursive_get_fields(_conf):
_type = _conf.get('type')
_field = _conf.get('field')
_fields = _conf.get('fields')
field_names = []
if _type in ['fieldAccess', 'hyperUniqueCardinality',
'quantile', 'quantiles']:
field_names.append(_conf.get('fieldName', ''))
if _field:
field_names += recursive_get_fields(_field)
if _fields:
for _f in _fields:
field_names += recursive_get_fields(_f)
return list(set(field_names))
for metric_name in metrics:
metric = metrics_dict[metric_name]
if metric.metric_type != 'postagg':
all_metrics.append(metric_name)
else:
mconf = metric.json_obj
all_metrics += recursive_get_fields(mconf)
all_metrics += mconf.get('fieldNames', [])
if mconf.get('type') == 'javascript':
post_aggs[metric_name] = JavascriptPostAggregator(
name=mconf.get('name', ''),
field_names=mconf.get('fieldNames', []),
function=mconf.get('function', ''))
elif mconf.get('type') == 'quantile':
post_aggs[metric_name] = Quantile(
mconf.get('name', ''),
mconf.get('probability', ''),
)
elif mconf.get('type') == 'quantiles':
post_aggs[metric_name] = Quantiles(
mconf.get('name', ''),
mconf.get('probabilities', ''),
)
elif mconf.get('type') == 'fieldAccess':
post_aggs[metric_name] = Field(mconf.get('name'))
elif mconf.get('type') == 'constant':
post_aggs[metric_name] = Const(
mconf.get('value'),
output_name=mconf.get('name', '')
)
elif mconf.get('type') == 'hyperUniqueCardinality':
post_aggs[metric_name] = HyperUniqueCardinality(
mconf.get('name')
)
elif mconf.get('type') == 'arithmetic':
post_aggs[metric_name] = Postaggregator(
mconf.get('fn', "/"),
mconf.get('fields', []),
mconf.get('name', ''))
else:
post_aggs[metric_name] = CustomPostAggregator(
mconf.get('name', ''),
mconf)
return all_metrics, post_aggs
def values_for_column(self,
column_name,
limit=10000):
"""Retrieve some values for the given column"""
# TODO: Use Lexicographic TopNMetricSpec once supported by PyDruid
if self.fetch_values_from:
from_dttm = utils.parse_human_datetime(self.fetch_values_from)
else:
from_dttm = datetime(1970, 1, 1)
qry = dict(
datasource=self.datasource_name,
granularity="all",
intervals=from_dttm.isoformat() + '/' + datetime.now().isoformat(),
aggregations=dict(count=count("count")),
dimension=column_name,
metric="count",
threshold=limit,
)
client = self.cluster.get_pydruid_client()
client.topn(**qry)
df = client.export_pandas()
return [row[column_name] for row in df.to_records(index=False)]
def get_query_str(self, query_obj, phase=1, client=None):
return self.run_query(client=client, phase=phase, **query_obj)
def _add_filter_from_pre_query_data(self, df, dimensions, dim_filter):
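# Restrict phase two to the dimension/value combinations returned by
# the phase-one query: AND the values within each row, OR the rows
# together, then AND the result with any pre-existing filter.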
ret = dim_filter
if df is not None and not df.empty:
new_filters = []
for unused, row in df.iterrows():
fields = []
for dim in dimensions:
f = Dimension(dim) == row[dim]
fields.append(f)
if len(fields) > 1:
term = Filter(type="and", fields=fields)
new_filters.append(term)
elif fields:
new_filters.append(fields[0])
if new_filters:
ff = Filter(type="or", fields=new_filters)
if not dim_filter:
ret = ff
else:
ret = Filter(type="and", fields=[ff, dim_filter])
return ret
def run_query( # noqa / druid
self,
groupby, metrics,
granularity,
from_dttm, to_dttm,
filter=None, # noqa
is_timeseries=True,
timeseries_limit=None,
timeseries_limit_metric=None,
row_limit=None,
inner_from_dttm=None, inner_to_dttm=None,
orderby=None,
extras=None, # noqa
select=None, # noqa
columns=None, phase=2, client=None, form_data=None,
order_desc=True):
"""Runs a query against Druid and returns a dataframe.
"""
# TODO refactor into using a TBD Query object
client = client or self.cluster.get_pydruid_client()
if not is_timeseries:
granularity = 'all'
inner_from_dttm = inner_from_dttm or from_dttm
inner_to_dttm = inner_to_dttm or to_dttm
# add tzinfo to native datetime with config
from_dttm = from_dttm.replace(tzinfo=DRUID_TZ)
to_dttm = to_dttm.replace(tzinfo=DRUID_TZ)
timezone = from_dttm.tzname()
query_str = ""
metrics_dict = {m.metric_name: m for m in self.metrics}
columns_dict = {c.column_name: c for c in self.columns}
all_metrics, post_aggs = self._metrics_and_post_aggs(
metrics,
metrics_dict)
aggregations = OrderedDict()
for m in self.metrics:
if m.metric_name in all_metrics:
aggregations[m.metric_name] = m.json_obj
rejected_metrics = [
m.metric_name for m in self.metrics
if m.is_restricted and
m.metric_name in aggregations.keys() and
not sm.has_access('metric_access', m.perm)
]
if rejected_metrics:
raise MetricPermException(
"Access to the metrics denied: " + ', '.join(rejected_metrics)
)
# the dimensions list with dimensionSpecs expanded
dimensions = []
groupby = [gb for gb in groupby if gb in columns_dict]
for column_name in groupby:
col = columns_dict.get(column_name)
dim_spec = col.dimension_spec
if dim_spec:
dimensions.append(dim_spec)
else:
dimensions.append(column_name)
qry = dict(
datasource=self.datasource_name,
dimensions=dimensions,
aggregations=aggregations,
granularity=DruidDatasource.granularity(
granularity,
timezone=timezone,
origin=extras.get('druid_time_origin'),
),
post_aggregations=post_aggs,
intervals=from_dttm.isoformat() + '/' + to_dttm.isoformat(),
)
filters = self.get_filters(filter)
if filters:
qry['filter'] = filters
having_filters = self.get_having_filters(extras.get('having_druid'))
if having_filters:
qry['having'] = having_filters
order_direction = "descending" if order_desc else "ascending"
if len(groupby) == 0 and not having_filters:
del qry['dimensions']
client.timeseries(**qry)
if not having_filters and len(groupby) == 1 and order_desc:
dim = list(qry.get('dimensions'))[0]
if timeseries_limit_metric:
order_by = timeseries_limit_metric
else:
order_by = list(qry['aggregations'].keys())[0]
# Limit the number of time series by running a two-phase query
pre_qry = deepcopy(qry)
pre_qry['granularity'] = "all"
pre_qry['threshold'] = min(row_limit,
timeseries_limit or row_limit)
pre_qry['metric'] = order_by
pre_qry['dimension'] = dim
del pre_qry['dimensions']
client.topn(**pre_qry)
query_str += "// Two phase query\n// Phase 1\n"
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
query_str += "\n"
if phase == 1:
return query_str
query_str += (
"//\nPhase 2 (built based on phase one's results)\n")
df = client.export_pandas()
qry['filter'] = self._add_filter_from_pre_query_data(
df,
qry['dimensions'], filters)
qry['threshold'] = timeseries_limit or 1000
if row_limit and granularity == 'all':
qry['threshold'] = row_limit
qry['dimension'] = dim
del qry['dimensions']
qry['metric'] = list(qry['aggregations'].keys())[0]
client.topn(**qry)
elif len(groupby) > 1 or having_filters or not order_desc:
# If grouping on multiple fields or using a having filter
# we have to force a groupby query
if timeseries_limit and is_timeseries:
order_by = metrics[0] if metrics else self.metrics[0]
if timeseries_limit_metric:
order_by = timeseries_limit_metric
# Limit the number of time series by running a two-phase query
pre_qry = deepcopy(qry)
pre_qry['granularity'] = "all"
pre_qry['limit_spec'] = {
"type": "default",
"limit": min(timeseries_limit, row_limit),
'intervals': (
inner_from_dttm.isoformat() + '/' +
inner_to_dttm.isoformat()),
"columns": [{
"dimension": order_by,
"direction": order_direction,
}],
}
client.groupby(**pre_qry)
query_str += "// Two phase query\n// Phase 1\n"
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
query_str += "\n"
if phase == 1:
return query_str
query_str += (
"//\nPhase 2 (built based on phase one's results)\n")
df = client.export_pandas()
qry['filter'] = self._add_filter_from_pre_query_data(
df,
qry['dimensions'], filters)
qry['limit_spec'] = None
if row_limit:
qry['limit_spec'] = {
"type": "default",
"limit": row_limit,
"columns": [{
"dimension": (
metrics[0] if metrics else self.metrics[0]),
"direction": order_direction,
}],
}
client.groupby(**qry)
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
return query_str
def query(self, query_obj):
qry_start_dttm = datetime.now()
client = self.cluster.get_pydruid_client()
query_str = self.get_query_str(
client=client, query_obj=query_obj, phase=2)
df = client.export_pandas()
if df is None or df.size == 0:
raise Exception(_("No data was returned."))
df.columns = [
DTTM_ALIAS if c == 'timestamp' else c for c in df.columns]
is_timeseries = query_obj['is_timeseries'] \
if 'is_timeseries' in query_obj else True
if (
not is_timeseries and
DTTM_ALIAS in df.columns):
del df[DTTM_ALIAS]
# Reordering columns
cols = []
if DTTM_ALIAS in df.columns:
cols += [DTTM_ALIAS]
cols += [col for col in query_obj['groupby'] if col in df.columns]
cols += [col for col in query_obj['metrics'] if col in df.columns]
df = df[cols]
time_offset = DruidDatasource.time_offset(query_obj['granularity'])
def increment_timestamp(ts):
dt = utils.parse_human_datetime(ts).replace(
tzinfo=DRUID_TZ)
return dt + timedelta(milliseconds=time_offset)
if DTTM_ALIAS in df.columns and time_offset:
df[DTTM_ALIAS] = df[DTTM_ALIAS].apply(increment_timestamp)
return QueryResult(
df=df,
query=query_str,
duration=datetime.now() - qry_start_dttm)
def get_filters(self, raw_filters): # noqa
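# Translate Superset filter dicts ({'col', 'op', 'val'}) into pydruid
# Dimension/Filter objects, AND-ing successive conditions together.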
filters = None
for flt in raw_filters:
if not all(f in flt for f in ['col', 'op', 'val']):
continue
col = flt['col']
op = flt['op']
eq = flt['val']
cond = None
if op in ('in', 'not in'):
eq = [
v.replace("'", '').strip()
if isinstance(v, string_types)
else v
for v in eq]
elif not isinstance(flt['val'], string_types):
eq = eq[0] if len(eq) > 0 else ''
if col in self.num_cols:
if op in ('in', 'not in'):
eq = [utils.string_to_num(v) for v in eq]
else:
eq = utils.string_to_num(eq)
if op == '==':
cond = Dimension(col) == eq
elif op == '!=':
cond = ~(Dimension(col) == eq)
elif op in ('in', 'not in'):
fields = []
if len(eq) > 1:
for s in eq:
fields.append(Dimension(col) == s)
cond = Filter(type="or", fields=fields)
elif len(eq) == 1:
cond = Dimension(col) == eq[0]
if op == 'not in':
cond = ~cond
elif op == 'regex':
cond = Filter(type="regex", pattern=eq, dimension=col)
elif op == '>=':
cond = Dimension(col) >= eq
elif op == '<=':
cond = Dimension(col) <= eq
elif op == '>':
cond = Dimension(col) > eq
elif op == '<':
cond = Dimension(col) < eq
if filters:
filters = Filter(type="and", fields=[
cond,
filters
])
else:
filters = cond
return filters
def _get_having_obj(self, col, op, eq):
cond = None
if op == '==':
if col in self.column_names:
cond = DimSelector(dimension=col, value=eq)
else:
cond = Aggregation(col) == eq
elif op == '>':
cond = Aggregation(col) > eq
elif op == '<':
cond = Aggregation(col) < eq
return cond
def get_having_filters(self, raw_filters):
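# Builds Druid 'having' clauses; ops without a direct equivalent
# (!=, >=, <=) are expressed by negating the reversed comparison
# from reversed_op_map.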
filters = None
reversed_op_map = {
'!=': '==',
'>=': '<',
'<=': '>'
}
for flt in raw_filters:
if not all(f in flt for f in ['col', 'op', 'val']):
continue
col = flt['col']
op = flt['op']
eq = flt['val']
cond = None
if op in ['==', '>', '<']:
cond = self._get_having_obj(col, op, eq)
elif op in reversed_op_map:
cond = ~self._get_having_obj(col, reversed_op_map[op], eq)
if filters:
filters = filters & cond
else:
filters = cond
return filters
@classmethod
def query_datasources_by_name(
cls, session, database, datasource_name, schema=None):
return (
session.query(cls)
.filter_by(cluster_name=database.cluster_name)
.filter_by(datasource_name=datasource_name)
.all()
)
sa.event.listen(DruidDatasource, 'after_insert', set_perm)
sa.event.listen(DruidDatasource, 'after_update', set_perm)
| superset/connectors/druid/models.py | 1 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.9929569959640503,
0.008503100834786892,
0.00016301828145515174,
0.00017144923913292587,
0.08950242400169373
]
|
{
"id": 3,
"code_window": [
"from flask_appbuilder.models.decorators import renders\n",
"from flask_appbuilder import Model\n",
"\n",
"from flask_babel import lazy_gettext as _\n",
"\n",
"from superset import conf, db, import_util, utils, sm, get_session\n",
"from superset.utils import (\n",
" flasher, MetricPermException, DimSelector, DTTM_ALIAS\n",
")\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"from superset import conf, db, import_util, utils, sm\n"
],
"file_path": "superset/connectors/druid/models.py",
"type": "replace",
"edit_start_line_idx": 32
} | .world_map svg {
background-color: #feffff;
}
.world_map {
position: relative;
}
| superset/assets/visualizations/world_map.css | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.000176135785295628,
0.000176135785295628,
0.000176135785295628,
0.000176135785295628,
0
]
|
{
"id": 3,
"code_window": [
"from flask_appbuilder.models.decorators import renders\n",
"from flask_appbuilder import Model\n",
"\n",
"from flask_babel import lazy_gettext as _\n",
"\n",
"from superset import conf, db, import_util, utils, sm, get_session\n",
"from superset.utils import (\n",
" flasher, MetricPermException, DimSelector, DTTM_ALIAS\n",
")\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"from superset import conf, db, import_util, utils, sm\n"
],
"file_path": "superset/connectors/druid/models.py",
"type": "replace",
"edit_start_line_idx": 32
} | export const user = {
username: 'alpha',
roles: {
Alpha: [
[
'can_this_form_post',
'ResetMyPasswordView',
],
[
'can_this_form_get',
'ResetMyPasswordView',
],
[
'can_this_form_post',
'UserInfoEditView',
],
[
'can_this_form_get',
'UserInfoEditView',
],
],
sql_lab: [
[
'menu_access',
'SQL Lab',
],
[
'can_sql_json',
'Superset',
],
[
'can_search_queries',
'Superset',
],
[
'can_csv',
'Superset',
],
],
},
firstName: 'alpha',
lastName: 'alpha',
createdOn: '2016-11-11T12:34:17',
userId: 5,
email: '[email protected]',
isActive: true,
permissions: {
datasource_access: ['table1', 'table2'],
database_access: ['db1', 'db2', 'db3'],
},
};
export const userNoPerms = Object.assign({}, user, { permissions: {} });
| superset/assets/spec/javascripts/profile/fixtures.jsx | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.00017658494471106678,
0.00017364071391057223,
0.00017067493172362447,
0.00017370685236528516,
0.00000199800933842198
]
|
{
"id": 3,
"code_window": [
"from flask_appbuilder.models.decorators import renders\n",
"from flask_appbuilder import Model\n",
"\n",
"from flask_babel import lazy_gettext as _\n",
"\n",
"from superset import conf, db, import_util, utils, sm, get_session\n",
"from superset.utils import (\n",
" flasher, MetricPermException, DimSelector, DTTM_ALIAS\n",
")\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"from superset import conf, db, import_util, utils, sm\n"
],
"file_path": "superset/connectors/druid/models.py",
"type": "replace",
"edit_start_line_idx": 32
} | """Unit tests for Superset"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from sqlalchemy.orm.session import make_transient
import json
import pickle
import unittest
from superset import db
from superset.models import core as models
from superset.connectors.druid.models import (
DruidDatasource, DruidColumn, DruidMetric)
from superset.connectors.sqla.models import SqlaTable, TableColumn, SqlMetric
from .base_tests import SupersetTestCase
class ImportExportTests(SupersetTestCase):
"""Testing export import functionality for dashboards"""
def __init__(self, *args, **kwargs):
super(ImportExportTests, self).__init__(*args, **kwargs)
@classmethod
def delete_imports(cls):
# Imported data clean up
session = db.session
for slc in session.query(models.Slice):
if 'remote_id' in slc.params_dict:
session.delete(slc)
for dash in session.query(models.Dashboard):
if 'remote_id' in dash.params_dict:
session.delete(dash)
for table in session.query(SqlaTable):
if 'remote_id' in table.params_dict:
session.delete(table)
for datasource in session.query(DruidDatasource):
if 'remote_id' in datasource.params_dict:
session.delete(datasource)
session.commit()
@classmethod
def setUpClass(cls):
cls.delete_imports()
@classmethod
def tearDownClass(cls):
cls.delete_imports()
def create_slice(self, name, ds_id=None, id=None, db_name='main',
table_name='wb_health_population'):
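# Test helper: builds a Slice whose params embed the remote_id and
# datasource lookup info exercised by the import/export round trips;
# ds_id is resolved from table_name when not passed explicitly.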
params = {
'num_period_compare': '10',
'remote_id': id,
'datasource_name': table_name,
'database_name': db_name,
'schema': '',
# Test for trailing commas
"metrics": [
"sum__signup_attempt_email",
"sum__signup_attempt_facebook",
],
}
if table_name and not ds_id:
table = self.get_table_by_name(table_name)
if table:
ds_id = table.id
return models.Slice(
slice_name=name,
datasource_type='table',
viz_type='bubble',
params=json.dumps(params),
datasource_id=ds_id,
id=id
)
def create_dashboard(self, title, id=0, slcs=[]):
json_metadata = {'remote_id': id}
return models.Dashboard(
id=id,
dashboard_title=title,
slices=slcs,
position_json='{"size_y": 2, "size_x": 2}',
slug='{}_imported'.format(title.lower()),
json_metadata=json.dumps(json_metadata)
)
def create_table(
self, name, schema='', id=0, cols_names=[], metric_names=[]):
params = {'remote_id': id, 'database_name': 'main'}
table = SqlaTable(
id=id,
schema=schema,
table_name=name,
params=json.dumps(params)
)
for col_name in cols_names:
table.columns.append(
TableColumn(column_name=col_name))
for metric_name in metric_names:
table.metrics.append(SqlMetric(metric_name=metric_name))
return table
def create_druid_datasource(
self, name, id=0, cols_names=[], metric_names=[]):
params = {'remote_id': id, 'database_name': 'druid_test'}
datasource = DruidDatasource(
id=id,
datasource_name=name,
cluster_name='druid_test',
params=json.dumps(params)
)
for col_name in cols_names:
datasource.columns.append(
DruidColumn(column_name=col_name))
for metric_name in metric_names:
datasource.metrics.append(DruidMetric(
metric_name=metric_name))
return datasource
def get_slice(self, slc_id):
return db.session.query(models.Slice).filter_by(id=slc_id).first()
def get_slice_by_name(self, name):
return db.session.query(models.Slice).filter_by(
slice_name=name).first()
def get_dash(self, dash_id):
return db.session.query(models.Dashboard).filter_by(
id=dash_id).first()
def get_dash_by_slug(self, dash_slug):
return db.session.query(models.Dashboard).filter_by(
slug=dash_slug).first()
def get_datasource(self, datasource_id):
return db.session.query(DruidDatasource).filter_by(
id=datasource_id).first()
def get_table_by_name(self, name):
return db.session.query(SqlaTable).filter_by(
table_name=name).first()
def assert_dash_equals(self, expected_dash, actual_dash,
check_position=True):
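# Dashboards compare equal when slug, title and the name-sorted slice
# lists match; position_json is only compared when check_position is True.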
self.assertEquals(expected_dash.slug, actual_dash.slug)
self.assertEquals(
expected_dash.dashboard_title, actual_dash.dashboard_title)
self.assertEquals(
len(expected_dash.slices), len(actual_dash.slices))
expected_slices = sorted(
expected_dash.slices, key=lambda s: s.slice_name)
actual_slices = sorted(
actual_dash.slices, key=lambda s: s.slice_name)
for e_slc, a_slc in zip(expected_slices, actual_slices):
self.assert_slice_equals(e_slc, a_slc)
if check_position:
self.assertEquals(
expected_dash.position_json, actual_dash.position_json)
def assert_table_equals(self, expected_ds, actual_ds):
self.assertEquals(expected_ds.table_name, actual_ds.table_name)
self.assertEquals(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEquals(expected_ds.schema, actual_ds.schema)
self.assertEquals(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEquals(len(expected_ds.columns), len(actual_ds.columns))
self.assertEquals(
set([c.column_name for c in expected_ds.columns]),
set([c.column_name for c in actual_ds.columns]))
self.assertEquals(
set([m.metric_name for m in expected_ds.metrics]),
set([m.metric_name for m in actual_ds.metrics]))
def assert_datasource_equals(self, expected_ds, actual_ds):
self.assertEquals(
expected_ds.datasource_name, actual_ds.datasource_name)
self.assertEquals(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEquals(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEquals(len(expected_ds.columns), len(actual_ds.columns))
self.assertEquals(
set([c.column_name for c in expected_ds.columns]),
set([c.column_name for c in actual_ds.columns]))
self.assertEquals(
set([m.metric_name for m in expected_ds.metrics]),
set([m.metric_name for m in actual_ds.metrics]))
def assert_slice_equals(self, expected_slc, actual_slc):
self.assertEquals(expected_slc.slice_name, actual_slc.slice_name)
self.assertEquals(
expected_slc.datasource_type, actual_slc.datasource_type)
self.assertEquals(expected_slc.viz_type, actual_slc.viz_type)
self.assertEquals(
json.loads(expected_slc.params), json.loads(actual_slc.params))
def test_export_1_dashboard(self):
birth_dash = self.get_dash_by_slug('births')
export_dash_url = (
'/dashboardmodelview/export_dashboards_form?id={}&action=go'
.format(birth_dash.id)
)
resp = self.client.get(export_dash_url)
exported_dashboards = pickle.loads(resp.data)['dashboards']
self.assert_dash_equals(birth_dash, exported_dashboards[0])
self.assertEquals(
birth_dash.id,
json.loads(exported_dashboards[0].json_metadata)['remote_id'])
exported_tables = pickle.loads(resp.data)['datasources']
self.assertEquals(1, len(exported_tables))
self.assert_table_equals(
self.get_table_by_name('birth_names'), exported_tables[0])
def test_export_2_dashboards(self):
birth_dash = self.get_dash_by_slug('births')
world_health_dash = self.get_dash_by_slug('world_health')
export_dash_url = (
'/dashboardmodelview/export_dashboards_form?id={}&id={}&action=go'
.format(birth_dash.id, world_health_dash.id))
resp = self.client.get(export_dash_url)
exported_dashboards = sorted(pickle.loads(resp.data)['dashboards'],
key=lambda d: d.dashboard_title)
self.assertEquals(2, len(exported_dashboards))
self.assert_dash_equals(birth_dash, exported_dashboards[0])
self.assertEquals(
birth_dash.id,
json.loads(exported_dashboards[0].json_metadata)['remote_id']
)
self.assert_dash_equals(world_health_dash, exported_dashboards[1])
self.assertEquals(
world_health_dash.id,
json.loads(exported_dashboards[1].json_metadata)['remote_id']
)
exported_tables = sorted(
pickle.loads(resp.data)['datasources'], key=lambda t: t.table_name)
self.assertEquals(2, len(exported_tables))
self.assert_table_equals(
self.get_table_by_name('birth_names'), exported_tables[0])
self.assert_table_equals(
self.get_table_by_name('wb_health_population'), exported_tables[1])
def test_import_1_slice(self):
expected_slice = self.create_slice('Import Me', id=10001)
slc_id = models.Slice.import_obj(expected_slice, import_time=1989)
slc = self.get_slice(slc_id)
self.assertEquals(slc.datasource.perm, slc.perm)
self.assert_slice_equals(expected_slice, slc)
table_id = self.get_table_by_name('wb_health_population').id
self.assertEquals(table_id, self.get_slice(slc_id).datasource_id)
def test_import_2_slices_for_same_table(self):
table_id = self.get_table_by_name('wb_health_population').id
# table_id != 666, import func will have to find the table
slc_1 = self.create_slice('Import Me 1', ds_id=666, id=10002)
slc_id_1 = models.Slice.import_obj(slc_1)
slc_2 = self.create_slice('Import Me 2', ds_id=666, id=10003)
slc_id_2 = models.Slice.import_obj(slc_2)
imported_slc_1 = self.get_slice(slc_id_1)
imported_slc_2 = self.get_slice(slc_id_2)
self.assertEquals(table_id, imported_slc_1.datasource_id)
self.assert_slice_equals(slc_1, imported_slc_1)
self.assertEquals(imported_slc_1.datasource.perm, imported_slc_1.perm)
self.assertEquals(table_id, imported_slc_2.datasource_id)
self.assert_slice_equals(slc_2, imported_slc_2)
self.assertEquals(imported_slc_2.datasource.perm, imported_slc_2.perm)
def test_import_slices_for_non_existent_table(self):
with self.assertRaises(IndexError):
models.Slice.import_obj(self.create_slice(
'Import Me 3', id=10004, table_name='non_existent'))
def test_import_slices_override(self):
slc = self.create_slice('Import Me New', id=10005)
slc_1_id = models.Slice.import_obj(slc, import_time=1990)
slc.slice_name = 'Import Me New'
slc_2_id = models.Slice.import_obj(
self.create_slice('Import Me New', id=10005), import_time=1990)
self.assertEquals(slc_1_id, slc_2_id)
imported_slc = self.get_slice(slc_2_id)
self.assert_slice_equals(slc, imported_slc)
def test_import_empty_dashboard(self):
empty_dash = self.create_dashboard('empty_dashboard', id=10001)
imported_dash_id = models.Dashboard.import_obj(
empty_dash, import_time=1989)
imported_dash = self.get_dash(imported_dash_id)
self.assert_dash_equals(
empty_dash, imported_dash, check_position=False)
def test_import_dashboard_1_slice(self):
slc = self.create_slice('health_slc', id=10006)
dash_with_1_slice = self.create_dashboard(
'dash_with_1_slice', slcs=[slc], id=10002)
dash_with_1_slice.position_json = """
[{{
"col": 5,
"row": 10,
"size_x": 4,
"size_y": 2,
"slice_id": "{}"
}}]
""".format(slc.id)
imported_dash_id = models.Dashboard.import_obj(
dash_with_1_slice, import_time=1990)
imported_dash = self.get_dash(imported_dash_id)
expected_dash = self.create_dashboard(
'dash_with_1_slice', slcs=[slc], id=10002)
make_transient(expected_dash)
self.assert_dash_equals(
expected_dash, imported_dash, check_position=False)
self.assertEquals({"remote_id": 10002, "import_time": 1990},
json.loads(imported_dash.json_metadata))
expected_position = dash_with_1_slice.position_array
expected_position[0]['slice_id'] = '{}'.format(
imported_dash.slices[0].id)
self.assertEquals(expected_position, imported_dash.position_array)
def test_import_dashboard_2_slices(self):
e_slc = self.create_slice('e_slc', id=10007, table_name='energy_usage')
b_slc = self.create_slice('b_slc', id=10008, table_name='birth_names')
dash_with_2_slices = self.create_dashboard(
'dash_with_2_slices', slcs=[e_slc, b_slc], id=10003)
dash_with_2_slices.json_metadata = json.dumps({
"remote_id": 10003,
"filter_immune_slices": ["{}".format(e_slc.id)],
"expanded_slices": {
"{}".format(e_slc.id): True,
"{}".format(b_slc.id): False
}
})
imported_dash_id = models.Dashboard.import_obj(
dash_with_2_slices, import_time=1991)
imported_dash = self.get_dash(imported_dash_id)
expected_dash = self.create_dashboard(
'dash_with_2_slices', slcs=[e_slc, b_slc], id=10003)
make_transient(expected_dash)
self.assert_dash_equals(
imported_dash, expected_dash, check_position=False)
i_e_slc = self.get_slice_by_name('e_slc')
i_b_slc = self.get_slice_by_name('b_slc')
expected_json_metadata = {
"remote_id": 10003,
"import_time": 1991,
"filter_immune_slices": ["{}".format(i_e_slc.id)],
"expanded_slices": {
'{}'.format(i_e_slc.id): True,
'{}'.format(i_b_slc.id): False
}
}
self.assertEquals(expected_json_metadata,
json.loads(imported_dash.json_metadata))
def test_import_override_dashboard_2_slices(self):
e_slc = self.create_slice('e_slc', id=10009, table_name='energy_usage')
b_slc = self.create_slice('b_slc', id=10010, table_name='birth_names')
dash_to_import = self.create_dashboard(
'override_dashboard', slcs=[e_slc, b_slc], id=10004)
imported_dash_id_1 = models.Dashboard.import_obj(
dash_to_import, import_time=1992)
# create new instances of the slices
e_slc = self.create_slice(
'e_slc', id=10009, table_name='energy_usage')
b_slc = self.create_slice(
'b_slc', id=10010, table_name='birth_names')
c_slc = self.create_slice('c_slc', id=10011, table_name='birth_names')
dash_to_import_override = self.create_dashboard(
'override_dashboard_new', slcs=[e_slc, b_slc, c_slc], id=10004)
imported_dash_id_2 = models.Dashboard.import_obj(
dash_to_import_override, import_time=1992)
# override doesn't change the id
self.assertEquals(imported_dash_id_1, imported_dash_id_2)
expected_dash = self.create_dashboard(
'override_dashboard_new', slcs=[e_slc, b_slc, c_slc], id=10004)
make_transient(expected_dash)
imported_dash = self.get_dash(imported_dash_id_2)
self.assert_dash_equals(
expected_dash, imported_dash, check_position=False)
self.assertEquals({"remote_id": 10004, "import_time": 1992},
json.loads(imported_dash.json_metadata))
def test_import_table_no_metadata(self):
table = self.create_table('pure_table', id=10001)
imported_id = SqlaTable.import_obj(table, import_time=1989)
imported = self.get_table(imported_id)
self.assert_table_equals(table, imported)
def test_import_table_1_col_1_met(self):
table = self.create_table(
'table_1_col_1_met', id=10002,
cols_names=["col1"], metric_names=["metric1"])
imported_id = SqlaTable.import_obj(table, import_time=1990)
imported = self.get_table(imported_id)
self.assert_table_equals(table, imported)
self.assertEquals(
{'remote_id': 10002, 'import_time': 1990, 'database_name': 'main'},
json.loads(imported.params))
def test_import_table_2_col_2_met(self):
table = self.create_table(
'table_2_col_2_met', id=10003, cols_names=['c1', 'c2'],
metric_names=['m1', 'm2'])
imported_id = SqlaTable.import_obj(table, import_time=1991)
imported = self.get_table(imported_id)
self.assert_table_equals(table, imported)
def test_import_table_override(self):
table = self.create_table(
'table_override', id=10003, cols_names=['col1'],
metric_names=['m1'])
imported_id = SqlaTable.import_obj(table, import_time=1991)
table_over = self.create_table(
'table_override', id=10003, cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_over_id = SqlaTable.import_obj(
table_over, import_time=1992)
imported_over = self.get_table(imported_over_id)
self.assertEquals(imported_id, imported_over.id)
expected_table = self.create_table(
'table_override', id=10003, metric_names=['new_metric1', 'm1'],
cols_names=['col1', 'new_col1', 'col2', 'col3'])
self.assert_table_equals(expected_table, imported_over)
def test_import_table_override_identical(self):
table = self.create_table(
'copy_cat', id=10004, cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_id = SqlaTable.import_obj(table, import_time=1993)
copy_table = self.create_table(
'copy_cat', id=10004, cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_id_copy = SqlaTable.import_obj(
copy_table, import_time=1994)
self.assertEquals(imported_id, imported_id_copy)
self.assert_table_equals(copy_table, self.get_table(imported_id))
def test_import_druid_no_metadata(self):
datasource = self.create_druid_datasource('pure_druid', id=10001)
imported_id = DruidDatasource.import_obj(
datasource, import_time=1989)
imported = self.get_datasource(imported_id)
self.assert_datasource_equals(datasource, imported)
def test_import_druid_1_col_1_met(self):
datasource = self.create_druid_datasource(
'druid_1_col_1_met', id=10002,
cols_names=["col1"], metric_names=["metric1"])
imported_id = DruidDatasource.import_obj(
datasource, import_time=1990)
imported = self.get_datasource(imported_id)
self.assert_datasource_equals(datasource, imported)
self.assertEquals(
{'remote_id': 10002, 'import_time': 1990,
'database_name': 'druid_test'},
json.loads(imported.params))
def test_import_druid_2_col_2_met(self):
datasource = self.create_druid_datasource(
'druid_2_col_2_met', id=10003, cols_names=['c1', 'c2'],
metric_names=['m1', 'm2'])
imported_id = DruidDatasource.import_obj(
datasource, import_time=1991)
imported = self.get_datasource(imported_id)
self.assert_datasource_equals(datasource, imported)
def test_import_druid_override(self):
datasource = self.create_druid_datasource(
'druid_override', id=10003, cols_names=['col1'],
metric_names=['m1'])
imported_id = DruidDatasource.import_obj(
datasource, import_time=1991)
table_over = self.create_druid_datasource(
'druid_override', id=10003,
cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_over_id = DruidDatasource.import_obj(
table_over, import_time=1992)
imported_over = self.get_datasource(imported_over_id)
self.assertEquals(imported_id, imported_over.id)
expected_datasource = self.create_druid_datasource(
'druid_override', id=10003, metric_names=['new_metric1', 'm1'],
cols_names=['col1', 'new_col1', 'col2', 'col3'])
self.assert_datasource_equals(expected_datasource, imported_over)
def test_import_druid_override_identical(self):
datasource = self.create_druid_datasource(
'copy_cat', id=10004, cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_id = DruidDatasource.import_obj(
datasource, import_time=1993)
copy_datasource = self.create_druid_datasource(
'copy_cat', id=10004, cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_id_copy = DruidDatasource.import_obj(
copy_datasource, import_time=1994)
self.assertEquals(imported_id, imported_id_copy)
self.assert_datasource_equals(
copy_datasource, self.get_datasource(imported_id))
if __name__ == '__main__':
unittest.main()
| tests/import_export_tests.py | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.0002501884300727397,
0.0001752894459059462,
0.00016501099162269384,
0.00017220746667589992,
0.000014159775673761033
]
|
{
"id": 4,
"code_window": [
"from datetime import datetime\n",
"import logging\n",
"\n",
"import sqlalchemy as sqla\n",
"\n",
"from flask import Markup, flash, redirect\n",
"from flask_appbuilder import CompactCRUDMixin, expose\n",
"from flask_appbuilder.models.sqla.interface import SQLAInterface\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/connectors/druid/views.py",
"type": "replace",
"edit_start_line_idx": 3
} | # pylint: disable=invalid-unary-operand-type
from collections import OrderedDict
import json
import logging
from copy import deepcopy
from datetime import datetime, timedelta
from six import string_types
from multiprocessing import Pool
import requests
import sqlalchemy as sa
from sqlalchemy import (
Column, Integer, String, ForeignKey, Text, Boolean,
DateTime, or_, and_,
)
from sqlalchemy.orm import backref, relationship
from dateutil.parser import parse as dparse
from pydruid.client import PyDruid
from pydruid.utils.aggregators import count
from pydruid.utils.filters import Dimension, Filter
from pydruid.utils.postaggregator import (
Postaggregator, Quantile, Quantiles, Field, Const, HyperUniqueCardinality,
)
from pydruid.utils.having import Aggregation
from flask import Markup, escape
from flask_appbuilder.models.decorators import renders
from flask_appbuilder import Model
from flask_babel import lazy_gettext as _
from superset import conf, db, import_util, utils, sm, get_session
from superset.utils import (
flasher, MetricPermException, DimSelector, DTTM_ALIAS
)
from superset.connectors.base.models import BaseDatasource, BaseColumn, BaseMetric
from superset.models.helpers import AuditMixinNullable, QueryResult, set_perm
DRUID_TZ = conf.get("DRUID_TZ")
# Function wrapper because bound methods cannot
# be passed to processes
def _fetch_metadata_for(datasource):
return datasource.latest_metadata()
class JavascriptPostAggregator(Postaggregator):
def __init__(self, name, field_names, function):
self.post_aggregator = {
'type': 'javascript',
'fieldNames': field_names,
'name': name,
'function': function,
}
self.name = name
class CustomPostAggregator(Postaggregator):
"""A way to allow users to specify completely custom PostAggregators"""
def __init__(self, name, post_aggregator):
self.name = name
self.post_aggregator = post_aggregator
class DruidCluster(Model, AuditMixinNullable):
"""ORM object referencing the Druid clusters"""
__tablename__ = 'clusters'
type = "druid"
id = Column(Integer, primary_key=True)
verbose_name = Column(String(250), unique=True)
# short unique name, used in permissions
cluster_name = Column(String(250), unique=True)
coordinator_host = Column(String(255))
coordinator_port = Column(Integer, default=8081)
coordinator_endpoint = Column(
String(255), default='druid/coordinator/v1/metadata')
broker_host = Column(String(255))
broker_port = Column(Integer, default=8082)
broker_endpoint = Column(String(255), default='druid/v2')
metadata_last_refreshed = Column(DateTime)
cache_timeout = Column(Integer)
def __repr__(self):
return self.verbose_name if self.verbose_name else self.cluster_name
def get_pydruid_client(self):
cli = PyDruid(
"http://{0}:{1}/".format(self.broker_host, self.broker_port),
self.broker_endpoint)
return cli
def get_datasources(self):
endpoint = (
"http://{obj.coordinator_host}:{obj.coordinator_port}/"
"{obj.coordinator_endpoint}/datasources"
).format(obj=self)
return json.loads(requests.get(endpoint).text)
def get_druid_version(self):
endpoint = (
"http://{obj.coordinator_host}:{obj.coordinator_port}/status"
).format(obj=self)
return json.loads(requests.get(endpoint).text)['version']
def refresh_datasources(
self,
datasource_name=None,
merge_flag=True,
refreshAll=True):
"""Refresh metadata of all datasources in the cluster
If ``datasource_name`` is specified, only that datasource is updated
"""
self.druid_version = self.get_druid_version()
ds_list = self.get_datasources()
blacklist = conf.get('DRUID_DATA_SOURCE_BLACKLIST', [])
ds_refresh = []
if not datasource_name:
ds_refresh = list(filter(lambda ds: ds not in blacklist, ds_list))
elif datasource_name not in blacklist and datasource_name in ds_list:
ds_refresh.append(datasource_name)
else:
return
self.refresh_async(ds_refresh, merge_flag, refreshAll)
def refresh_async(self, datasource_names, merge_flag, refreshAll):
"""
Fetches metadata for the specified datasources and
merges it into the Superset database
"""
session = db.session
ds_list = (
session.query(DruidDatasource)
.filter(or_(DruidDatasource.datasource_name == name
for name in datasource_names))
)
ds_map = {ds.name: ds for ds in ds_list}
for ds_name in datasource_names:
datasource = ds_map.get(ds_name, None)
if not datasource:
datasource = DruidDatasource(datasource_name=ds_name)
with session.no_autoflush:
session.add(datasource)
flasher(
"Adding new datasource [{}]".format(ds_name), 'success')
ds_map[ds_name] = datasource
elif refreshAll:
flasher(
"Refreshing datasource [{}]".format(ds_name), 'info')
else:
del ds_map[ds_name]
continue
datasource.cluster = self
datasource.merge_flag = merge_flag
session.flush()
# Prepare multithreaded execution
pool = Pool()
ds_refresh = list(ds_map.values())
metadata = pool.map(_fetch_metadata_for, ds_refresh)
pool.close()
pool.join()
for i in range(0, len(ds_refresh)):
datasource = ds_refresh[i]
cols = metadata[i]
col_objs_list = (
session.query(DruidColumn)
.filter(DruidColumn.datasource_name == datasource.datasource_name)
.filter(or_(DruidColumn.column_name == col for col in cols))
)
col_objs = {col.column_name: col for col in col_objs_list}
for col in cols:
if col == '__time': # skip the time column
continue
col_obj = col_objs.get(col, None)
if not col_obj:
col_obj = DruidColumn(
datasource_name=datasource.datasource_name,
column_name=col)
with session.no_autoflush:
session.add(col_obj)
datatype = cols[col]['type']
if datatype == 'STRING':
col_obj.groupby = True
col_obj.filterable = True
if datatype == 'hyperUnique' or datatype == 'thetaSketch':
col_obj.count_distinct = True
# Allow sum/min/max for long or double
if datatype == 'LONG' or datatype == 'DOUBLE':
col_obj.sum = True
col_obj.min = True
col_obj.max = True
col_obj.type = datatype
col_obj.datasource = datasource
datasource.generate_metrics_for(col_objs_list)
session.commit()
@property
def perm(self):
return "[{obj.cluster_name}].(id:{obj.id})".format(obj=self)
def get_perm(self):
return self.perm
@property
def name(self):
return self.verbose_name if self.verbose_name else self.cluster_name
@property
def unique_name(self):
return self.verbose_name if self.verbose_name else self.cluster_name
class DruidColumn(Model, BaseColumn):
"""ORM model for storing Druid datasource column metadata"""
__tablename__ = 'columns'
datasource_name = Column(
String(255),
ForeignKey('datasources.datasource_name'))
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('columns', cascade='all, delete-orphan'),
enable_typechecks=False)
dimension_spec_json = Column(Text)
export_fields = (
'datasource_name', 'column_name', 'is_active', 'type', 'groupby',
'count_distinct', 'sum', 'avg', 'max', 'min', 'filterable',
'description', 'dimension_spec_json'
)
def __repr__(self):
return self.column_name
@property
def expression(self):
return self.dimension_spec_json
@property
def dimension_spec(self):
if self.dimension_spec_json:
return json.loads(self.dimension_spec_json)
def get_metrics(self):
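# Builds the default metric set for this column, keyed by metric name:
# count(*), sum/avg/min/max for flagged numeric columns, and a
# count-distinct variant (hyperUnique/thetaSketch or cardinality).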
metrics = {}
metrics['count'] = DruidMetric(
metric_name='count',
verbose_name='COUNT(*)',
metric_type='count',
json=json.dumps({'type': 'count', 'name': 'count'})
)
# Somehow we need to reassign this for UDAFs
if self.type in ('DOUBLE', 'FLOAT'):
corrected_type = 'DOUBLE'
else:
corrected_type = self.type
if self.sum and self.is_num:
mt = corrected_type.lower() + 'Sum'
name = 'sum__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='sum',
verbose_name='SUM({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.avg and self.is_num:
mt = corrected_type.lower() + 'Avg'
name = 'avg__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='avg',
verbose_name='AVG({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.min and self.is_num:
mt = corrected_type.lower() + 'Min'
name = 'min__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='min',
verbose_name='MIN({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.max and self.is_num:
mt = corrected_type.lower() + 'Max'
name = 'max__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='max',
verbose_name='MAX({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.count_distinct:
name = 'count_distinct__' + self.column_name
if self.type == 'hyperUnique' or self.type == 'thetaSketch':
metrics[name] = DruidMetric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type=self.type,
json=json.dumps({
'type': self.type,
'name': name,
'fieldName': self.column_name
})
)
else:
metrics[name] = DruidMetric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type='count_distinct',
json=json.dumps({
'type': 'cardinality',
'name': name,
'fieldNames': [self.column_name]})
)
return metrics
def generate_metrics(self):
"""Generate metrics based on the column metadata"""
metrics = self.get_metrics()
dbmetrics = (
db.session.query(DruidMetric)
.filter(DruidCluster.cluster_name == self.datasource.cluster_name)
.filter(DruidMetric.datasource_name == self.datasource_name)
.filter(or_(
DruidMetric.metric_name == m for m in metrics
))
)
dbmetrics = {metric.metric_name: metric for metric in dbmetrics}
for metric in metrics.values():
metric.datasource_name = self.datasource_name
if not dbmetrics.get(metric.metric_name, None):
db.session.add(metric)
@classmethod
def import_obj(cls, i_column):
def lookup_obj(lookup_column):
return db.session.query(DruidColumn).filter(
DruidColumn.datasource_name == lookup_column.datasource_name,
DruidColumn.column_name == lookup_column.column_name).first()
return import_util.import_simple_obj(db.session, i_column, lookup_obj)
class DruidMetric(Model, BaseMetric):
"""ORM object referencing Druid metrics for a datasource"""
__tablename__ = 'metrics'
datasource_name = Column(
String(255),
ForeignKey('datasources.datasource_name'))
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('metrics', cascade='all, delete-orphan'),
enable_typechecks=False)
json = Column(Text)
export_fields = (
'metric_name', 'verbose_name', 'metric_type', 'datasource_name',
'json', 'description', 'is_restricted', 'd3format'
)
@property
def expression(self):
return self.json
@property
def json_obj(self):
try:
obj = json.loads(self.json)
except Exception:
obj = {}
return obj
@property
def perm(self):
return (
"{parent_name}.[{obj.metric_name}](id:{obj.id})"
).format(obj=self,
parent_name=self.datasource.full_name
) if self.datasource else None
@classmethod
def import_obj(cls, i_metric):
def lookup_obj(lookup_metric):
return db.session.query(DruidMetric).filter(
DruidMetric.datasource_name == lookup_metric.datasource_name,
DruidMetric.metric_name == lookup_metric.metric_name).first()
return import_util.import_simple_obj(db.session, i_metric, lookup_obj)
class DruidDatasource(Model, BaseDatasource):
"""ORM object referencing Druid datasources (tables)"""
__tablename__ = 'datasources'
type = "druid"
query_langtage = "json"
cluster_class = DruidCluster
metric_class = DruidMetric
column_class = DruidColumn
baselink = "druiddatasourcemodelview"
# Columns
datasource_name = Column(String(255), unique=True)
is_hidden = Column(Boolean, default=False)
fetch_values_from = Column(String(100))
cluster_name = Column(
String(250), ForeignKey('clusters.cluster_name'))
cluster = relationship(
'DruidCluster', backref='datasources', foreign_keys=[cluster_name])
user_id = Column(Integer, ForeignKey('ab_user.id'))
owner = relationship(
sm.user_model,
backref=backref('datasources', cascade='all, delete-orphan'),
foreign_keys=[user_id])
export_fields = (
'datasource_name', 'is_hidden', 'description', 'default_endpoint',
'cluster_name', 'offset', 'cache_timeout', 'params'
)
@property
def database(self):
return self.cluster
@property
def connection(self):
return str(self.database)
@property
def num_cols(self):
return [c.column_name for c in self.columns if c.is_num]
@property
def name(self):
return self.datasource_name
@property
def schema(self):
ds_name = self.datasource_name or ''
name_pieces = ds_name.split('.')
if len(name_pieces) > 1:
return name_pieces[0]
else:
return None
@property
def schema_perm(self):
"""Returns schema permission if present, cluster one otherwise."""
return utils.get_schema_perm(self.cluster, self.schema)
def get_perm(self):
return (
"[{obj.cluster_name}].[{obj.datasource_name}]"
"(id:{obj.id})").format(obj=self)
@property
def link(self):
name = escape(self.datasource_name)
return Markup('<a href="{self.url}">{name}</a>').format(**locals())
@property
def full_name(self):
return utils.get_datasource_full_name(
self.cluster_name, self.datasource_name)
@property
def time_column_grains(self):
return {
"time_columns": [
'all', '5 seconds', '30 seconds', '1 minute',
'5 minutes', '1 hour', '6 hour', '1 day', '7 days',
'week', 'week_starting_sunday', 'week_ending_saturday',
'month',
],
"time_grains": ['now']
}
def __repr__(self):
return self.datasource_name
@renders('datasource_name')
def datasource_link(self):
url = "/superset/explore/{obj.type}/{obj.id}/".format(obj=self)
name = escape(self.datasource_name)
return Markup('<a href="{url}">{name}</a>'.format(**locals()))
def get_metric_obj(self, metric_name):
return [
m.json_obj for m in self.metrics
if m.metric_name == metric_name
][0]
@classmethod
def import_obj(cls, i_datasource, import_time=None):
"""Imports the datasource from the object to the database.
Metrics, columns and the datasource will be overridden if they exist.
This function can be used to import/export dashboards between multiple
superset instances. Audit metadata isn't copied over.
"""
def lookup_datasource(d):
return db.session.query(DruidDatasource).join(DruidCluster).filter(
DruidDatasource.datasource_name == d.datasource_name,
DruidCluster.cluster_name == d.cluster_name,
).first()
def lookup_cluster(d):
return db.session.query(DruidCluster).filter_by(
cluster_name=d.cluster_name).one()
return import_util.import_datasource(
db.session, i_datasource, lookup_cluster, lookup_datasource,
import_time)
@staticmethod
def version_higher(v1, v2):
"""is v1 higher than v2
>>> DruidDatasource.version_higher('0.8.2', '0.9.1')
False
>>> DruidDatasource.version_higher('0.8.2', '0.6.1')
True
>>> DruidDatasource.version_higher('0.8.2', '0.8.2')
False
>>> DruidDatasource.version_higher('0.8.2', '0.9.BETA')
False
>>> DruidDatasource.version_higher('0.8.2', '0.9')
False
"""
def int_or_0(v):
try:
v = int(v)
except (TypeError, ValueError):
v = 0
return v
v1nums = [int_or_0(n) for n in v1.split('.')]
v2nums = [int_or_0(n) for n in v2.split('.')]
v1nums = (v1nums + [0, 0, 0])[:3]
v2nums = (v2nums + [0, 0, 0])[:3]
return v1nums[0] > v2nums[0] or \
(v1nums[0] == v2nums[0] and v1nums[1] > v2nums[1]) or \
(v1nums[0] == v2nums[0] and v1nums[1] == v2nums[1] and v1nums[2] > v2nums[2])
def latest_metadata(self):
"""Returns segment metadata from the latest segment"""
logging.info("Syncing datasource [{}]".format(self.datasource_name))
client = self.cluster.get_pydruid_client()
results = client.time_boundary(datasource=self.datasource_name)
if not results:
return
max_time = results[0]['result']['maxTime']
max_time = dparse(max_time)
# Query segmentMetadata for 7 days back. For Druid versions below
# 0.8.2 the interval must end more than 1 day ago to exclude
# realtime segments, which used to trigger a bug (fixed in druid 0.8.2).
# https://groups.google.com/forum/#!topic/druid-user/gVCqqspHqOQ
lbound = (max_time - timedelta(days=7)).isoformat()
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
rbound = (max_time - timedelta(1)).isoformat()
else:
rbound = max_time.isoformat()
segment_metadata = None
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=lbound + '/' + rbound,
merge=self.merge_flag,
analysisTypes=[])
except Exception as e:
logging.warning("Failed first attempt to get latest segment")
logging.exception(e)
if not segment_metadata:
# if no segments in the past 7 days, look at all segments
lbound = datetime(1901, 1, 1).isoformat()[:10]
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
rbound = datetime.now().isoformat()
else:
rbound = datetime(2050, 1, 1).isoformat()[:10]
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=lbound + '/' + rbound,
merge=self.merge_flag,
analysisTypes=[])
except Exception as e:
logging.warning("Failed 2nd attempt to get latest segment")
logging.exception(e)
if segment_metadata:
return segment_metadata[-1]['columns']
def generate_metrics(self):
self.generate_metrics_for(self.columns)
def generate_metrics_for(self, columns):
metrics = {}
for col in columns:
metrics.update(col.get_metrics())
dbmetrics = (
db.session.query(DruidMetric)
.filter(DruidCluster.cluster_name == self.cluster_name)
.filter(DruidMetric.datasource_name == self.datasource_name)
.filter(or_(DruidMetric.metric_name == m for m in metrics))
)
dbmetrics = {metric.metric_name: metric for metric in dbmetrics}
for metric in metrics.values():
metric.datasource_name = self.datasource_name
if not dbmetrics.get(metric.metric_name, None):
with db.session.no_autoflush:
db.session.add(metric)
@classmethod
def sync_to_db_from_config(
cls,
druid_config,
user,
cluster,
refresh=True):
"""Merges the ds config from druid_config into one stored in the db."""
session = db.session
datasource = (
session.query(cls)
.filter_by(datasource_name=druid_config['name'])
.first()
)
# Create a new datasource.
if not datasource:
datasource = cls(
datasource_name=druid_config['name'],
cluster=cluster,
owner=user,
changed_by_fk=user.id,
created_by_fk=user.id,
)
session.add(datasource)
elif not refresh:
return
dimensions = druid_config['dimensions']
col_objs = (
session.query(DruidColumn)
.filter(DruidColumn.datasource_name == druid_config['name'])
.filter(or_(DruidColumn.column_name == dim for dim in dimensions))
)
col_objs = {col.column_name: col for col in col_objs}
for dim in dimensions:
col_obj = col_objs.get(dim, None)
if not col_obj:
col_obj = DruidColumn(
datasource_name=druid_config['name'],
column_name=dim,
groupby=True,
filterable=True,
# TODO: fetch type from Hive.
type="STRING",
datasource=datasource,
)
session.add(col_obj)
# Import Druid metrics
metric_objs = (
session.query(DruidMetric)
.filter(DruidMetric.datasource_name == druid_config['name'])
.filter(or_(DruidMetric.metric_name == spec['name']
for spec in druid_config["metrics_spec"]))
)
metric_objs = {metric.metric_name: metric for metric in metric_objs}
for metric_spec in druid_config["metrics_spec"]:
metric_name = metric_spec["name"]
metric_type = metric_spec["type"]
metric_json = json.dumps(metric_spec)
if metric_type == "count":
metric_type = "longSum"
metric_json = json.dumps({
"type": "longSum",
"name": metric_name,
"fieldName": metric_name,
})
metric_obj = metric_objs.get(metric_name, None)
if not metric_obj:
metric_obj = DruidMetric(
metric_name=metric_name,
metric_type=metric_type,
verbose_name="%s(%s)" % (metric_type, metric_name),
datasource=datasource,
json=metric_json,
description=(
"Imported from the airolap config dir for %s" %
druid_config['name']),
)
session.add(metric_obj)
session.commit()
@staticmethod
def time_offset(granularity):
if granularity == 'week_ending_saturday':
return 6 * 24 * 3600 * 1000 # 6 days
return 0
# uses https://en.wikipedia.org/wiki/ISO_8601
# http://druid.io/docs/0.8.0/querying/granularities.html
# TODO: pass origin from the UI
@staticmethod
def granularity(period_name, timezone=None, origin=None):
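# Maps a human-readable period to a Druid granularity spec, e.g.
# granularity('1 day', timezone='UTC') returns
# {'type': 'period', 'timeZone': 'UTC', 'period': 'P1D'}.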
if not period_name or period_name == 'all':
return 'all'
iso_8601_dict = {
'5 seconds': 'PT5S',
'30 seconds': 'PT30S',
'1 minute': 'PT1M',
'5 minutes': 'PT5M',
'1 hour': 'PT1H',
'6 hour': 'PT6H',
'one day': 'P1D',
'1 day': 'P1D',
'7 days': 'P7D',
'week': 'P1W',
'week_starting_sunday': 'P1W',
'week_ending_saturday': 'P1W',
'month': 'P1M',
}
granularity = {'type': 'period'}
if timezone:
granularity['timeZone'] = timezone
if origin:
dttm = utils.parse_human_datetime(origin)
granularity['origin'] = dttm.isoformat()
if period_name in iso_8601_dict:
granularity['period'] = iso_8601_dict[period_name]
if period_name in ('week_ending_saturday', 'week_starting_sunday'):
# use Sunday as start of the week
granularity['origin'] = '2016-01-03T00:00:00'
elif not isinstance(period_name, string_types):
granularity['type'] = 'duration'
granularity['duration'] = period_name
elif period_name.startswith('P'):
# identify if the string is the iso_8601 period
granularity['period'] = period_name
else:
granularity['type'] = 'duration'
granularity['duration'] = utils.parse_human_timedelta(
period_name).total_seconds() * 1000
return granularity
@staticmethod
def _metrics_and_post_aggs(metrics, metrics_dict):
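# Splits the requested metrics into plain Druid aggregation names and
# pydruid post-aggregator objects; returns (all_metrics, post_aggs)
# with post_aggs keyed by the post-aggregation metric's name.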
all_metrics = []
post_aggs = {}
def recursive_get_fields(_conf):
_type = _conf.get('type')
_field = _conf.get('field')
_fields = _conf.get('fields')
field_names = []
if _type in ['fieldAccess', 'hyperUniqueCardinality',
'quantile', 'quantiles']:
field_names.append(_conf.get('fieldName', ''))
if _field:
field_names += recursive_get_fields(_field)
if _fields:
for _f in _fields:
field_names += recursive_get_fields(_f)
return list(set(field_names))
for metric_name in metrics:
metric = metrics_dict[metric_name]
if metric.metric_type != 'postagg':
all_metrics.append(metric_name)
else:
mconf = metric.json_obj
all_metrics += recursive_get_fields(mconf)
all_metrics += mconf.get('fieldNames', [])
if mconf.get('type') == 'javascript':
post_aggs[metric_name] = JavascriptPostAggregator(
name=mconf.get('name', ''),
field_names=mconf.get('fieldNames', []),
function=mconf.get('function', ''))
elif mconf.get('type') == 'quantile':
post_aggs[metric_name] = Quantile(
mconf.get('name', ''),
mconf.get('probability', ''),
)
elif mconf.get('type') == 'quantiles':
post_aggs[metric_name] = Quantiles(
mconf.get('name', ''),
mconf.get('probabilities', ''),
)
elif mconf.get('type') == 'fieldAccess':
post_aggs[metric_name] = Field(mconf.get('name'))
elif mconf.get('type') == 'constant':
post_aggs[metric_name] = Const(
mconf.get('value'),
output_name=mconf.get('name', '')
)
elif mconf.get('type') == 'hyperUniqueCardinality':
post_aggs[metric_name] = HyperUniqueCardinality(
mconf.get('name')
)
elif mconf.get('type') == 'arithmetic':
post_aggs[metric_name] = Postaggregator(
mconf.get('fn', "/"),
mconf.get('fields', []),
mconf.get('name', ''))
else:
post_aggs[metric_name] = CustomPostAggregator(
mconf.get('name', ''),
mconf)
return all_metrics, post_aggs
def values_for_column(self,
column_name,
limit=10000):
"""Retrieve some values for the given column"""
# TODO: Use Lexicographic TopNMetricSpec once supported by PyDruid
if self.fetch_values_from:
from_dttm = utils.parse_human_datetime(self.fetch_values_from)
else:
from_dttm = datetime(1970, 1, 1)
qry = dict(
datasource=self.datasource_name,
granularity="all",
intervals=from_dttm.isoformat() + '/' + datetime.now().isoformat(),
aggregations=dict(count=count("count")),
dimension=column_name,
metric="count",
threshold=limit,
)
client = self.cluster.get_pydruid_client()
client.topn(**qry)
df = client.export_pandas()
return [row[column_name] for row in df.to_records(index=False)]
def get_query_str(self, query_obj, phase=1, client=None):
return self.run_query(client=client, phase=phase, **query_obj)
def _add_filter_from_pre_query_data(self, df, dimensions, dim_filter):
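# Folds the phase-one result rows into the phase-two filter: each row
# becomes an AND of Dimension == value terms, rows are OR-ed together,
# and the result is AND-ed with any pre-existing filter.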
ret = dim_filter
if df is not None and not df.empty:
new_filters = []
for unused, row in df.iterrows():
fields = []
for dim in dimensions:
f = Dimension(dim) == row[dim]
fields.append(f)
if len(fields) > 1:
term = Filter(type="and", fields=fields)
new_filters.append(term)
elif fields:
new_filters.append(fields[0])
if new_filters:
ff = Filter(type="or", fields=new_filters)
if not dim_filter:
ret = ff
else:
ret = Filter(type="and", fields=[ff, dim_filter])
return ret
def run_query( # noqa / druid
self,
groupby, metrics,
granularity,
from_dttm, to_dttm,
filter=None, # noqa
is_timeseries=True,
timeseries_limit=None,
timeseries_limit_metric=None,
row_limit=None,
inner_from_dttm=None, inner_to_dttm=None,
orderby=None,
extras=None, # noqa
select=None, # noqa
columns=None, phase=2, client=None, form_data=None,
order_desc=True):
"""Runs a query against Druid and returns a dataframe.
"""
# TODO refactor into using a TBD Query object
client = client or self.cluster.get_pydruid_client()
if not is_timeseries:
granularity = 'all'
inner_from_dttm = inner_from_dttm or from_dttm
inner_to_dttm = inner_to_dttm or to_dttm
# add tzinfo to native datetime with config
from_dttm = from_dttm.replace(tzinfo=DRUID_TZ)
to_dttm = to_dttm.replace(tzinfo=DRUID_TZ)
timezone = from_dttm.tzname()
query_str = ""
metrics_dict = {m.metric_name: m for m in self.metrics}
columns_dict = {c.column_name: c for c in self.columns}
all_metrics, post_aggs = self._metrics_and_post_aggs(
metrics,
metrics_dict)
aggregations = OrderedDict()
for m in self.metrics:
if m.metric_name in all_metrics:
aggregations[m.metric_name] = m.json_obj
rejected_metrics = [
m.metric_name for m in self.metrics
if m.is_restricted and
m.metric_name in aggregations.keys() and
not sm.has_access('metric_access', m.perm)
]
if rejected_metrics:
raise MetricPermException(
"Access to the metrics denied: " + ', '.join(rejected_metrics)
)
# the dimensions list with dimensionSpecs expanded
dimensions = []
groupby = [gb for gb in groupby if gb in columns_dict]
for column_name in groupby:
col = columns_dict.get(column_name)
dim_spec = col.dimension_spec
if dim_spec:
dimensions.append(dim_spec)
else:
dimensions.append(column_name)
qry = dict(
datasource=self.datasource_name,
dimensions=dimensions,
aggregations=aggregations,
granularity=DruidDatasource.granularity(
granularity,
timezone=timezone,
origin=extras.get('druid_time_origin'),
),
post_aggregations=post_aggs,
intervals=from_dttm.isoformat() + '/' + to_dttm.isoformat(),
)
filters = self.get_filters(filter)
if filters:
qry['filter'] = filters
having_filters = self.get_having_filters(extras.get('having_druid'))
if having_filters:
qry['having'] = having_filters
order_direction = "descending" if order_desc else "ascending"
if len(groupby) == 0 and not having_filters:
del qry['dimensions']
client.timeseries(**qry)
if not having_filters and len(groupby) == 1 and order_desc:
dim = list(qry.get('dimensions'))[0]
if timeseries_limit_metric:
order_by = timeseries_limit_metric
else:
order_by = list(qry['aggregations'].keys())[0]
# Limit on the number of timeseries, doing a two-phase query
pre_qry = deepcopy(qry)
pre_qry['granularity'] = "all"
pre_qry['threshold'] = min(row_limit,
timeseries_limit or row_limit)
pre_qry['metric'] = order_by
pre_qry['dimension'] = dim
del pre_qry['dimensions']
client.topn(**pre_qry)
query_str += "// Two phase query\n// Phase 1\n"
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
query_str += "\n"
if phase == 1:
return query_str
query_str += (
"//\nPhase 2 (built based on phase one's results)\n")
df = client.export_pandas()
qry['filter'] = self._add_filter_from_pre_query_data(
df,
qry['dimensions'], filters)
qry['threshold'] = timeseries_limit or 1000
if row_limit and granularity == 'all':
qry['threshold'] = row_limit
qry['dimension'] = dim
del qry['dimensions']
qry['metric'] = list(qry['aggregations'].keys())[0]
client.topn(**qry)
elif len(groupby) > 1 or having_filters or not order_desc:
# If grouping on multiple fields or using a having filter,
# we have to force a groupby query
if timeseries_limit and is_timeseries:
order_by = metrics[0] if metrics else self.metrics[0]
if timeseries_limit_metric:
order_by = timeseries_limit_metric
# Limit on the number of timeseries, doing a two-phase query
pre_qry = deepcopy(qry)
pre_qry['granularity'] = "all"
pre_qry['limit_spec'] = {
"type": "default",
"limit": min(timeseries_limit, row_limit),
'intervals': (
inner_from_dttm.isoformat() + '/' +
inner_to_dttm.isoformat()),
"columns": [{
"dimension": order_by,
"direction": order_direction,
}],
}
client.groupby(**pre_qry)
query_str += "// Two phase query\n// Phase 1\n"
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
query_str += "\n"
if phase == 1:
return query_str
query_str += (
"//\nPhase 2 (built based on phase one's results)\n")
df = client.export_pandas()
qry['filter'] = self._add_filter_from_pre_query_data(
df,
qry['dimensions'], filters)
qry['limit_spec'] = None
if row_limit:
qry['limit_spec'] = {
"type": "default",
"limit": row_limit,
"columns": [{
"dimension": (
metrics[0] if metrics else self.metrics[0]),
"direction": order_direction,
}],
}
client.groupby(**qry)
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
return query_str
def query(self, query_obj):
qry_start_dttm = datetime.now()
client = self.cluster.get_pydruid_client()
query_str = self.get_query_str(
client=client, query_obj=query_obj, phase=2)
df = client.export_pandas()
if df is None or df.size == 0:
raise Exception(_("No data was returned."))
df.columns = [
DTTM_ALIAS if c == 'timestamp' else c for c in df.columns]
is_timeseries = query_obj.get('is_timeseries', True)
if (
not is_timeseries and
DTTM_ALIAS in df.columns):
del df[DTTM_ALIAS]
# Reordering columns
cols = []
if DTTM_ALIAS in df.columns:
cols += [DTTM_ALIAS]
cols += [col for col in query_obj['groupby'] if col in df.columns]
cols += [col for col in query_obj['metrics'] if col in df.columns]
df = df[cols]
time_offset = DruidDatasource.time_offset(query_obj['granularity'])
def increment_timestamp(ts):
dt = utils.parse_human_datetime(ts).replace(
tzinfo=DRUID_TZ)
return dt + timedelta(milliseconds=time_offset)
if DTTM_ALIAS in df.columns and time_offset:
df[DTTM_ALIAS] = df[DTTM_ALIAS].apply(increment_timestamp)
return QueryResult(
df=df,
query=query_str,
duration=datetime.now() - qry_start_dttm)
def get_filters(self, raw_filters): # noqa
filters = None
for flt in raw_filters:
if not all(f in flt for f in ['col', 'op', 'val']):
continue
col = flt['col']
op = flt['op']
eq = flt['val']
cond = None
if op in ('in', 'not in'):
eq = [
v.replace("'", '').strip()
if isinstance(v, string_types)
else v
for v in eq]
elif not isinstance(flt['val'], string_types):
eq = eq[0] if len(eq) > 0 else ''
if col in self.num_cols:
if op in ('in', 'not in'):
eq = [utils.string_to_num(v) for v in eq]
else:
eq = utils.string_to_num(eq)
if op == '==':
cond = Dimension(col) == eq
elif op == '!=':
cond = ~(Dimension(col) == eq)
elif op in ('in', 'not in'):
fields = []
if len(eq) > 1:
for s in eq:
fields.append(Dimension(col) == s)
cond = Filter(type="or", fields=fields)
elif len(eq) == 1:
cond = Dimension(col) == eq[0]
if op == 'not in':
cond = ~cond
elif op == 'regex':
cond = Filter(type="regex", pattern=eq, dimension=col)
elif op == '>=':
cond = Dimension(col) >= eq
elif op == '<=':
cond = Dimension(col) <= eq
elif op == '>':
cond = Dimension(col) > eq
elif op == '<':
cond = Dimension(col) < eq
if filters:
filters = Filter(type="and", fields=[
cond,
filters
])
else:
filters = cond
return filters
def _get_having_obj(self, col, op, eq):
cond = None
if op == '==':
if col in self.column_names:
cond = DimSelector(dimension=col, value=eq)
else:
cond = Aggregation(col) == eq
elif op == '>':
cond = Aggregation(col) > eq
elif op == '<':
cond = Aggregation(col) < eq
return cond
def get_having_filters(self, raw_filters):
filters = None
reversed_op_map = {
'!=': '==',
'>=': '<',
'<=': '>'
}
for flt in raw_filters:
if not all(f in flt for f in ['col', 'op', 'val']):
continue
col = flt['col']
op = flt['op']
eq = flt['val']
cond = None
if op in ['==', '>', '<']:
cond = self._get_having_obj(col, op, eq)
elif op in reversed_op_map:
cond = ~self._get_having_obj(col, reversed_op_map[op], eq)
if filters:
filters = filters & cond
else:
filters = cond
return filters
@classmethod
def query_datasources_by_name(
cls, session, database, datasource_name, schema=None):
return (
session.query(cls)
.filter_by(cluster_name=database.cluster_name)
.filter_by(datasource_name=datasource_name)
.all()
)
sa.event.listen(DruidDatasource, 'after_insert', set_perm)
sa.event.listen(DruidDatasource, 'after_update', set_perm)
| superset/connectors/druid/models.py | 1 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.0008059138781391084,
0.000184087228262797,
0.0001619242539163679,
0.00016790222434792668,
0.00007133019244065508
]
|
{
"id": 4,
"code_window": [
"from datetime import datetime\n",
"import logging\n",
"\n",
"import sqlalchemy as sqla\n",
"\n",
"from flask import Markup, flash, redirect\n",
"from flask_appbuilder import CompactCRUDMixin, expose\n",
"from flask_appbuilder.models.sqla.interface import SQLAInterface\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/connectors/druid/views.py",
"type": "replace",
"edit_start_line_idx": 3
} | import React from 'react';
import { expect } from 'chai';
import { describe, it } from 'mocha';
import { shallow } from 'enzyme';
import PopoverSection from '../../../javascripts/components/PopoverSection';
describe('PopoverSection', () => {
const defaultProps = {
title: 'Section Title',
isSelected: true,
onSelect: () => {},
info: 'info section',
children: <div />,
};
let wrapper;
const factory = (overrideProps) => {
const props = Object.assign({}, defaultProps, overrideProps || {});
return shallow(<PopoverSection {...props} />);
};
beforeEach(() => {
wrapper = factory();
});
it('renders', () => {
expect(React.isValidElement(<PopoverSection {...defaultProps} />)).to.equal(true);
});
  it('shows an icon when selected', () => {
expect(wrapper.find('.fa-check')).to.have.length(1);
});
  it('shows no icon when not selected', () => {
expect(factory({ isSelected: false }).find('.fa-check')).to.have.length(0);
});
});
| superset/assets/spec/javascripts/components/PopoverSection_spec.jsx | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.00017606475739739835,
0.00017220374138560146,
0.0001679298293311149,
0.0001724101894069463,
0.0000032128673410625197
]
|
{
"id": 4,
"code_window": [
"from datetime import datetime\n",
"import logging\n",
"\n",
"import sqlalchemy as sqla\n",
"\n",
"from flask import Markup, flash, redirect\n",
"from flask_appbuilder import CompactCRUDMixin, expose\n",
"from flask_appbuilder.models.sqla.interface import SQLAInterface\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/connectors/druid/views.py",
"type": "replace",
"edit_start_line_idx": 3
} | """Unit tests for Superset"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import json
import os
import unittest
from flask_appbuilder.security.sqla import models as ab_models
from superset import app, cli, db, appbuilder, security, sm
from superset.models import core as models
from superset.security import sync_role_definitions
from superset.connectors.sqla.models import SqlaTable
from superset.connectors.druid.models import DruidCluster, DruidDatasource
os.environ['SUPERSET_CONFIG'] = 'tests.superset_test_config'
BASE_DIR = app.config.get("BASE_DIR")
class SupersetTestCase(unittest.TestCase):
requires_examples = False
examples_loaded = False
def __init__(self, *args, **kwargs):
if (
self.requires_examples and
not os.environ.get('SOLO_TEST') and
not os.environ.get('examples_loaded')
):
logging.info("Loading examples")
cli.load_examples(load_test_data=True)
logging.info("Done loading examples")
sync_role_definitions()
os.environ['examples_loaded'] = '1'
else:
sync_role_definitions()
super(SupersetTestCase, self).__init__(*args, **kwargs)
self.client = app.test_client()
self.maxDiff = None
gamma_sqllab_role = sm.add_role("gamma_sqllab")
for perm in sm.find_role('Gamma').permissions:
sm.add_permission_role(gamma_sqllab_role, perm)
db_perm = self.get_main_database(sm.get_session).perm
security.merge_perm(sm, 'database_access', db_perm)
db_pvm = sm.find_permission_view_menu(
view_menu_name=db_perm, permission_name='database_access')
gamma_sqllab_role.permissions.append(db_pvm)
for perm in sm.find_role('sql_lab').permissions:
sm.add_permission_role(gamma_sqllab_role, perm)
admin = appbuilder.sm.find_user('admin')
if not admin:
appbuilder.sm.add_user(
'admin', 'admin', ' user', '[email protected]',
appbuilder.sm.find_role('Admin'),
password='general')
gamma = appbuilder.sm.find_user('gamma')
if not gamma:
appbuilder.sm.add_user(
'gamma', 'gamma', 'user', '[email protected]',
appbuilder.sm.find_role('Gamma'),
password='general')
gamma2 = appbuilder.sm.find_user('gamma2')
if not gamma2:
appbuilder.sm.add_user(
'gamma2', 'gamma2', 'user', '[email protected]',
appbuilder.sm.find_role('Gamma'),
password='general')
gamma_sqllab_user = appbuilder.sm.find_user('gamma_sqllab')
if not gamma_sqllab_user:
appbuilder.sm.add_user(
'gamma_sqllab', 'gamma_sqllab', 'user', '[email protected]',
gamma_sqllab_role, password='general')
alpha = appbuilder.sm.find_user('alpha')
if not alpha:
appbuilder.sm.add_user(
'alpha', 'alpha', 'user', '[email protected]',
appbuilder.sm.find_role('Alpha'),
password='general')
sm.get_session.commit()
# create druid cluster and druid datasources
session = db.session
cluster = (
session.query(DruidCluster)
.filter_by(cluster_name="druid_test")
.first()
)
if not cluster:
cluster = DruidCluster(cluster_name="druid_test")
session.add(cluster)
session.commit()
druid_datasource1 = DruidDatasource(
datasource_name='druid_ds_1',
cluster_name='druid_test'
)
session.add(druid_datasource1)
druid_datasource2 = DruidDatasource(
datasource_name='druid_ds_2',
cluster_name='druid_test'
)
session.add(druid_datasource2)
session.commit()
def get_table(self, table_id):
return db.session.query(SqlaTable).filter_by(
id=table_id).first()
def get_or_create(self, cls, criteria, session):
obj = session.query(cls).filter_by(**criteria).first()
if not obj:
obj = cls(**criteria)
return obj
def login(self, username='admin', password='general'):
resp = self.get_resp(
'/login/',
data=dict(username=username, password=password))
self.assertIn('Welcome', resp)
def get_slice(self, slice_name, session):
slc = (
session.query(models.Slice)
.filter_by(slice_name=slice_name)
.one()
)
session.expunge_all()
return slc
def get_table_by_name(self, name):
return db.session.query(SqlaTable).filter_by(
table_name=name).first()
def get_druid_ds_by_name(self, name):
return db.session.query(DruidDatasource).filter_by(
datasource_name=name).first()
def get_resp(
self, url, data=None, follow_redirects=True, raise_on_error=True):
"""Shortcut to get the parsed results while following redirects"""
if data:
resp = self.client.post(
url, data=data, follow_redirects=follow_redirects)
else:
resp = self.client.get(url, follow_redirects=follow_redirects)
if raise_on_error and resp.status_code > 400:
raise Exception(
"http request failed with code {}".format(resp.status_code))
return resp.data.decode('utf-8')
def get_json_resp(
self, url, data=None, follow_redirects=True, raise_on_error=True):
"""Shortcut to get the parsed results while following redirects"""
resp = self.get_resp(url, data, follow_redirects, raise_on_error)
return json.loads(resp)
def get_main_database(self, session):
return (
db.session.query(models.Database)
.filter_by(database_name='main')
.first()
)
def get_access_requests(self, username, ds_type, ds_id):
DAR = models.DatasourceAccessRequest
return (
db.session.query(DAR)
.filter(
DAR.created_by == sm.find_user(username=username),
DAR.datasource_type == ds_type,
DAR.datasource_id == ds_id,
)
.first()
)
def logout(self):
self.client.get('/logout/', follow_redirects=True)
def grant_public_access_to_table(self, table):
public_role = appbuilder.sm.find_role('Public')
perms = db.session.query(ab_models.PermissionView).all()
for perm in perms:
if (perm.permission.name == 'datasource_access' and
perm.view_menu and table.perm in perm.view_menu.name):
appbuilder.sm.add_permission_role(public_role, perm)
def revoke_public_access_to_table(self, table):
public_role = appbuilder.sm.find_role('Public')
perms = db.session.query(ab_models.PermissionView).all()
for perm in perms:
if (perm.permission.name == 'datasource_access' and
perm.view_menu and table.perm in perm.view_menu.name):
appbuilder.sm.del_permission_role(public_role, perm)
def run_sql(self, sql, client_id, user_name=None, raise_on_error=False):
if user_name:
self.logout()
self.login(username=(user_name if user_name else 'admin'))
dbid = self.get_main_database(db.session).id
resp = self.get_json_resp(
'/superset/sql_json/',
raise_on_error=False,
data=dict(database_id=dbid, sql=sql, select_as_create_as=False,
client_id=client_id),
)
if raise_on_error and 'error' in resp:
raise Exception("run_sql failed")
return resp
def test_gamma_permissions(self):
def assert_can_read(view_menu):
self.assertIn(('can_show', view_menu), gamma_perm_set)
self.assertIn(('can_list', view_menu), gamma_perm_set)
def assert_can_write(view_menu):
self.assertIn(('can_add', view_menu), gamma_perm_set)
self.assertIn(('can_download', view_menu), gamma_perm_set)
self.assertIn(('can_delete', view_menu), gamma_perm_set)
self.assertIn(('can_edit', view_menu), gamma_perm_set)
def assert_cannot_write(view_menu):
self.assertNotIn(('can_add', view_menu), gamma_perm_set)
self.assertNotIn(('can_download', view_menu), gamma_perm_set)
self.assertNotIn(('can_delete', view_menu), gamma_perm_set)
self.assertNotIn(('can_edit', view_menu), gamma_perm_set)
self.assertNotIn(('can_save', view_menu), gamma_perm_set)
def assert_can_all(view_menu):
assert_can_read(view_menu)
assert_can_write(view_menu)
gamma_perm_set = set()
for perm in sm.find_role('Gamma').permissions:
gamma_perm_set.add((perm.permission.name, perm.view_menu.name))
# check read only perms
assert_can_read('TableModelView')
assert_cannot_write('DruidColumnInlineView')
# make sure that user can create slices and dashboards
assert_can_all('SliceModelView')
assert_can_all('DashboardModelView')
self.assertIn(('can_add_slices', 'Superset'), gamma_perm_set)
self.assertIn(('can_copy_dash', 'Superset'), gamma_perm_set)
self.assertIn(('can_activity_per_day', 'Superset'), gamma_perm_set)
self.assertIn(('can_created_dashboards', 'Superset'), gamma_perm_set)
self.assertIn(('can_created_slices', 'Superset'), gamma_perm_set)
self.assertIn(('can_csv', 'Superset'), gamma_perm_set)
self.assertIn(('can_dashboard', 'Superset'), gamma_perm_set)
self.assertIn(('can_explore', 'Superset'), gamma_perm_set)
self.assertIn(('can_explore_json', 'Superset'), gamma_perm_set)
self.assertIn(('can_fave_dashboards', 'Superset'), gamma_perm_set)
self.assertIn(('can_fave_slices', 'Superset'), gamma_perm_set)
self.assertIn(('can_save_dash', 'Superset'), gamma_perm_set)
self.assertIn(('can_slice', 'Superset'), gamma_perm_set)
| tests/base_tests.py | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.004758457187563181,
0.00036688928958028555,
0.00016160633822437376,
0.00017157563706859946,
0.000863406341522932
]
|
{
"id": 4,
"code_window": [
"from datetime import datetime\n",
"import logging\n",
"\n",
"import sqlalchemy as sqla\n",
"\n",
"from flask import Markup, flash, redirect\n",
"from flask_appbuilder import CompactCRUDMixin, expose\n",
"from flask_appbuilder.models.sqla.interface import SQLAInterface\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/connectors/druid/views.py",
"type": "replace",
"edit_start_line_idx": 3
} | import d3 from 'd3';
import dt from 'datatables.net-bs';
import 'datatables.net-bs/css/dataTables.bootstrap.css';
import { fixDataTableBodyHeight, d3TimeFormatPreset } from '../javascripts/modules/utils';
import './table.css';
const $ = require('jquery');
dt(window, $);
function tableVis(slice, payload) {
const container = $(slice.selector);
const fC = d3.format('0,000');
const data = payload.data;
const fd = slice.formData;
// Removing metrics (aggregates) that are strings
let metrics = fd.metrics || [];
metrics = metrics.filter(m => !isNaN(data.records[0][m]));
function col(c) {
const arr = [];
for (let i = 0; i < data.records.length; i += 1) {
arr.push(data.records[i][c]);
}
return arr;
}
const maxes = {};
for (let i = 0; i < metrics.length; i += 1) {
maxes[metrics[i]] = d3.max(col(metrics[i]));
}
const tsFormatter = d3TimeFormatPreset(fd.table_timestamp_format);
const div = d3.select(slice.selector);
div.html('');
const table = div.append('table')
.classed(
'dataframe dataframe table table-striped table-bordered ' +
'table-condensed table-hover dataTable no-footer', true)
.attr('width', '100%');
const cols = data.columns.map(c => slice.datasource.verbose_map[c] || c);
table.append('thead').append('tr')
.selectAll('th')
.data(cols)
.enter()
.append('th')
.text(function (d) {
return d;
});
table.append('tbody')
.selectAll('tr')
.data(data.records)
.enter()
.append('tr')
.selectAll('td')
.data(row => data.columns.map((c) => {
const val = row[c];
let html;
const isMetric = metrics.indexOf(c) >= 0;
if (c === '__timestamp') {
html = tsFormatter(val);
}
if (typeof (val) === 'string') {
html = `<span class="like-pre">${val}</span>`;
}
if (isMetric) {
html = slice.d3format(c, val);
}
return {
col: c,
val,
html,
isMetric,
};
}))
.enter()
.append('td')
.style('background-image', function (d) {
if (d.isMetric) {
const perc = Math.round((d.val / maxes[d.col]) * 100);
// The 0.01 to 0.001 is a workaround for what appears to be a
// CSS rendering bug on flat, transparent colors
return (
`linear-gradient(to left, rgba(0,0,0,0.2), rgba(0,0,0,0.2) ${perc}%, ` +
`rgba(0,0,0,0.01) ${perc}%, rgba(0,0,0,0.001) 100%)`
);
}
return null;
})
.classed('text-right', d => d.isMetric)
.attr('title', (d) => {
if (!isNaN(d.val)) {
return fC(d.val);
}
return null;
})
.attr('data-sort', function (d) {
return (d.isMetric) ? d.val : null;
})
.on('click', function (d) {
if (!d.isMetric && fd.table_filter) {
const td = d3.select(this);
if (td.classed('filtered')) {
slice.removeFilter(d.col, [d.val]);
d3.select(this).classed('filtered', false);
} else {
d3.select(this).classed('filtered', true);
slice.addFilter(d.col, [d.val]);
}
}
})
.style('cursor', function (d) {
return (!d.isMetric) ? 'pointer' : '';
})
.html(d => d.html ? d.html : d.val);
const height = slice.height();
let paging = false;
let pageLength;
if (fd.page_length && fd.page_length > 0) {
paging = true;
pageLength = parseInt(fd.page_length, 10);
}
const datatable = container.find('.dataTable').DataTable({
paging,
pageLength,
aaSorting: [],
searching: fd.include_search,
bInfo: false,
scrollY: height + 'px',
scrollCollapse: true,
scrollX: true,
});
fixDataTableBodyHeight(
container.find('.dataTables_wrapper'), height);
// Sorting table by main column
let sortBy;
if (fd.timeseries_limit_metric) {
// Sort by as specified
sortBy = fd.timeseries_limit_metric;
} else if (metrics.length > 0) {
// If not specified, use the first metric from the list
sortBy = metrics[0];
}
if (sortBy) {
datatable.column(data.columns.indexOf(sortBy)).order(fd.order_desc ? 'desc' : 'asc');
}
if (fd.timeseries_limit_metric && metrics.indexOf(fd.timeseries_limit_metric) < 0) {
// Hiding the sortBy column if not in the metrics list
datatable.column(data.columns.indexOf(sortBy)).visible(false);
}
datatable.draw();
container.parents('.widget').find('.tooltip').remove();
}
module.exports = tableVis;
| superset/assets/visualizations/table.js | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.0002552857040427625,
0.0001757630379870534,
0.00016369897639378905,
0.00017165204917546362,
0.000020056100765941665
]
|
{
"id": 5,
"code_window": [
"\"\"\"Views used by the SqlAlchemy connector\"\"\"\n",
"import logging\n",
"from past.builtins import basestring\n",
"\n",
"from flask import Markup, flash, redirect\n",
"from flask_appbuilder import CompactCRUDMixin, expose\n"
],
"labels": [
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/connectors/sqla/views.py",
"type": "replace",
"edit_start_line_idx": 1
} | from datetime import datetime
import logging
import sqlalchemy as sqla
from flask import Markup, flash, redirect
from flask_appbuilder import CompactCRUDMixin, expose
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import lazy_gettext as _
from flask_babel import gettext as __
from superset import db, utils, appbuilder, sm, security
from superset.connectors.connector_registry import ConnectorRegistry
from superset.utils import has_access
from superset.connectors.base.views import DatasourceModelView
from superset.views.base import (
BaseSupersetView,
SupersetModelView, validate_json, DeleteMixin, ListWidgetWithCheckboxes,
DatasourceFilter, get_datasource_exist_error_mgs)
from . import models
class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa
datamodel = SQLAInterface(models.DruidColumn)
list_title = _('List Druid Column')
show_title = _('Show Druid Column')
add_title = _('Add Druid Column')
edit_title = _('Edit Druid Column')
edit_columns = [
'column_name', 'description', 'dimension_spec_json', 'datasource',
'groupby', 'filterable', 'count_distinct', 'sum', 'min', 'max']
add_columns = edit_columns
list_columns = [
'column_name', 'verbose_name', 'type', 'groupby', 'filterable', 'count_distinct',
'sum', 'min', 'max']
can_delete = False
page_size = 500
label_columns = {
'column_name': _("Column"),
'type': _("Type"),
'datasource': _("Datasource"),
'groupby': _("Groupable"),
'filterable': _("Filterable"),
'count_distinct': _("Count Distinct"),
'sum': _("Sum"),
'min': _("Min"),
'max': _("Max"),
}
description_columns = {
'filterable': _(
"Whether this column is exposed in the `Filters` section "
"of the explore view."),
'dimension_spec_json': utils.markdown(
"this field can be used to specify "
"a `dimensionSpec` as documented [here]"
"(http://druid.io/docs/latest/querying/dimensionspecs.html). "
"Make sure to input valid JSON and that the "
"`outputName` matches the `column_name` defined "
"above.",
True),
}
def post_update(self, col):
col.generate_metrics()
utils.validate_json(col.dimension_spec_json)
def post_add(self, col):
self.post_update(col)
appbuilder.add_view_no_menu(DruidColumnInlineView)
class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
datamodel = SQLAInterface(models.DruidMetric)
list_title = _('List Druid Metric')
show_title = _('Show Druid Metric')
add_title = _('Add Druid Metric')
edit_title = _('Edit Druid Metric')
list_columns = ['metric_name', 'verbose_name', 'metric_type']
edit_columns = [
'metric_name', 'description', 'verbose_name', 'metric_type', 'json',
'datasource', 'd3format', 'is_restricted', 'warning_text']
add_columns = edit_columns
page_size = 500
validators_columns = {
'json': [validate_json],
}
description_columns = {
'metric_type': utils.markdown(
"use `postagg` as the metric type if you are defining a "
"[Druid Post Aggregation]"
"(http://druid.io/docs/latest/querying/post-aggregations.html)",
True),
'is_restricted': _("Whether the access to this metric is restricted "
"to certain roles. Only roles with the permission "
"'metric access on XXX (the name of this metric)' "
"are allowed to access this metric"),
}
label_columns = {
'metric_name': _("Metric"),
'description': _("Description"),
'verbose_name': _("Verbose Name"),
'metric_type': _("Type"),
'json': _("JSON"),
'datasource': _("Druid Datasource"),
'warning_text': _("Warning Message"),
}
def post_add(self, metric):
if metric.is_restricted:
security.merge_perm(sm, 'metric_access', metric.get_perm())
def post_update(self, metric):
if metric.is_restricted:
security.merge_perm(sm, 'metric_access', metric.get_perm())
appbuilder.add_view_no_menu(DruidMetricInlineView)
class DruidClusterModelView(SupersetModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.DruidCluster)
list_title = _('List Druid Cluster')
show_title = _('Show Druid Cluster')
add_title = _('Add Druid Cluster')
edit_title = _('Edit Druid Cluster')
add_columns = [
'verbose_name', 'coordinator_host', 'coordinator_port',
'coordinator_endpoint', 'broker_host', 'broker_port',
'broker_endpoint', 'cache_timeout', 'cluster_name',
]
edit_columns = add_columns
list_columns = ['cluster_name', 'metadata_last_refreshed']
search_columns = ('cluster_name',)
label_columns = {
'cluster_name': _("Cluster"),
'coordinator_host': _("Coordinator Host"),
'coordinator_port': _("Coordinator Port"),
'coordinator_endpoint': _("Coordinator Endpoint"),
'broker_host': _("Broker Host"),
'broker_port': _("Broker Port"),
'broker_endpoint': _("Broker Endpoint"),
}
def pre_add(self, cluster):
security.merge_perm(sm, 'database_access', cluster.perm)
def pre_update(self, cluster):
self.pre_add(cluster)
def _delete(self, pk):
DeleteMixin._delete(self, pk)
appbuilder.add_view(
DruidClusterModelView,
name="Druid Clusters",
label=__("Druid Clusters"),
icon="fa-cubes",
category="Sources",
category_label=__("Sources"),
category_icon='fa-database',)
class DruidDatasourceModelView(DatasourceModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.DruidDatasource)
list_title = _('List Druid Datasource')
show_title = _('Show Druid Datasource')
add_title = _('Add Druid Datasource')
edit_title = _('Edit Druid Datasource')
list_widget = ListWidgetWithCheckboxes
list_columns = [
'datasource_link', 'cluster', 'changed_by_', 'modified']
related_views = [DruidColumnInlineView, DruidMetricInlineView]
edit_columns = [
'datasource_name', 'cluster', 'slices', 'description', 'owner',
'is_hidden',
'filter_select_enabled', 'fetch_values_from',
'default_endpoint', 'offset', 'cache_timeout']
search_columns = (
'datasource_name', 'cluster', 'description', 'owner'
)
add_columns = edit_columns
show_columns = add_columns + ['perm']
page_size = 500
base_order = ('datasource_name', 'asc')
description_columns = {
'slices': _(
"The list of slices associated with this table. By "
"altering this datasource, you may change how these associated "
"slices behave. "
"Also note that slices need to point to a datasource, so "
"this form will fail at saving if removing slices from a "
"datasource. If you want to change the datasource for a slice, "
"overwrite the slice from the 'explore view'"),
'offset': _("Timezone offset (in hours) for this datasource"),
'description': Markup(
"Supports <a href='"
"https://daringfireball.net/projects/markdown/'>markdown</a>"),
'fetch_values_from': _(
"Time expression to use as a predicate when retrieving "
"distinct values to populate the filter component. "
"Only applies when `Enable Filter Select` is on. If "
"you enter `7 days ago`, the distinct list of values in "
"the filter will be populated based on the distinct value over "
"the past week"),
'filter_select_enabled': _(
"Whether to populate the filter's dropdown in the explore "
"view's filter section with a list of distinct values fetched "
"from the backend on the fly"),
'default_endpoint': _(
"Redirects to this endpoint when clicking on the datasource "
"from the datasource list"),
}
base_filters = [['id', DatasourceFilter, lambda: []]]
label_columns = {
'slices': _("Associated Slices"),
'datasource_link': _("Data Source"),
'cluster': _("Cluster"),
'description': _("Description"),
'owner': _("Owner"),
'is_hidden': _("Is Hidden"),
'filter_select_enabled': _("Enable Filter Select"),
'default_endpoint': _("Default Endpoint"),
'offset': _("Time Offset"),
'cache_timeout': _("Cache Timeout"),
}
def pre_add(self, datasource):
with db.session.no_autoflush:
query = (
db.session.query(models.DruidDatasource)
.filter(models.DruidDatasource.datasource_name ==
datasource.datasource_name,
models.DruidDatasource.cluster_name ==
datasource.cluster.id)
)
if db.session.query(query.exists()).scalar():
raise Exception(get_datasource_exist_error_mgs(
datasource.full_name))
def post_add(self, datasource):
datasource.generate_metrics()
security.merge_perm(sm, 'datasource_access', datasource.get_perm())
if datasource.schema:
security.merge_perm(sm, 'schema_access', datasource.schema_perm)
def post_update(self, datasource):
self.post_add(datasource)
def _delete(self, pk):
DeleteMixin._delete(self, pk)
appbuilder.add_view(
DruidDatasourceModelView,
"Druid Datasources",
label=__("Druid Datasources"),
category="Sources",
category_label=__("Sources"),
icon="fa-cube")
class Druid(BaseSupersetView):
"""The base views for Superset!"""
@has_access
@expose("/refresh_datasources/")
def refresh_datasources(self, refreshAll=True):
"""endpoint that refreshes druid datasources metadata"""
session = db.session()
DruidCluster = ConnectorRegistry.sources['druid'].cluster_class
for cluster in session.query(DruidCluster).all():
cluster_name = cluster.cluster_name
try:
cluster.refresh_datasources(refreshAll=refreshAll)
except Exception as e:
flash(
"Error while processing cluster '{}'\n{}".format(
cluster_name, utils.error_msg_from_exception(e)),
"danger")
logging.exception(e)
return redirect('/druidclustermodelview/list/')
cluster.metadata_last_refreshed = datetime.now()
flash(
"Refreshed metadata from cluster "
"[" + cluster.cluster_name + "]",
'info')
session.commit()
return redirect("/druiddatasourcemodelview/list/")
@has_access
@expose("/scan_new_datasources/")
def scan_new_datasources(self):
"""
        Calling this endpoint scans for new datasources only
        and registers them, without refreshing existing ones.
"""
return self.refresh_datasources(refreshAll=False)
appbuilder.add_view_no_menu(Druid)
appbuilder.add_link(
"Scan New Datasources",
label=__("Scan New Datasources"),
href='/druid/scan_new_datasources/',
category='Sources',
category_label=__("Sources"),
category_icon='fa-database',
icon="fa-refresh")
appbuilder.add_link(
"Refresh Druid Metadata",
label=__("Refresh Druid Metadata"),
href='/druid/refresh_datasources/',
category='Sources',
category_label=__("Sources"),
category_icon='fa-database',
icon="fa-cog")
appbuilder.add_separator("Sources", )
| superset/connectors/druid/views.py | 1 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.0045749349519610405,
0.000690234184730798,
0.0001606104342499748,
0.00017316512821707875,
0.0011395979672670364
]
|
{
"id": 5,
"code_window": [
"\"\"\"Views used by the SqlAlchemy connector\"\"\"\n",
"import logging\n",
"from past.builtins import basestring\n",
"\n",
"from flask import Markup, flash, redirect\n",
"from flask_appbuilder import CompactCRUDMixin, expose\n"
],
"labels": [
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/connectors/sqla/views.py",
"type": "replace",
"edit_start_line_idx": 1
} | .parcoords svg, .parcoords canvas {
font-size: 12px;
position: absolute;
}
.parcoords > canvas {
pointer-events: none;
}
.parcoords text.label {
font: 100%;
font-size: 12px;
cursor: drag;
}
.parcoords rect.background {
fill: transparent;
}
.parcoords rect.background:hover {
fill: rgba(120,120,120,0.2);
}
.parcoords .resize rect {
fill: rgba(0,0,0,0.1);
}
.parcoords rect.extent {
fill: rgba(255,255,255,0.25);
stroke: rgba(0,0,0,0.6);
}
.parcoords .axis line, .parcoords .axis path {
fill: none;
stroke: #222;
shape-rendering: crispEdges;
}
.parcoords canvas {
opacity: 1;
-moz-transition: opacity 0.3s;
-webkit-transition: opacity 0.3s;
-o-transition: opacity 0.3s;
}
.parcoords canvas.faded {
opacity: 0.25;
}
.parcoords {
-webkit-touch-callout: none;
-webkit-user-select: none;
-khtml-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
background-color: white;
}
/* data table styles */
.parcoords .row, .parcoords .header {
clear: left; font-size: 12px; line-height: 18px; height: 18px;
margin: 0px;
}
.parcoords .row:nth-child(odd) {
background: rgba(0,0,0,0.05);
}
.parcoords .header {
font-weight: bold;
}
.parcoords .cell {
float: left;
overflow: hidden;
white-space: nowrap;
width: 100px; height: 18px;
}
.parcoords .col-0 {
width: 180px;
}
| superset/assets/vendor/parallel_coordinates/d3.parcoords.css | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.000176782050402835,
0.00017342547653242946,
0.00016976181359495968,
0.0001737600250635296,
0.000002389042265349417
]
|
{
"id": 5,
"code_window": [
"\"\"\"Views used by the SqlAlchemy connector\"\"\"\n",
"import logging\n",
"from past.builtins import basestring\n",
"\n",
"from flask import Markup, flash, redirect\n",
"from flask_appbuilder import CompactCRUDMixin, expose\n"
],
"labels": [
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/connectors/sqla/views.py",
"type": "replace",
"edit_start_line_idx": 1
} | import { it, describe } from 'mocha';
import { expect } from 'chai';
import {
tickMultiFormat,
formatDate,
fDuration,
now,
epochTimeXHoursAgo,
epochTimeXDaysAgo,
epochTimeXYearsAgo,
} from '../../../javascripts/modules/dates';
describe('tickMultiFormat', () => {
it('is a function', () => {
assert.isFunction(tickMultiFormat);
});
});
describe('formatDate', () => {
it('is a function', () => {
assert.isFunction(formatDate);
});
});
describe('fDuration', () => {
it('is a function', () => {
assert.isFunction(fDuration);
});
it('returns a string', () => {
expect(fDuration(new Date(), new Date())).to.be.a('string');
});
it('returns the expected output', () => {
const output = fDuration('1496293608897', '1496293623406');
expect(output).to.equal('00:00:14.50');
});
});
describe('now', () => {
it('is a function', () => {
assert.isFunction(now);
});
it('returns a number', () => {
expect(now()).to.be.a('number');
});
});
describe('epochTimeXHoursAgo', () => {
it('is a function', () => {
assert.isFunction(epochTimeXHoursAgo);
});
it('returns a number', () => {
expect(epochTimeXHoursAgo(1)).to.be.a('number');
});
});
describe('epochTimeXDaysAgo', () => {
it('is a function', () => {
assert.isFunction(epochTimeXDaysAgo);
});
it('returns a number', () => {
expect(epochTimeXDaysAgo(1)).to.be.a('number');
});
});
describe('epochTimeXYearsAgo', () => {
it('is a function', () => {
assert.isFunction(epochTimeXYearsAgo);
});
it('returns a number', () => {
expect(epochTimeXYearsAgo(1)).to.be.a('number');
});
});
| superset/assets/spec/javascripts/modules/dates_spec.js | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.00017496207146905363,
0.00017291380208916962,
0.0001706938201095909,
0.00017371596186421812,
0.0000016988908555504167
]
|
{
"id": 5,
"code_window": [
"\"\"\"Views used by the SqlAlchemy connector\"\"\"\n",
"import logging\n",
"from past.builtins import basestring\n",
"\n",
"from flask import Markup, flash, redirect\n",
"from flask_appbuilder import CompactCRUDMixin, expose\n"
],
"labels": [
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/connectors/sqla/views.py",
"type": "replace",
"edit_start_line_idx": 1
} | import { getExploreUrl } from '../exploreUtils';
import { getFormDataFromControls } from '../stores/store';
import { triggerQuery } from './exploreActions';
const $ = window.$ = require('jquery');
export const CHART_UPDATE_STARTED = 'CHART_UPDATE_STARTED';
export function chartUpdateStarted(queryRequest, latestQueryFormData) {
return { type: CHART_UPDATE_STARTED, queryRequest, latestQueryFormData };
}
export const CHART_UPDATE_SUCCEEDED = 'CHART_UPDATE_SUCCEEDED';
export function chartUpdateSucceeded(queryResponse) {
return { type: CHART_UPDATE_SUCCEEDED, queryResponse };
}
export const CHART_UPDATE_STOPPED = 'CHART_UPDATE_STOPPED';
export function chartUpdateStopped(queryRequest) {
if (queryRequest) {
queryRequest.abort();
}
return { type: CHART_UPDATE_STOPPED };
}
export const CHART_UPDATE_TIMEOUT = 'CHART_UPDATE_TIMEOUT';
export function chartUpdateTimeout(statusText, timeout) {
return { type: CHART_UPDATE_TIMEOUT, statusText, timeout };
}
export const CHART_UPDATE_FAILED = 'CHART_UPDATE_FAILED';
export function chartUpdateFailed(queryResponse) {
return { type: CHART_UPDATE_FAILED, queryResponse };
}
export const UPDATE_CHART_STATUS = 'UPDATE_CHART_STATUS';
export function updateChartStatus(status) {
return { type: UPDATE_CHART_STATUS, status };
}
export const CHART_RENDERING_FAILED = 'CHART_RENDERING_FAILED';
export function chartRenderingFailed(error) {
return { type: CHART_RENDERING_FAILED, error };
}
export const REMOVE_CHART_ALERT = 'REMOVE_CHART_ALERT';
export function removeChartAlert() {
return { type: REMOVE_CHART_ALERT };
}
export const RUN_QUERY = 'RUN_QUERY';
export function runQuery(formData, force = false, timeout = 60) {
return function (dispatch, getState) {
const { explore } = getState();
const lastQueryFormData = getFormDataFromControls(explore.controls);
const url = getExploreUrl(formData, 'json', force);
const queryRequest = $.ajax({
url,
dataType: 'json',
success(queryResponse) {
dispatch(chartUpdateSucceeded(queryResponse));
},
error(err) {
if (err.statusText === 'timeout') {
dispatch(chartUpdateTimeout(err.statusText, timeout));
} else if (err.statusText !== 'abort') {
dispatch(chartUpdateFailed(err.responseJSON));
}
},
timeout: timeout * 1000,
});
dispatch(chartUpdateStarted(queryRequest, lastQueryFormData));
dispatch(triggerQuery(false));
};
}
| superset/assets/javascripts/explore/actions/chartActions.js | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.00017460458911955357,
0.00016970551223494112,
0.0001644848962314427,
0.00016975791368167847,
0.000003280968485341873
]
|
{
"id": 6,
"code_window": [
"\n",
"from flask import Markup, flash, redirect\n",
"from flask_appbuilder import CompactCRUDMixin, expose\n",
"from flask_appbuilder.models.sqla.interface import SQLAInterface\n",
"import sqlalchemy as sa\n",
"\n",
"from flask_babel import lazy_gettext as _\n",
"from flask_babel import gettext as __\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/connectors/sqla/views.py",
"type": "replace",
"edit_start_line_idx": 7
} | # pylint: disable=invalid-unary-operand-type
from collections import OrderedDict
import json
import logging
from copy import deepcopy
from datetime import datetime, timedelta
from six import string_types
from multiprocessing import Pool
import requests
import sqlalchemy as sa
from sqlalchemy import (
Column, Integer, String, ForeignKey, Text, Boolean,
DateTime, or_, and_,
)
from sqlalchemy.orm import backref, relationship
from dateutil.parser import parse as dparse
from pydruid.client import PyDruid
from pydruid.utils.aggregators import count
from pydruid.utils.filters import Dimension, Filter
from pydruid.utils.postaggregator import (
Postaggregator, Quantile, Quantiles, Field, Const, HyperUniqueCardinality,
)
from pydruid.utils.having import Aggregation
from flask import Markup, escape
from flask_appbuilder.models.decorators import renders
from flask_appbuilder import Model
from flask_babel import lazy_gettext as _
from superset import conf, db, import_util, utils, sm, get_session
from superset.utils import (
flasher, MetricPermException, DimSelector, DTTM_ALIAS
)
from superset.connectors.base.models import BaseDatasource, BaseColumn, BaseMetric
from superset.models.helpers import AuditMixinNullable, QueryResult, set_perm
DRUID_TZ = conf.get("DRUID_TZ")
# Function wrapper because bound methods cannot
# be passed to processes
def _fetch_metadata_for(datasource):
return datasource.latest_metadata()
class JavascriptPostAggregator(Postaggregator):
def __init__(self, name, field_names, function):
self.post_aggregator = {
'type': 'javascript',
'fieldNames': field_names,
'name': name,
'function': function,
}
self.name = name
class CustomPostAggregator(Postaggregator):
"""A way to allow users to specify completely custom PostAggregators"""
def __init__(self, name, post_aggregator):
self.name = name
self.post_aggregator = post_aggregator
class DruidCluster(Model, AuditMixinNullable):
"""ORM object referencing the Druid clusters"""
__tablename__ = 'clusters'
type = "druid"
id = Column(Integer, primary_key=True)
verbose_name = Column(String(250), unique=True)
# short unique name, used in permissions
cluster_name = Column(String(250), unique=True)
coordinator_host = Column(String(255))
coordinator_port = Column(Integer, default=8081)
coordinator_endpoint = Column(
String(255), default='druid/coordinator/v1/metadata')
broker_host = Column(String(255))
broker_port = Column(Integer, default=8082)
broker_endpoint = Column(String(255), default='druid/v2')
metadata_last_refreshed = Column(DateTime)
cache_timeout = Column(Integer)
def __repr__(self):
return self.verbose_name if self.verbose_name else self.cluster_name
def get_pydruid_client(self):
cli = PyDruid(
"http://{0}:{1}/".format(self.broker_host, self.broker_port),
self.broker_endpoint)
return cli
def get_datasources(self):
endpoint = (
"http://{obj.coordinator_host}:{obj.coordinator_port}/"
"{obj.coordinator_endpoint}/datasources"
).format(obj=self)
return json.loads(requests.get(endpoint).text)
def get_druid_version(self):
endpoint = (
"http://{obj.coordinator_host}:{obj.coordinator_port}/status"
).format(obj=self)
return json.loads(requests.get(endpoint).text)['version']
def refresh_datasources(
self,
datasource_name=None,
merge_flag=True,
refreshAll=True):
"""Refresh metadata of all datasources in the cluster
If ``datasource_name`` is specified, only that datasource is updated
"""
self.druid_version = self.get_druid_version()
ds_list = self.get_datasources()
blacklist = conf.get('DRUID_DATA_SOURCE_BLACKLIST', [])
ds_refresh = []
if not datasource_name:
ds_refresh = list(filter(lambda ds: ds not in blacklist, ds_list))
elif datasource_name not in blacklist and datasource_name in ds_list:
ds_refresh.append(datasource_name)
else:
return
self.refresh_async(ds_refresh, merge_flag, refreshAll)
def refresh_async(self, datasource_names, merge_flag, refreshAll):
"""
        Fetches metadata for the specified datasources and
        merges it into the Superset database
"""
session = db.session
ds_list = (
session.query(DruidDatasource)
.filter(or_(DruidDatasource.datasource_name == name
for name in datasource_names))
)
ds_map = {ds.name: ds for ds in ds_list}
for ds_name in datasource_names:
datasource = ds_map.get(ds_name, None)
if not datasource:
datasource = DruidDatasource(datasource_name=ds_name)
with session.no_autoflush:
session.add(datasource)
flasher(
"Adding new datasource [{}]".format(ds_name), 'success')
ds_map[ds_name] = datasource
elif refreshAll:
flasher(
"Refreshing datasource [{}]".format(ds_name), 'info')
else:
del ds_map[ds_name]
continue
datasource.cluster = self
datasource.merge_flag = merge_flag
session.flush()
        # Prepare multithreaded execution
pool = Pool()
ds_refresh = list(ds_map.values())
metadata = pool.map(_fetch_metadata_for, ds_refresh)
pool.close()
pool.join()
for i in range(0, len(ds_refresh)):
datasource = ds_refresh[i]
cols = metadata[i]
col_objs_list = (
session.query(DruidColumn)
.filter(DruidColumn.datasource_name == datasource.datasource_name)
.filter(or_(DruidColumn.column_name == col for col in cols))
)
col_objs = {col.column_name: col for col in col_objs_list}
for col in cols:
if col == '__time': # skip the time column
continue
col_obj = col_objs.get(col, None)
if not col_obj:
col_obj = DruidColumn(
datasource_name=datasource.datasource_name,
column_name=col)
with session.no_autoflush:
session.add(col_obj)
datatype = cols[col]['type']
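                # STRING columns become dimensions (groupable and filterable)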
if datatype == 'STRING':
col_obj.groupby = True
col_obj.filterable = True
if datatype == 'hyperUnique' or datatype == 'thetaSketch':
col_obj.count_distinct = True
# Allow sum/min/max for long or double
if datatype == 'LONG' or datatype == 'DOUBLE':
col_obj.sum = True
col_obj.min = True
col_obj.max = True
col_obj.type = datatype
col_obj.datasource = datasource
datasource.generate_metrics_for(col_objs_list)
session.commit()
@property
def perm(self):
return "[{obj.cluster_name}].(id:{obj.id})".format(obj=self)
def get_perm(self):
return self.perm
@property
def name(self):
return self.verbose_name if self.verbose_name else self.cluster_name
@property
def unique_name(self):
return self.verbose_name if self.verbose_name else self.cluster_name
class DruidColumn(Model, BaseColumn):
"""ORM model for storing Druid datasource column metadata"""
__tablename__ = 'columns'
datasource_name = Column(
String(255),
ForeignKey('datasources.datasource_name'))
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('columns', cascade='all, delete-orphan'),
enable_typechecks=False)
dimension_spec_json = Column(Text)
export_fields = (
'datasource_name', 'column_name', 'is_active', 'type', 'groupby',
'count_distinct', 'sum', 'avg', 'max', 'min', 'filterable',
'description', 'dimension_spec_json'
)
def __repr__(self):
return self.column_name
@property
def expression(self):
return self.dimension_spec_json
@property
def dimension_spec(self):
if self.dimension_spec_json:
return json.loads(self.dimension_spec_json)
def get_metrics(self):
metrics = {}
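        # A COUNT(*) metric is always generated; the column flags below
        # add sum/avg/min/max and count-distinct variants.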
metrics['count'] = DruidMetric(
metric_name='count',
verbose_name='COUNT(*)',
metric_type='count',
json=json.dumps({'type': 'count', 'name': 'count'})
)
# Somehow we need to reassign this for UDAFs
if self.type in ('DOUBLE', 'FLOAT'):
corrected_type = 'DOUBLE'
else:
corrected_type = self.type
if self.sum and self.is_num:
mt = corrected_type.lower() + 'Sum'
name = 'sum__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='sum',
verbose_name='SUM({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.avg and self.is_num:
mt = corrected_type.lower() + 'Avg'
name = 'avg__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='avg',
verbose_name='AVG({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.min and self.is_num:
mt = corrected_type.lower() + 'Min'
name = 'min__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='min',
verbose_name='MIN({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.max and self.is_num:
mt = corrected_type.lower() + 'Max'
name = 'max__' + self.column_name
metrics[name] = DruidMetric(
metric_name=name,
metric_type='max',
verbose_name='MAX({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
)
if self.count_distinct:
name = 'count_distinct__' + self.column_name
if self.type == 'hyperUnique' or self.type == 'thetaSketch':
metrics[name] = DruidMetric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type=self.type,
json=json.dumps({
'type': self.type,
'name': name,
'fieldName': self.column_name
})
)
else:
metrics[name] = DruidMetric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type='count_distinct',
json=json.dumps({
'type': 'cardinality',
'name': name,
'fieldNames': [self.column_name]})
)
return metrics
def generate_metrics(self):
"""Generate metrics based on the column metadata"""
metrics = self.get_metrics()
dbmetrics = (
db.session.query(DruidMetric)
.filter(DruidCluster.cluster_name == self.datasource.cluster_name)
.filter(DruidMetric.datasource_name == self.datasource_name)
.filter(or_(
DruidMetric.metric_name == m for m in metrics
))
)
dbmetrics = {metric.metric_name: metric for metric in dbmetrics}
for metric in metrics.values():
metric.datasource_name = self.datasource_name
if not dbmetrics.get(metric.metric_name, None):
db.session.add(metric)
@classmethod
def import_obj(cls, i_column):
def lookup_obj(lookup_column):
return db.session.query(DruidColumn).filter(
DruidColumn.datasource_name == lookup_column.datasource_name,
DruidColumn.column_name == lookup_column.column_name).first()
return import_util.import_simple_obj(db.session, i_column, lookup_obj)
class DruidMetric(Model, BaseMetric):
"""ORM object referencing Druid metrics for a datasource"""
__tablename__ = 'metrics'
datasource_name = Column(
String(255),
ForeignKey('datasources.datasource_name'))
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('metrics', cascade='all, delete-orphan'),
enable_typechecks=False)
json = Column(Text)
export_fields = (
'metric_name', 'verbose_name', 'metric_type', 'datasource_name',
'json', 'description', 'is_restricted', 'd3format'
)
@property
def expression(self):
return self.json
@property
def json_obj(self):
try:
obj = json.loads(self.json)
except Exception:
obj = {}
return obj
@property
def perm(self):
return (
"{parent_name}.[{obj.metric_name}](id:{obj.id})"
).format(obj=self,
parent_name=self.datasource.full_name
) if self.datasource else None
@classmethod
def import_obj(cls, i_metric):
def lookup_obj(lookup_metric):
return db.session.query(DruidMetric).filter(
DruidMetric.datasource_name == lookup_metric.datasource_name,
DruidMetric.metric_name == lookup_metric.metric_name).first()
return import_util.import_simple_obj(db.session, i_metric, lookup_obj)
class DruidDatasource(Model, BaseDatasource):
"""ORM object referencing Druid datasources (tables)"""
__tablename__ = 'datasources'
type = "druid"
query_langtage = "json"
cluster_class = DruidCluster
metric_class = DruidMetric
column_class = DruidColumn
baselink = "druiddatasourcemodelview"
# Columns
datasource_name = Column(String(255), unique=True)
is_hidden = Column(Boolean, default=False)
fetch_values_from = Column(String(100))
cluster_name = Column(
String(250), ForeignKey('clusters.cluster_name'))
cluster = relationship(
'DruidCluster', backref='datasources', foreign_keys=[cluster_name])
user_id = Column(Integer, ForeignKey('ab_user.id'))
owner = relationship(
sm.user_model,
backref=backref('datasources', cascade='all, delete-orphan'),
foreign_keys=[user_id])
export_fields = (
'datasource_name', 'is_hidden', 'description', 'default_endpoint',
'cluster_name', 'offset', 'cache_timeout', 'params'
)
@property
def database(self):
return self.cluster
@property
def connection(self):
return str(self.database)
@property
def num_cols(self):
return [c.column_name for c in self.columns if c.is_num]
@property
def name(self):
return self.datasource_name
@property
def schema(self):
ds_name = self.datasource_name or ''
name_pieces = ds_name.split('.')
if len(name_pieces) > 1:
return name_pieces[0]
else:
return None
@property
def schema_perm(self):
"""Returns schema permission if present, cluster one otherwise."""
return utils.get_schema_perm(self.cluster, self.schema)
def get_perm(self):
return (
"[{obj.cluster_name}].[{obj.datasource_name}]"
"(id:{obj.id})").format(obj=self)
@property
def link(self):
name = escape(self.datasource_name)
return Markup('<a href="{self.url}">{name}</a>').format(**locals())
@property
def full_name(self):
return utils.get_datasource_full_name(
self.cluster_name, self.datasource_name)
@property
def time_column_grains(self):
return {
"time_columns": [
'all', '5 seconds', '30 seconds', '1 minute',
'5 minutes', '1 hour', '6 hour', '1 day', '7 days',
'week', 'week_starting_sunday', 'week_ending_saturday',
'month',
],
"time_grains": ['now']
}
def __repr__(self):
return self.datasource_name
@renders('datasource_name')
def datasource_link(self):
url = "/superset/explore/{obj.type}/{obj.id}/".format(obj=self)
name = escape(self.datasource_name)
return Markup('<a href="{url}">{name}</a>'.format(**locals()))
def get_metric_obj(self, metric_name):
return [
m.json_obj for m in self.metrics
if m.metric_name == metric_name
][0]
@classmethod
def import_obj(cls, i_datasource, import_time=None):
"""Imports the datasource from the object to the database.
        Metrics, columns and the datasource itself are overridden if they
        already exist. This function can be used to import/export datasources
        between multiple superset instances. Audit metadata isn't copied over.
"""
def lookup_datasource(d):
return db.session.query(DruidDatasource).join(DruidCluster).filter(
DruidDatasource.datasource_name == d.datasource_name,
DruidCluster.cluster_name == d.cluster_name,
).first()
def lookup_cluster(d):
return db.session.query(DruidCluster).filter_by(
cluster_name=d.cluster_name).one()
return import_util.import_datasource(
db.session, i_datasource, lookup_cluster, lookup_datasource,
import_time)
@staticmethod
def version_higher(v1, v2):
"""is v1 higher than v2
>>> DruidDatasource.version_higher('0.8.2', '0.9.1')
False
>>> DruidDatasource.version_higher('0.8.2', '0.6.1')
True
>>> DruidDatasource.version_higher('0.8.2', '0.8.2')
False
>>> DruidDatasource.version_higher('0.8.2', '0.9.BETA')
False
>>> DruidDatasource.version_higher('0.8.2', '0.9')
False
"""
def int_or_0(v):
try:
v = int(v)
except (TypeError, ValueError):
v = 0
return v
v1nums = [int_or_0(n) for n in v1.split('.')]
v2nums = [int_or_0(n) for n in v2.split('.')]
v1nums = (v1nums + [0, 0, 0])[:3]
v2nums = (v2nums + [0, 0, 0])[:3]
        return (
            v1nums[0] > v2nums[0] or
            (v1nums[0] == v2nums[0] and v1nums[1] > v2nums[1]) or
            (v1nums[0] == v2nums[0] and v1nums[1] == v2nums[1] and
             v1nums[2] > v2nums[2]))
def latest_metadata(self):
"""Returns segment metadata from the latest segment"""
logging.info("Syncing datasource [{}]".format(self.datasource_name))
client = self.cluster.get_pydruid_client()
results = client.time_boundary(datasource=self.datasource_name)
if not results:
return
max_time = results[0]['result']['maxTime']
max_time = dparse(max_time)
# Query segmentMetadata for 7 days back. However, due to a bug,
# we need to set this interval to more than 1 day ago to exclude
# realtime segments, which triggered a bug (fixed in druid 0.8.2).
# https://groups.google.com/forum/#!topic/druid-user/gVCqqspHqOQ
lbound = (max_time - timedelta(days=7)).isoformat()
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
rbound = (max_time - timedelta(1)).isoformat()
else:
rbound = max_time.isoformat()
segment_metadata = None
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=lbound + '/' + rbound,
merge=self.merge_flag,
analysisTypes=[])
except Exception as e:
logging.warning("Failed first attempt to get latest segment")
logging.exception(e)
if not segment_metadata:
# if no segments in the past 7 days, look at all segments
lbound = datetime(1901, 1, 1).isoformat()[:10]
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
rbound = datetime.now().isoformat()
else:
rbound = datetime(2050, 1, 1).isoformat()[:10]
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=lbound + '/' + rbound,
merge=self.merge_flag,
analysisTypes=[])
except Exception as e:
logging.warning("Failed 2nd attempt to get latest segment")
logging.exception(e)
if segment_metadata:
return segment_metadata[-1]['columns']
def generate_metrics(self):
self.generate_metrics_for(self.columns)
def generate_metrics_for(self, columns):
metrics = {}
for col in columns:
metrics.update(col.get_metrics())
dbmetrics = (
db.session.query(DruidMetric)
.filter(DruidCluster.cluster_name == self.cluster_name)
.filter(DruidMetric.datasource_name == self.datasource_name)
.filter(or_(DruidMetric.metric_name == m for m in metrics))
)
dbmetrics = {metric.metric_name: metric for metric in dbmetrics}
for metric in metrics.values():
metric.datasource_name = self.datasource_name
if not dbmetrics.get(metric.metric_name, None):
with db.session.no_autoflush:
db.session.add(metric)
@classmethod
def sync_to_db_from_config(
cls,
druid_config,
user,
cluster,
refresh=True):
"""Merges the ds config from druid_config into one stored in the db."""
session = db.session
datasource = (
session.query(cls)
.filter_by(datasource_name=druid_config['name'])
.first()
)
# Create a new datasource.
if not datasource:
datasource = cls(
datasource_name=druid_config['name'],
cluster=cluster,
owner=user,
changed_by_fk=user.id,
created_by_fk=user.id,
)
session.add(datasource)
elif not refresh:
return
dimensions = druid_config['dimensions']
col_objs = (
session.query(DruidColumn)
.filter(DruidColumn.datasource_name == druid_config['name'])
.filter(or_(DruidColumn.column_name == dim for dim in dimensions))
)
col_objs = {col.column_name: col for col in col_objs}
for dim in dimensions:
col_obj = col_objs.get(dim, None)
if not col_obj:
col_obj = DruidColumn(
datasource_name=druid_config['name'],
column_name=dim,
groupby=True,
filterable=True,
# TODO: fetch type from Hive.
type="STRING",
datasource=datasource,
)
session.add(col_obj)
# Import Druid metrics
metric_objs = (
session.query(DruidMetric)
.filter(DruidMetric.datasource_name == druid_config['name'])
.filter(or_(DruidMetric.metric_name == spec['name']
for spec in druid_config["metrics_spec"]))
)
metric_objs = {metric.metric_name: metric for metric in metric_objs}
for metric_spec in druid_config["metrics_spec"]:
metric_name = metric_spec["name"]
metric_type = metric_spec["type"]
metric_json = json.dumps(metric_spec)
if metric_type == "count":
metric_type = "longSum"
metric_json = json.dumps({
"type": "longSum",
"name": metric_name,
"fieldName": metric_name,
})
metric_obj = metric_objs.get(metric_name, None)
if not metric_obj:
metric_obj = DruidMetric(
metric_name=metric_name,
metric_type=metric_type,
verbose_name="%s(%s)" % (metric_type, metric_name),
datasource=datasource,
json=metric_json,
description=(
"Imported from the airolap config dir for %s" %
druid_config['name']),
)
session.add(metric_obj)
session.commit()
@staticmethod
def time_offset(granularity):
if granularity == 'week_ending_saturday':
return 6 * 24 * 3600 * 1000 # 6 days
return 0
# uses https://en.wikipedia.org/wiki/ISO_8601
# http://druid.io/docs/0.8.0/querying/granularities.html
# TODO: pass origin from the UI
@staticmethod
def granularity(period_name, timezone=None, origin=None):
if not period_name or period_name == 'all':
return 'all'
iso_8601_dict = {
'5 seconds': 'PT5S',
'30 seconds': 'PT30S',
'1 minute': 'PT1M',
'5 minutes': 'PT5M',
'1 hour': 'PT1H',
'6 hour': 'PT6H',
'one day': 'P1D',
'1 day': 'P1D',
'7 days': 'P7D',
'week': 'P1W',
'week_starting_sunday': 'P1W',
'week_ending_saturday': 'P1W',
'month': 'P1M',
}
granularity = {'type': 'period'}
if timezone:
granularity['timeZone'] = timezone
if origin:
dttm = utils.parse_human_datetime(origin)
granularity['origin'] = dttm.isoformat()
if period_name in iso_8601_dict:
granularity['period'] = iso_8601_dict[period_name]
if period_name in ('week_ending_saturday', 'week_starting_sunday'):
# use Sunday as start of the week
granularity['origin'] = '2016-01-03T00:00:00'
elif not isinstance(period_name, string_types):
granularity['type'] = 'duration'
granularity['duration'] = period_name
elif period_name.startswith('P'):
# identify if the string is the iso_8601 period
granularity['period'] = period_name
else:
granularity['type'] = 'duration'
granularity['duration'] = utils.parse_human_timedelta(
period_name).total_seconds() * 1000
return granularity
@staticmethod
def _metrics_and_post_aggs(metrics, metrics_dict):
all_metrics = []
post_aggs = {}
def recursive_get_fields(_conf):
_type = _conf.get('type')
_field = _conf.get('field')
_fields = _conf.get('fields')
field_names = []
if _type in ['fieldAccess', 'hyperUniqueCardinality',
'quantile', 'quantiles']:
field_names.append(_conf.get('fieldName', ''))
if _field:
field_names += recursive_get_fields(_field)
if _fields:
for _f in _fields:
field_names += recursive_get_fields(_f)
return list(set(field_names))
for metric_name in metrics:
metric = metrics_dict[metric_name]
if metric.metric_type != 'postagg':
all_metrics.append(metric_name)
else:
mconf = metric.json_obj
all_metrics += recursive_get_fields(mconf)
all_metrics += mconf.get('fieldNames', [])
if mconf.get('type') == 'javascript':
post_aggs[metric_name] = JavascriptPostAggregator(
name=mconf.get('name', ''),
field_names=mconf.get('fieldNames', []),
function=mconf.get('function', ''))
elif mconf.get('type') == 'quantile':
post_aggs[metric_name] = Quantile(
mconf.get('name', ''),
mconf.get('probability', ''),
)
elif mconf.get('type') == 'quantiles':
post_aggs[metric_name] = Quantiles(
mconf.get('name', ''),
mconf.get('probabilities', ''),
)
elif mconf.get('type') == 'fieldAccess':
post_aggs[metric_name] = Field(mconf.get('name'))
elif mconf.get('type') == 'constant':
post_aggs[metric_name] = Const(
mconf.get('value'),
output_name=mconf.get('name', '')
)
elif mconf.get('type') == 'hyperUniqueCardinality':
post_aggs[metric_name] = HyperUniqueCardinality(
mconf.get('name')
)
elif mconf.get('type') == 'arithmetic':
post_aggs[metric_name] = Postaggregator(
mconf.get('fn', "/"),
mconf.get('fields', []),
mconf.get('name', ''))
else:
post_aggs[metric_name] = CustomPostAggregator(
mconf.get('name', ''),
mconf)
return all_metrics, post_aggs
def values_for_column(self,
column_name,
limit=10000):
"""Retrieve some values for the given column"""
# TODO: Use Lexicographic TopNMetricSpec once supported by PyDruid
if self.fetch_values_from:
from_dttm = utils.parse_human_datetime(self.fetch_values_from)
else:
from_dttm = datetime(1970, 1, 1)
qry = dict(
datasource=self.datasource_name,
granularity="all",
intervals=from_dttm.isoformat() + '/' + datetime.now().isoformat(),
aggregations=dict(count=count("count")),
dimension=column_name,
metric="count",
threshold=limit,
)
client = self.cluster.get_pydruid_client()
client.topn(**qry)
df = client.export_pandas()
return [row[column_name] for row in df.to_records(index=False)]
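
    # phase=1 returns only the pre-query of a two-phase query; phase=2 returns
    # the full query string that is actually sent to Druid.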
def get_query_str(self, query_obj, phase=1, client=None):
return self.run_query(client=client, phase=phase, **query_obj)

    def _add_filter_from_pre_query_data(self, df, dimensions, dim_filter):
ret = dim_filter
if df is not None and not df.empty:
new_filters = []
for unused, row in df.iterrows():
fields = []
for dim in dimensions:
f = Dimension(dim) == row[dim]
fields.append(f)
if len(fields) > 1:
term = Filter(type="and", fields=fields)
new_filters.append(term)
elif fields:
new_filters.append(fields[0])
if new_filters:
ff = Filter(type="or", fields=new_filters)
if not dim_filter:
ret = ff
else:
ret = Filter(type="and", fields=[ff, dim_filter])
return ret
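
    # Depending on the grouping, run_query issues a Druid timeseries query
    # (no dimensions), a topn query (one dimension, descending order), or a
    # groupby query. topn/groupby queries with a timeseries limit first run a
    # "phase 1" pre-query to find the top series, then constrain the phase 2
    # query to those values.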
def run_query( # noqa / druid
self,
groupby, metrics,
granularity,
from_dttm, to_dttm,
filter=None, # noqa
is_timeseries=True,
timeseries_limit=None,
timeseries_limit_metric=None,
row_limit=None,
inner_from_dttm=None, inner_to_dttm=None,
orderby=None,
extras=None, # noqa
select=None, # noqa
columns=None, phase=2, client=None, form_data=None,
order_desc=True):
"""Runs a query against Druid and returns a dataframe.
"""
# TODO refactor into using a TBD Query object
client = client or self.cluster.get_pydruid_client()
if not is_timeseries:
granularity = 'all'
inner_from_dttm = inner_from_dttm or from_dttm
inner_to_dttm = inner_to_dttm or to_dttm
        # attach the configured timezone to the otherwise naive datetimes
from_dttm = from_dttm.replace(tzinfo=DRUID_TZ)
to_dttm = to_dttm.replace(tzinfo=DRUID_TZ)
timezone = from_dttm.tzname()
query_str = ""
metrics_dict = {m.metric_name: m for m in self.metrics}
columns_dict = {c.column_name: c for c in self.columns}
all_metrics, post_aggs = self._metrics_and_post_aggs(
metrics,
metrics_dict)
aggregations = OrderedDict()
for m in self.metrics:
if m.metric_name in all_metrics:
aggregations[m.metric_name] = m.json_obj
rejected_metrics = [
m.metric_name for m in self.metrics
if m.is_restricted and
m.metric_name in aggregations.keys() and
not sm.has_access('metric_access', m.perm)
]
if rejected_metrics:
raise MetricPermException(
"Access to the metrics denied: " + ', '.join(rejected_metrics)
)
# the dimensions list with dimensionSpecs expanded
dimensions = []
groupby = [gb for gb in groupby if gb in columns_dict]
for column_name in groupby:
col = columns_dict.get(column_name)
dim_spec = col.dimension_spec
if dim_spec:
dimensions.append(dim_spec)
else:
dimensions.append(column_name)
qry = dict(
datasource=self.datasource_name,
dimensions=dimensions,
aggregations=aggregations,
granularity=DruidDatasource.granularity(
granularity,
timezone=timezone,
origin=extras.get('druid_time_origin'),
),
post_aggregations=post_aggs,
intervals=from_dttm.isoformat() + '/' + to_dttm.isoformat(),
)
filters = self.get_filters(filter)
if filters:
qry['filter'] = filters
having_filters = self.get_having_filters(extras.get('having_druid'))
if having_filters:
qry['having'] = having_filters
order_direction = "descending" if order_desc else "ascending"
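
        # No dimensions and no having clause: issue a plain timeseries query.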
if len(groupby) == 0 and not having_filters:
del qry['dimensions']
client.timeseries(**qry)
if not having_filters and len(groupby) == 1 and order_desc:
dim = list(qry.get('dimensions'))[0]
if timeseries_limit_metric:
order_by = timeseries_limit_metric
else:
order_by = list(qry['aggregations'].keys())[0]
            # Limit the number of timeseries by running a two-phase query
pre_qry = deepcopy(qry)
pre_qry['granularity'] = "all"
pre_qry['threshold'] = min(row_limit,
timeseries_limit or row_limit)
pre_qry['metric'] = order_by
pre_qry['dimension'] = dim
del pre_qry['dimensions']
client.topn(**pre_qry)
query_str += "// Two phase query\n// Phase 1\n"
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
query_str += "\n"
if phase == 1:
return query_str
query_str += (
"//\nPhase 2 (built based on phase one's results)\n")
df = client.export_pandas()
qry['filter'] = self._add_filter_from_pre_query_data(
df,
qry['dimensions'], filters)
qry['threshold'] = timeseries_limit or 1000
if row_limit and granularity == 'all':
qry['threshold'] = row_limit
            qry['dimension'] = dim
del qry['dimensions']
qry['metric'] = list(qry['aggregations'].keys())[0]
client.topn(**qry)
elif len(groupby) > 1 or having_filters or not order_desc:
# If grouping on multiple fields or using a having filter
# we have to force a groupby query
if timeseries_limit and is_timeseries:
order_by = metrics[0] if metrics else self.metrics[0]
if timeseries_limit_metric:
order_by = timeseries_limit_metric
                # Limit the number of timeseries by running a two-phase query
pre_qry = deepcopy(qry)
pre_qry['granularity'] = "all"
pre_qry['limit_spec'] = {
"type": "default",
"limit": min(timeseries_limit, row_limit),
'intervals': (
inner_from_dttm.isoformat() + '/' +
inner_to_dttm.isoformat()),
"columns": [{
"dimension": order_by,
"direction": order_direction,
}],
}
client.groupby(**pre_qry)
query_str += "// Two phase query\n// Phase 1\n"
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
query_str += "\n"
if phase == 1:
return query_str
query_str += (
"//\nPhase 2 (built based on phase one's results)\n")
df = client.export_pandas()
qry['filter'] = self._add_filter_from_pre_query_data(
df,
qry['dimensions'], filters)
qry['limit_spec'] = None
if row_limit:
qry['limit_spec'] = {
"type": "default",
"limit": row_limit,
"columns": [{
"dimension": (
metrics[0] if metrics else self.metrics[0]),
"direction": order_direction,
}],
}
client.groupby(**qry)
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
return query_str

    def query(self, query_obj):
qry_start_dttm = datetime.now()
client = self.cluster.get_pydruid_client()
query_str = self.get_query_str(
client=client, query_obj=query_obj, phase=2)
df = client.export_pandas()
if df is None or df.size == 0:
raise Exception(_("No data was returned."))
df.columns = [
DTTM_ALIAS if c == 'timestamp' else c for c in df.columns]
        is_timeseries = query_obj.get('is_timeseries', True)
if (
not is_timeseries and
DTTM_ALIAS in df.columns):
del df[DTTM_ALIAS]
# Reordering columns
cols = []
if DTTM_ALIAS in df.columns:
cols += [DTTM_ALIAS]
cols += [col for col in query_obj['groupby'] if col in df.columns]
cols += [col for col in query_obj['metrics'] if col in df.columns]
df = df[cols]
time_offset = DruidDatasource.time_offset(query_obj['granularity'])

        def increment_timestamp(ts):
dt = utils.parse_human_datetime(ts).replace(
tzinfo=DRUID_TZ)
return dt + timedelta(milliseconds=time_offset)

        if DTTM_ALIAS in df.columns and time_offset:
df[DTTM_ALIAS] = df[DTTM_ALIAS].apply(increment_timestamp)
return QueryResult(
df=df,
query=query_str,
duration=datetime.now() - qry_start_dttm)
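
    # Translate Superset filter dicts, e.g.
    # {'col': 'country', 'op': 'in', 'val': ['US', 'FR']}, into pydruid Filter
    # objects, AND-ing successive clauses together.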
def get_filters(self, raw_filters): # noqa
filters = None
for flt in raw_filters:
if not all(f in flt for f in ['col', 'op', 'val']):
continue
col = flt['col']
op = flt['op']
eq = flt['val']
cond = None
if op in ('in', 'not in'):
                eq = [
                    v.replace("'", '').strip()
                    if isinstance(v, string_types)
                    else v
                    for v in eq]
elif not isinstance(flt['val'], string_types):
eq = eq[0] if len(eq) > 0 else ''
if col in self.num_cols:
if op in ('in', 'not in'):
eq = [utils.string_to_num(v) for v in eq]
else:
eq = utils.string_to_num(eq)
if op == '==':
cond = Dimension(col) == eq
elif op == '!=':
cond = ~(Dimension(col) == eq)
elif op in ('in', 'not in'):
fields = []
if len(eq) > 1:
for s in eq:
fields.append(Dimension(col) == s)
cond = Filter(type="or", fields=fields)
elif len(eq) == 1:
cond = Dimension(col) == eq[0]
if op == 'not in':
cond = ~cond
elif op == 'regex':
cond = Filter(type="regex", pattern=eq, dimension=col)
elif op == '>=':
cond = Dimension(col) >= eq
elif op == '<=':
cond = Dimension(col) <= eq
elif op == '>':
cond = Dimension(col) > eq
elif op == '<':
cond = Dimension(col) < eq
if filters:
filters = Filter(type="and", fields=[
cond,
filters
])
else:
filters = cond
return filters
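
    # Having clauses support ==, > and < directly; get_having_filters derives
    # !=, >= and <= by negating those three via reversed_op_map.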
def _get_having_obj(self, col, op, eq):
cond = None
if op == '==':
if col in self.column_names:
cond = DimSelector(dimension=col, value=eq)
else:
cond = Aggregation(col) == eq
elif op == '>':
cond = Aggregation(col) > eq
elif op == '<':
cond = Aggregation(col) < eq
return cond

    def get_having_filters(self, raw_filters):
filters = None
reversed_op_map = {
'!=': '==',
'>=': '<',
'<=': '>'
}
for flt in raw_filters:
if not all(f in flt for f in ['col', 'op', 'val']):
continue
col = flt['col']
op = flt['op']
eq = flt['val']
cond = None
if op in ['==', '>', '<']:
cond = self._get_having_obj(col, op, eq)
elif op in reversed_op_map:
cond = ~self._get_having_obj(col, reversed_op_map[op], eq)
if filters:
filters = filters & cond
else:
filters = cond
return filters

    @classmethod
def query_datasources_by_name(
cls, session, database, datasource_name, schema=None):
return (
session.query(cls)
.filter_by(cluster_name=database.id)
.filter_by(datasource_name=datasource_name)
.all()
)
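

# Recompute the datasource's permission string (set_perm) whenever a
# DruidDatasource is inserted or updated.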
sa.event.listen(DruidDatasource, 'after_insert', set_perm)
sa.event.listen(DruidDatasource, 'after_update', set_perm)
| superset/connectors/druid/models.py | 1 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.0005261532496660948,
0.0001805723732104525,
0.00016256906383205205,
0.00017020403174683452,
0.0000496961765747983
]
|
{
"id": 6,
"code_window": [
"\n",
"from flask import Markup, flash, redirect\n",
"from flask_appbuilder import CompactCRUDMixin, expose\n",
"from flask_appbuilder.models.sqla.interface import SQLAInterface\n",
"import sqlalchemy as sa\n",
"\n",
"from flask_babel import lazy_gettext as _\n",
"from flask_babel import gettext as __\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "superset/connectors/sqla/views.py",
"type": "replace",
"edit_start_line_idx": 7
} | import { it, describe } from 'mocha';
import { expect } from 'chai';
import sinon from 'sinon';
import $ from 'jquery';
import * as exploreUtils from '../../../javascripts/explore/exploreUtils';
import * as actions from '../../../javascripts/explore/actions/chartActions';

describe('chart actions', () => {
let dispatch;
let urlStub;
let ajaxStub;
let request;

  beforeEach(() => {
dispatch = sinon.spy();
urlStub = sinon.stub(exploreUtils, 'getExploreUrl').callsFake(() => ('mockURL'));
ajaxStub = sinon.stub($, 'ajax');
});

  afterEach(() => {
urlStub.restore();
ajaxStub.restore();
});
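
  // The stubbed $.ajax fires its error callback with statusText 'timeout';
  // runQuery should then dispatch three actions, the first of which is
  // CHART_UPDATE_TIMEOUT.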
it('should handle query timeout', () => {
ajaxStub.yieldsTo('error', { statusText: 'timeout' });
request = actions.runQuery({});
request(dispatch, sinon.stub().returns({
explore: {
controls: [],
},
}));
expect(dispatch.callCount).to.equal(3);
expect(dispatch.args[0][0].type).to.equal(actions.CHART_UPDATE_TIMEOUT);
});
});
| superset/assets/spec/javascripts/explore/chartActions_spec.js | 0 | https://github.com/apache/superset/commit/64ef8b14b4f7b7917a8bab4d22e21106b91b7262 | [
0.0001724978646961972,
0.00017161578580271453,
0.00017030218441504985,
0.00017183154704980552,
8.41448638766451e-7
]
|