file_name | prefix | suffix | middle
---|---|---|---
gsp-long-revalidate.js
|
import fs from 'fs'
import path from 'path'
let gspCalls = 0
export async function getStaticProps() {
const data = await fs.promises.readFile(
path.join(process.cwd(), 'data.txt'),
'utf8'
)
gspCalls += 1
|
if (data.trim() === 'hide') {
return {
notFound: true,
revalidate: 1,
}
}
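  // Otherwise serve the cached page and regenerate it in the background
  // at most once every 100 seconds (incremental static regeneration).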
return {
props: {
hello: 'world',
data,
gspCalls,
},
revalidate: 100,
}
}
export default function Page(props) {
return (
<>
<p id="gsp">getStaticProps page</p>
<p id="props">{JSON.stringify(props)}</p>
</>
)
}
| |
classPropertyIsPublicByDefault.js
|
//// [classPropertyIsPublicByDefault.ts]
class C {
x: string;
get y() { return null; }
set y(x) { }
foo() { }
static a: string;
static get b() { return null; }
static set b(x) { }
static foo() { }
}
var c: C;
c.x;
c.y;
c.y = 1;
c.foo();
C.a;
C.b();
C.b = 1;
C.foo();
//// [classPropertyIsPublicByDefault.js]
var C = /** @class */ (function () {
function
|
() {
}
Object.defineProperty(C.prototype, "y", {
get: function () { return null; },
set: function (x) { },
enumerable: true,
configurable: true
});
C.prototype.foo = function () { };
Object.defineProperty(C, "b", {
get: function () { return null; },
set: function (x) { },
enumerable: true,
configurable: true
});
C.foo = function () { };
return C;
}());
var c;
c.x;
c.y;
c.y = 1;
c.foo();
C.a;
C.b();
C.b = 1;
C.foo();
|
C
|
exceptions.py
|
class ReadTimeout(Exception):
pass
class FatalError(Exception):
pass
class SynchronizationError(Exception):
pass
class PayloadOverflow(Exception):
pass
class
|
(Exception):
pass
|
ConnectionLost
|
task3_doc2vec_svm.py
|
import data_loader
import numpy as np
import pandas as pd
import re
import os.path
from itertools import product
from string import ascii_lowercase
from sklearn.pipeline import Pipeline, FeatureUnion
from sklearn.decomposition import PCA, TruncatedSVD
from sklearn.metrics import roc_auc_score, accuracy_score, classification_report
from sklearn.svm import SVC
from sklearn.preprocessing import LabelEncoder
from sklearn.feature_extraction.text import TfidfTransformer, TfidfVectorizer
from sklearn.model_selection import GridSearchCV
from sklearn.externals import joblib
from sklearn.linear_model import ElasticNet
def
|
():
cols = ["dim_{}".format(i+1) for i in range(300)] + ["label"] + ["meta_id"]
train_df = pd.read_csv("dataset/data_doc2vec_non-pretrained/train_docvec_lang_sentid.csv", names=cols)
dev_df = pd.read_csv("dataset/data_doc2vec_non-pretrained/dev_docvec_lang_sentid.csv", names=cols)
test_df = pd.read_csv("dataset/data_doc2vec_non-pretrained/test_docvec_lang_sentid.csv", names=cols)
train_actual = train_df["label"].tolist()
dev_actual = dev_df["label"].tolist()
test_actual = test_df["label"].tolist()
train_data_df = train_df.iloc[:,:(len(cols)-2)]
dev_data_df = dev_df.iloc[:,:(len(cols)-2)]
test_data_df = test_df.iloc[:,:(len(cols)-2)]
print("Start pipeline")
pipeline = Pipeline([
('clf', SVC(decision_function_shape='ovo', C=20)),
])
param_grid = dict(
clf__kernel=['rbf'],
clf__C=[0.1, 1, 10, 20])
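    # Note: GridSearchCV clones the pipeline for each candidate, so these
    # clf__C values override the C=20 passed to SVC above.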
grid_search = GridSearchCV(pipeline, param_grid=param_grid, cv=5, verbose=10, return_train_score=True)
train_pred = grid_search.fit(train_data_df, train_actual).predict(train_data_df)
test_pred = grid_search.predict(test_data_df)
with open("task3_test_doc2vec.txt", "w") as output:
output.write("Training results:\n{}\n".format(classification_report(train_actual, train_pred)))
output.write("Testing results:\n{}\n".format(classification_report(test_actual, test_pred)))
print(classification_report(train_actual, train_pred))
print(classification_report(test_actual, test_pred))
pd.DataFrame(grid_search.cv_results_).to_csv("task3_grid_search_doc2vec.csv")
if __name__ == "__main__":
main()
|
main
|
server.py
|
import datetime
from platform import uname
from flask import Flask
server = Flask(__name__)
@server.route("/")
def
|
():
return uname()[0] + " | " + uname()[1] + " | " + str(datetime.datetime.now())
if __name__ == "__main__":
server.run(host='0.0.0.0')
|
getgreeting
|
ext-rating.component.js
|
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
import * as tslib_1 from "tslib";
import { Component, ElementRef, forwardRef } from '@angular/core';
import { base } from './base';
var ratingMetaData = /** @class */ (function () {
function ratingMetaData() {
}
ratingMetaData.XTYPE = 'rating';
ratingMetaData.PROPERTIESOBJECT = {
"animate": "Boolean/Object",
"ariaAttributes": "Object",
"ariaDescribedBy": "String",
"ariaLabel": "String",
"ariaLabelledBy": "String",
"bind": "Object/String",
"border": "Boolean",
"cls": "String/String[]",
"controller": "String/Object/Ext.app.ViewController",
"defaultListenerScope": "Boolean",
"disabled": "Boolean",
"eventHandlers": "Object",
"family": "String",
"focusCls": "String",
"glyphs": "String/String[]/Number[]",
"height": "Number/String",
"hidden": "Boolean",
"hideMode": "'clip'/'display'/'offsets'/'opacity'/'visibility'",
"instanceCls": "String/String[]",
"keyMap": "Object",
"keyMapEnabled": "Boolean",
"keyMapTarget": "String",
"limit": "Number",
"listeners": "Object",
"minimum": "Number",
"name": "String",
"nameable": "Boolean",
"overStyle": "String/Object",
"plugins": "Array/Ext.enums.Plugin/Object/Ext.plugin.Abstract",
"publishes": "String/String[]/Object",
"reference": "String",
"renderTo": "Ext.Element",
"ripple": "Boolean/Object/String",
"rounding": "Number",
"scale": "String",
"selectedStyle": "String/Object",
"session": "Boolean/Object/Ext.data.Session",
"shareableName": "Boolean",
"style": "String/Object",
"tip": "Object/String/String[]/Ext.XTemplate/Function",
"tooltipText": "String",
"touchAction": "Object",
"trackingValue": "Number",
"trackOver": "Boolean",
"twoWayBindable": "String/String[]/Object",
"ui": "String/String[]",
"userCls": "String/String[]",
"value": "Number",
"viewModel": "String/Object/Ext.app.ViewModel",
"width": "Number/String",
"platformConfig": "Object",
"fitToParent": "Boolean",
"config": "Object",
};
ratingMetaData.PROPERTIES = [
'animate',
'ariaAttributes',
'ariaDescribedBy',
'ariaLabel',
'ariaLabelledBy',
'bind',
'border',
'cls',
'controller',
'defaultListenerScope',
'disabled',
'eventHandlers',
'family',
'focusCls',
'glyphs',
'height',
'hidden',
'hideMode',
'instanceCls',
'keyMap',
'keyMapEnabled',
'keyMapTarget',
'limit',
'listeners',
'minimum',
'name',
'nameable',
'overStyle',
'plugins',
'publishes',
'reference',
'renderTo',
'ripple',
'rounding',
'scale',
'selectedStyle',
'session',
'shareableName',
'style',
'tip',
'tooltipText',
'touchAction',
'trackingValue',
'trackOver',
'twoWayBindable',
'ui',
'userCls',
'value',
'viewModel',
'width',
'platformConfig',
'fitToParent',
'config'
];
ratingMetaData.EVENTS = [
{ name: 'beforedisabledchange', parameters: 'sender,value,oldValue,undefined' },
{ name: 'beforeheightchange', parameters: 'sender,value,oldValue,undefined' },
{ name: 'beforehiddenchange', parameters: 'sender,value,oldValue,undefined' },
{ name: 'beforewidthchange', parameters: 'sender,value,oldValue,undefined' },
{ name: 'blur', parameters: 'rating,event' },
{ name: 'disabledchange', parameters: 'sender,value,oldValue' },
{ name: 'focus', parameters: 'rating,event' },
{ name: 'focusenter', parameters: 'rating,event' },
{ name: 'focusleave', parameters: 'rating,event' },
{ name: 'heightchange', parameters: 'sender,value,oldValue' },
{ name: 'hiddenchange', parameters: 'sender,value,oldValue' },
{ name: 'widthchange', parameters: 'sender,value,oldValue' },
{ name: 'ready', parameters: '' }
];
ratingMetaData.EVENTNAMES = [
'beforedisabledchange',
'beforeheightchange',
'beforehiddenchange',
'beforewidthchange',
'blur',
'disabledchange',
'focus',
'focusenter',
|
'ready'
];
return ratingMetaData;
}());
export { ratingMetaData };
if (false) {
/** @type {?} */
ratingMetaData.XTYPE;
/** @type {?} */
ratingMetaData.PROPERTIESOBJECT;
/** @type {?} */
ratingMetaData.PROPERTIES;
/** @type {?} */
ratingMetaData.EVENTS;
/** @type {?} */
ratingMetaData.EVENTNAMES;
}
var ExtRatingComponent = /** @class */ (function (_super) {
tslib_1.__extends(ExtRatingComponent, _super);
function ExtRatingComponent(eRef) {
return _super.call(this, eRef, ratingMetaData) || this;
}
/**
* @return {?}
*/
ExtRatingComponent.prototype.ngOnInit = /**
* @return {?}
*/
function () { this.baseOnInit(ratingMetaData); };
//public ngOnChanges(changes: SimpleChanges) {this.baseOnChanges(changes)}
//public ngOnChanges(changes: SimpleChanges) {this.baseOnChanges(changes)}
/**
* @return {?}
*/
ExtRatingComponent.prototype.ngAfterContentInit =
//public ngOnChanges(changes: SimpleChanges) {this.baseOnChanges(changes)}
/**
* @return {?}
*/
function () {
this.baseAfterContentInit();
this['ready'].emit(this);
};
ExtRatingComponent.decorators = [
{ type: Component, args: [{
selector: 'rating',
inputs: ratingMetaData.PROPERTIES,
outputs: ratingMetaData.EVENTNAMES,
providers: [{ provide: base, useExisting: forwardRef(function () { return ExtRatingComponent; }) }],
template: '<ng-template #dynamic></ng-template>'
}] }
];
/** @nocollapse */
ExtRatingComponent.ctorParameters = function () { return [
{ type: ElementRef }
]; };
return ExtRatingComponent;
}(base));
export { ExtRatingComponent };
//# sourceMappingURL=data:application/json;base64,... (inline base64 source map omitted)
|
'focusleave',
'heightchange',
'hiddenchange',
'widthchange',
|
systemcursor.py
|
from pyglet.libs.darwin.objc_runtime import *
# This class is a wrapper around NSCursor which prevents us from
# sending too many hide or unhide messages in a row. Apparently
# NSCursor treats them like retain/release messages, which can be
# problematic when we are e.g. switching between window & fullscreen.
class SystemCursor:
cursor_is_hidden = False
@classmethod
def hide(cls):
if not cls.cursor_is_hidden:
send_message('NSCursor', 'hide')
cls.cursor_is_hidden = True
@classmethod
def
|
(cls):
if cls.cursor_is_hidden:
send_message('NSCursor', 'unhide')
cls.cursor_is_hidden = False
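# Illustrative pairing: call SystemCursor.hide() when entering fullscreen
# and SystemCursor.unhide() when leaving; the cursor_is_hidden flag keeps
# the hide/unhide messages balanced no matter how often either is called.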
|
unhide
|
riscv64gc_unknown_linux_gnu.rs
|
use crate::spec::{CodeModel, LinkerFlavor, Target, TargetOptions, TargetResult};
pub fn target() -> TargetResult {
Ok(Target {
llvm_target: "riscv64-unknown-linux-gnu".to_string(),
target_endian: "little".to_string(),
target_pointer_width: "64".to_string(),
target_c_int_width: "32".to_string(),
|
target_os: "linux".to_string(),
target_vendor: "unknown".to_string(),
linker_flavor: LinkerFlavor::Gcc,
options: TargetOptions {
unsupported_abis: super::riscv_base::unsupported_abis(),
code_model: Some(CodeModel::Medium),
cpu: "generic-rv64".to_string(),
features: "+m,+a,+f,+d,+c".to_string(),
llvm_abiname: "lp64d".to_string(),
max_atomic_width: Some(64),
..super::linux_base::opts()
},
})
}
|
target_env: "gnu".to_string(),
data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".to_string(),
arch: "riscv64".to_string(),
|
mod.rs
|
/*
Copyright 2021 Integritee AG and Supercomputing Systems AG
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
pub mod types;
use itp_types::{AccountId, Enclave, Header};
use sp_runtime::traits::Header as HeaderTrait;
use std::time::Duration;
pub const SLOT_DURATION: Duration = Duration::from_millis(300);
pub fn validateer(account: AccountId) -> Enclave {
Enclave::new(account, Default::default(), Default::default(), Default::default())
}
pub fn
|
() -> Header {
Header::new(
Default::default(),
Default::default(),
Default::default(),
Default::default(),
Default::default(),
)
}
|
default_header
|
ordering.py
|
"""
SoftLayer.ordering
~~~~~~~~~~~~~~~~~~
Ordering Manager
:license: MIT, see LICENSE for more details.
"""
class OrderingManager(object):
"""Manages hardware devices.
:param SoftLayer.API.Client client: an API client instance
"""
def __init__(self, client):
self.client = client
def get_packages_of_type(self, package_types, mask=None):
"""Get packages that match a certain type.
Each ordering package has a type, so return all packages that match
the types we are looking for
:param list package_types: List of strings representing the package
type keynames we are interested in.
:param string mask: Mask to specify the properties we want to retrieve
"""
package_service = self.client['Product_Package']
_filter = {
'type': {
'keyName': {
'operation': 'in',
'options': [
{'name': 'data',
'value': package_types}
],
},
},
}
packages = package_service.getAllObjects(mask=mask, filter=_filter)
packages = self.filter_outlet_packages(packages)
return packages
@staticmethod
def filter_outlet_packages(packages):
"""Remove packages designated as OUTLET.
Those types of packages must be handled in a different way,
and they are not supported at the moment.
:param packages: List of packages. Name and description keys
must be present in each of them.
"""
non_outlet_packages = []
for package in packages:
if all(['OUTLET' not in package.get('description', '').upper(),
'OUTLET' not in package.get('name', '').upper()]):
non_outlet_packages.append(package)
return non_outlet_packages
@staticmethod
def get_only_active_packages(packages):
"""Return only active packages.
If a package is active, it is eligible for ordering
This will inspect the 'isActive' property on the provided packages
:param packages: List of packages; the isActive key must be present in each
"""
active_packages = []
for package in packages:
if package['isActive']:
active_packages.append(package)
return active_packages
def get_package_by_type(self, package_type, mask=None):
"""Get a single package of a given type.
Syntactic sugar to retrieve a single package of a given type.
If multiple packages share the given type, this will return the first
one returned by the API.
If no packages are found, returns None
:param package_type string representing the package type key name
we are interested in
"""
packages = self.get_packages_of_type([package_type], mask)
if len(packages) == 0:
return None
else:
return packages.pop()
def get_package_id_by_type(self, package_type):
"""Return the package ID of a Product Package with a given type.
:param package_type string representing the package type key name
we are interested in
:raises ValueError when no package of the given type is found
"""
mask = "mask[id, name, description, isActive, type[keyName]]"
package = self.get_package_by_type(package_type, mask)
if package:
return package['id']
else:
raise ValueError("No package found for type: " + package_type)
def get_quotes(self):
"""Retrieve a list of quotes.
:return a list of SoftLayer_Billing_Order_Quote
"""
quotes = self.client['Account'].getActiveQuotes()
return quotes
def get_quote_details(self, quote_id):
"""Retrieve quote details.
:param quote_id ID number of target quote
"""
quote = self.client['Billing_Order_Quote'].getObject(id=quote_id)
return quote
def get_order_container(self, quote_id):
"""Generate an order container from a quote object.
:param quote_id ID number of target quote
"""
quote = self.client['Billing_Order_Quote']
container = quote.getRecalculatedOrderContainer(id=quote_id)
return container['orderContainers'][0]
def generate_order_template(self, quote_id, extra, quantity=1):
"""Generate a complete order template.
:param int quote_id: ID of target quote
:param list extra: List of dictionaries that have extra details about
the order such as hostname or domain names for
virtual servers or hardware nodes
:param int quantity: Number of ~things~ to order
"""
|
container = self.get_order_container(quote_id)
container['quantity'] = quantity
# NOTE(kmcdonald): This will only work with virtualGuests and hardware.
# There has to be a better way, since this is based on
# an existing quote that supposedly knows about this
# detail
if container['packageId'] == 46:
product_type = 'virtualGuests'
else:
product_type = 'hardware'
if len(extra) != quantity:
raise ValueError("You must specify extra for each server in the "
"quote")
container[product_type] = []
for extra_details in extra:
container[product_type].append(extra_details)
container['presetId'] = None
return container
def verify_quote(self, quote_id, extra, quantity=1):
"""Verifies that a quote order is valid.
:param int quote_id: ID for the target quote
:param list hostnames: hostnames of the servers
:param string domain: domain of the new servers
:param int quantity: Quantity to override default
"""
container = self.generate_order_template(quote_id, extra,
quantity=quantity)
return self.client['Product_Order'].verifyOrder(container)
def order_quote(self, quote_id, extra, quantity=1):
"""Places an order using a quote
:param int quote_id: ID for the target quote
:param list hostnames: hostnames of the servers
:param string domain: domain of the new server
:param int quantity: Quantity to override default
"""
container = self.generate_order_template(quote_id, extra,
quantity=quantity)
return self.client['Product_Order'].placeOrder(container)
| |
key.go
|
// Copyright 2021 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package encoding
import (
"encoding/binary"
"github.com/pingcap/log"
"github.com/tikv/migration/cdc/cdc/model"
"go.uber.org/zap"
)
// DecodeKey decodes a key to uniqueID, tableID, startTs, CRTs.
func DecodeKey(key []byte) (uniqueID uint32, tableID uint64, startTs, CRTs uint64)
|
// EncodeTsKey encodes uniqueID, tableID, CRTs.
func EncodeTsKey(uniqueID uint32, tableID uint64, ts uint64) []byte {
// uniqueID, tableID, CRTs.
buf := make([]byte, 0, 4+8+8)
uint64Buf := [8]byte{}
// uniqueID
binary.BigEndian.PutUint32(uint64Buf[:], uniqueID)
buf = append(buf, uint64Buf[:4]...)
// tableID
binary.BigEndian.PutUint64(uint64Buf[:], tableID)
buf = append(buf, uint64Buf[:]...)
// CRTs
binary.BigEndian.PutUint64(uint64Buf[:], ts)
return append(buf, uint64Buf[:]...)
}
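// Note: a ts key is a 20-byte prefix of the event-key layout below, so it
// can presumably serve as a range-scan bound over event keys that share the
// same uniqueID and tableID.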
// EncodeKey encodes a key according to event.
// Format: uniqueID, tableID, CRTs, startTs, Put/Delete, Key.
func EncodeKey(uniqueID uint32, tableID uint64, event *model.PolymorphicEvent) []byte {
if event.RawKV == nil {
log.Panic("rawkv must not be nil", zap.Any("event", event))
}
// uniqueID, tableID, CRTs, startTs, Put/Delete, Key
length := 4 + 8 + 8 + 8 + 2 + len(event.RawKV.Key)
buf := make([]byte, 0, length)
uint64Buf := [8]byte{}
// uniqueID
binary.BigEndian.PutUint32(uint64Buf[:], uniqueID)
buf = append(buf, uint64Buf[:4]...)
// table ID
binary.BigEndian.PutUint64(uint64Buf[:], tableID)
buf = append(buf, uint64Buf[:]...)
// CRTs
binary.BigEndian.PutUint64(uint64Buf[:], event.CRTs)
buf = append(buf, uint64Buf[:]...)
// startTs
binary.BigEndian.PutUint64(uint64Buf[:], event.StartTs)
buf = append(buf, uint64Buf[:]...)
// Let Delete < Put: the bitwise complement inverts the natural order of
// the op type values.
binary.BigEndian.PutUint16(uint64Buf[:], ^uint16(event.RawKV.OpType))
buf = append(buf, uint64Buf[:2]...)
// key
return append(buf, event.RawKV.Key...)
}
|
{
// uniqueID, tableID, CRTs, startTs, Put/Delete, Key
// uniqueID
uniqueID = binary.BigEndian.Uint32(key)
// table ID
tableID = binary.BigEndian.Uint64(key[4:])
// CRTs
CRTs = binary.BigEndian.Uint64(key[12:])
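// Keys from EncodeTsKey are only 20 bytes (4+8+8) and carry no startTs;
// event keys from EncodeKey are longer, so startTs is present whenever
// the key reaches 28 bytes.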
if len(key) >= 28 {
// startTs
startTs = binary.BigEndian.Uint64(key[20:])
}
return
}
|
29.e625753f.js
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[29],{348:function(t,s,a){},421:function(t,s,a){"use strict";a(348)},589:function(t,s,a){"use strict";a.r(s);a(421);var r=a(25),n=Object(r.a)({},(function(){var t=this,s=t.$createElement,a=t._self._c||s;return a("ContentSlotsDistributor",{attrs:{"slot-key":t.$parent.slotKey}},[a("h1",{attrs:{id:"开源作品"}},[a("a",{staticClass:"header-anchor",attrs:{href:"#开源作品"}},[t._v("#")]),t._v(" 开源作品")]),t._v(" "),a("div",{attrs:{align:"center"}},[a("span",{staticClass:"logo"},[t._v(" 程序员思语 ")])]),t._v(" "),a("h1",{attrs:{id:"自我介绍"}},[a("a",{staticClass:"header-anchor",attrs:{href:"#自我介绍"}},[t._v("#")]),t._v(" "),a("H2Icon"),t._v("自我介绍")],1),t._v(" "),a("p",[t._v("你好,我是 "),a("strong",[t._v("思语")]),t._v("。微信搜索「程序员思语」关注我,长期交流学习。")]),t._v(" "),a("p",[t._v("我运营了公众号「程序员思语」,关注回复「pdf」限时获取前端优质书籍pdf。"),a("strong",[t._v("公众号经常更新,值得你关注学习,每天进步一点点")]),t._v("。")]),t._v(" "),a("h1",{attrs:{id:"开源项目简介"}},[a("a",{staticClass:"header-anchor",attrs:{href:"#开源项目简介"}},[t._v("#")]),t._v(" "),a("H2Icon"),t._v("开源项目简介")],1),t._v(" "),a("blockquote",[a("p",[t._v("Loreal UI 是基于 "),a("strong",[t._v("微信小程序原生语法")]),t._v(" 实现的组件库。遵循简洁,易用的设计规范。")])]),t._v(" "),a("RightMenu")],1)}),[],!1,null,null,null);s.default=n.exports}}]);
|
||
taleLang.en.js
|
(function(GoNorth) {
"use strict";
(function(Tale) {
(function(Localization) {
Localization.ViewModel = {};
Localization.ViewModel.ChooseItem = "Choose item";
Localization.ViewModel.ChooseQuest = "Choose quest";
Localization.ViewModel.ChooseNpc = "Choose npc";
Localization.ViewModel.ChooseSkill = "Choose skill";
// Text Lines
GoNorth.DefaultNodeShapes.Localization.TypeNames["tale.PlayerText"] = "Player line";
GoNorth.DefaultNodeShapes.Localization.TypeNames["tale.NpcText"] = "Npc line";
Localization.PlayerTextPlaceHolder = "Player line";
Localization.NpcTextPlaceHolder = "Npc line";
// Choice
GoNorth.DefaultNodeShapes.Localization.TypeNames["tale.Choice"] = "Choice";
Localization.Choices = {};
Localization.Choices.ChoiceText = "Choice text";
Localization.Choices.AddNewChoice = "Add choice";
Localization.Choices.MoveUpToolTip = "Move choice up";
Localization.Choices.MoveDownToolTip = "Move choice down";
Localization.Choices.EditConditionToolTip = "Edit condition";
Localization.Choices.AllowMultipleSelectionToolTip = "Allow player to use the answer multiple times";
Localization.Choices.DeleteToolTip = "Delete choice";
// Action
Localization.Actions = {};
Localization.Actions.ChangeNpcValueLabel = "Change npc value";
Localization.Actions.ChooseItem = "<Choose item>";
Localization.Actions.SpawnItemInPlayerInventoryLabel = "Spawn item in player inventory";
Localization.Actions.SpawnItemInNpcInventoryLabel = "Spawn item in npc inventory";
Localization.Actions.TransferItemToPlayerInventoryLabel = "Give item to player";
Localization.Actions.TransferItemToNpcInventoryLabel = "Give item to npc";
Localization.Actions.ItemQuantity = "Quantity (blank = 1):";
Localization.Actions.SetNpcStateLabel = "Change npc state";
Localization.Actions.NpcLearnsSkillLabel = "Npc learns skill";
Localization.Actions.NpcForgetSkillLabel = "Npc forgets skill";
Localization.Actions.ChangeNpcSkillValueLabel = "Change npc skill value";
Localization.Actions.PersistDialogStateLabel = "Save dialog state";
Localization.Actions.PersistDialogStateWillContinueOnThisPointNextTalk = "The dialog will be continued from this node the next time the player talks to the npc.";
// Condition
Localization.Conditions = {};
Localization.Conditions.CheckNpcValueLabel = "Check npc value";
Localization.Conditions.NpcLabel = "Npc";
Localization.Conditions.CheckPlayerInventoryLabel = "Check player inventory";
Localization.Conditions.PlayerInventoryLabel = "Player inventory";
Localization.Conditions.CheckNpcInventoryLabel = "Check npc inventory";
Localization.Conditions.NpcInventoryLabel = "Npc inventory";
Localization.Conditions.ChooseItem = "<Choose item>";
|
Localization.Conditions.CheckChooseNpcSkillValueLabel = "Check npc skill value";
Localization.Conditions.NpcSkillPrefix = "Npc ";
Localization.Conditions.CheckNpcLearnedSkillLabel = "Npc can use skill";
Localization.Conditions.CheckNpcLearnedSkillPrefixLabel = "Npc can use ";
Localization.Conditions.CheckNpcNotLearnedSkillLabel = "Npc can not use skill";
Localization.Conditions.CheckNpcNotLearnedSkillPrefixLabel = "Npc can not use ";
}(Tale.Localization = Tale.Localization || {}));
}(GoNorth.Tale = GoNorth.Tale || {}));
}(window.GoNorth = window.GoNorth || {}));
|
Localization.Conditions.OpenItemTooltip = "Opens the chosen item";
Localization.Conditions.ItemOperatorHasAtLeast = "has at least";
Localization.Conditions.ItemOperatorHasMaximum = "has at maximum";
Localization.Conditions.ItemCount = "Cnt";
|
orm.py
|
from pony.orm import *
from model.group import Group
from model.contact import Contact
class ORMFixture:
db = Database()
class ORMGroup(db.Entity):
_table_ = 'group_list'
id = PrimaryKey(int, column='group_id')
name = Optional(str, column='group_name')
header = Optional(str, column='group_header')
footer = Optional(str, column='group_footer')
contacts = Set(lambda: ORMFixture.ORMContact, table="address_in_groups", column = "id", reverse="groups", lazy=True)
class ORMContact(db.Entity):
_table_ = 'addressbook'
id = PrimaryKey(int, column='id')
firstname = Optional(str, column='firstname')
lastname = Optional(str, column='lastname')
deprecated = Optional(str, column='deprecated')
middlename = Optional(str, column="middlename")
nickname = Optional(str, column="nickname")
company = Optional(str, column="company")
title = Optional(str, column="title")
address = Optional(str, column="address")
home = Optional(str, column="home")
mobile = Optional(str, column="mobile")
work = Optional(str, column="work")
fax = Optional(str, column="fax")
email1 = Optional(str, column="email")
email2 = Optional(str, column="email2")
email3 = Optional(str, column="email3")
homepage = Optional(str, column="homepage")
address2 = Optional(str, column="address2")
phone2 = Optional(str, column="phone2")
notes = Optional(str, column="notes")
groups = Set(lambda: ORMFixture.ORMGroup, table="address_in_groups", column="group_id", reverse="contacts", lazy=True)
def __init__(self, host, name, user, password):
self.db.bind('mysql', host=host, database=name, user=user, password=password)
self.db.generate_mapping()
sql_debug(True)
def convert_groups_to_model(self, groups):
def convert(group):
return(Group(id=str(group.id), name=group.name, header=group.header, footer=group.footer))
return list(map(convert, groups))
@db_session
def get_group_list(self):
return self.convert_groups_to_model(select(g for g in ORMFixture.ORMGroup))
def convert_contacts_to_model(self, contacts):
def convert(contact):
return(Contact(id=str(contact.id), firstname=contact.firstname, lastname=contact.lastname, middlename=contact.middlename,
nickname=contact.nickname, company=contact.company,
title=contact.title, address=contact.address, homephone=contact.home,
mobilephone=contact.mobile, address2=contact.address2, phone2=contact.phone2, notes=contact.notes))
return list(map(convert, contacts))
@db_session
|
return self.convert_contacts_to_model(select(c for c in ORMFixture.ORMContact if c.deprecated is None))
@db_session
def get_contacts_in_group(self, group):
orm_group = list(select(g for g in ORMFixture.ORMGroup if g.id == group.id))[0]
return self.convert_contacts_to_model(orm_group.contacts)
@db_session
def get_contacts_not_in_group(self, group):
orm_group = list(select(g for g in ORMFixture.ORMGroup if g.id == group.id))[0]
return self.convert_contacts_to_model(
select(c for c in ORMFixture.ORMContact if c.deprecated is None and orm_group not in c.groups))
|
def get_contact_list(self):
|
custom.py
|
import pickle
from build_pipeline import make_anomaly
def fit(
X, output_dir, class_order=None, row_weights=None, **kwargs,
):
|
"""
This hook must be implemented with your fitting code, for running drum in the fit mode.
This hook MUST ALWAYS be implemented for custom tasks.
For inference models, this hook can stick around unimplemented, and won’t be triggered.
Parameters
----------
X: pd.DataFrame - training data to perform fit on
y: pd.Series - target data to perform fit on (not used for anomaly detection)
output_dir: the path to write output. This is the path provided in '--output' parameter of the
'drum fit' command.
class_order : A two element long list dictating the order of classes which should be used for
modeling. This will not be used for anomaly detection models
row_weights: An array of non-negative numeric values which can be used to dictate how important
a row is. Row weights is only optionally used, and there will be no filtering for which
custom models support this. There are two situations when values will be passed into
row_weights, during smart downsampling and when weights are explicitly provided by the user
kwargs: Added for forwards compatibility
Returns
-------
Nothing
"""
estimator = make_anomaly()
estimator.fit(X)
# You must serialize out your model to the output_dir given, however if you wish to change this
# code, you will probably have to add a load_model method to read the serialized model back in
# When prediction is done.
# Check out this doc for more information on serialization https://github.com/datarobot/custom-\
# model-templates/tree/master/custom_model_runner#python
# NOTE: We currently set a 10GB limit to the size of the serialized model
with open("{}/artifact.pkl".format(output_dir), "wb") as fp:
pickle.dump(estimator, fp)
|
|
svg_drawing.js
|
// Node-RED Network Diagram Node - this code runs in Node-RED
module.exports = function (RED) {
const Svgson = require('svgson')
// Instantiate the Node-RED node, 'this' is the node being constructed
// and config contains the values set by the user in the flow editor.
function
|
(config) {
const fd = RED.nodes.getNode(config.fd) // get a handle onto FlexDash
RED.nodes.createNode(this, config)
if (!fd) return // not much we can do, wait for the next deploy...
// initWidget ensures that a widget exists in FD and initializes its props with the
// second arg. The third arg is the kind of widget to create, if it doesn't exist.
Object.assign(config, { svg_source: "" })
if (config.title === undefined || config.title === null) config.title = config.name
fd.initWidget(this, config, 'SvgDrawing')
// handle flow input messages
this.on("input", msg => {
const topic = msg.topic
const payload = msg.payload
switch(topic) {
case "add_element": // Add elements, or replace them if they already exist
/*if (!payload.elementType) {
//TODO logError("Invalid payload. A property named .elementType is not specified (msg._msgid = '" + _msgid + "')");
return;
}
var parentElements = null;
if (payload.parentSelector || payload.parentElementId) {
selector = payload.parentSelector || "#" + payload.parentElementId;
parentElements = $scope.rootDiv.querySelectorAll(selector);
}
if (!parentElements || !parentElements.length) {
// When no parent elements have been specified, add the SVG element directly under the SVG element
parentElements = [$scope.svg];
}
// It is not possible to add elements with the same id to multiple parent elements
if (parentElements.length > 1 && payload.elementId) {
//TODO logError("When multiple parent SVG elements are specified, it is not allowed to specify an .elementId (msg._msgid = '" + _msgid + "')");
return;
}
// Create a new SVG element (of the specified type) to every specified parent SVG element
parentElements.forEach(function(parentElement){
var newElement;
if (payload.foreignElement == true) {
newElement = document.createElement(payload.elementType);
}
else {
newElement = document.createElementNS("http://www.w3.org/2000/svg", payload.elementType);
}
if (payload.elementId) {
newElement.setAttribute("id", payload.elementId);
}
if (payload.elementAttributes) {
for (const [key, value] of Object.entries(payload.elementAttributes)) {
newElement.setAttribute(key, value);
}
}
if (payload.elementStyleAttributes) {
var style = "";
// Convert the Javascript object to a style formatted string
for (const [key, value] of Object.entries(payload.elementStyleAttributes)) {
style += key;
style += ":";
style += value;
style += "; ";
}
if (style.trim() !== "") {
newElement.setAttribute("style", style);
}
}
if (payload.textContent) {
setTextContent(newElement, payload.textContent);
}
// In the "Events" tabsheet might be a CSS selector that matches this new element. This means that the
// new element might need to get event handlers automatically. To make sure we ONLY apply those handlers
// to this new element, we add the element to a dummy parent which only has one child (i.e. this new element).
var dummyParent = document.createElement("div");
dummyParent.appendChild(newElement);
applyEventHandlers(dummyParent);
parentElement.appendChild(newElement);
})
*/
break;
default:
throw "Unsupported command '" + topic + "' in topic"
}
})
}
RED.nodes.registerType("fd-svg-drawing", FdSvgDrawingNode)
}
|
FdSvgDrawingNode
|
md_to_html.py
|
#! /usr/bin/env python3
# Script from https://gist.github.com/jiffyclub/5015986
# This script turns Markdown into HTML using the Python markdown library and wraps the result in a
# complete HTML document with default Bootstrap styling so that it's immediately printable.
# Requires the python libraries jinja2, markdown, and mdx_smartypants.
import argparse
import sys
import jinja2
import markdown
# To install dependencies in a virtualenv:
# $ py -3 -m venv .venv
# $ .venv/Scripts/activate
# $ pip install jinja2
# $ pip install markdown
#
# To install dependencies on Ubuntu:
# $ sudo apt-get install python-jinja2 python-markdown
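#
# Example usage (illustrative):
# $ python3 md_to_html.py README.md -o README.html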
TEMPLATE = """<!DOCTYPE html>
<html>
<head>
<link href="http://netdna.bootstrapcdn.com/twitter-bootstrap/2.3.0/css/bootstrap-combined.min.css" rel="stylesheet">
<style>
body {
font-family: sans-serif;
}
code, pre {
font-family: monospace;
}
h1 code,
h2 code,
h3 code,
h4 code,
h5 code,
h6 code {
font-size: inherit;
}
</style>
</head>
<body>
<div class="container">
{{content}}
</div>
</body>
</html>
"""
# TEMPLATE = """<!DOCTYPE html>
# <html>
# <head>
# <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
# <meta name="referrer" content="no-referrer" />
# <meta name="referrer" content="unsafe-url" />
# <meta name="referrer" content="origin" />
# <meta name="referrer" content="no-referrer-when-downgrade" />
# <meta name="referrer" content="origin-when-cross-origin" />
# <title>Page Title</title>
# <link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" rel="stylesheet">
# <style>
# body {
# font-family: Helvetica,Arial,sans-serif;
# }
# code, pre {
# font-family: monospace;
# }
# </style>
# </head>
# <body>
# <div class="container">
# {{content}}
# </div>
# </body>
# </html>
# """
def parse_args(args=None):
d = 'Make a complete, styled HTML document from a Markdown file.'
parser = argparse.ArgumentParser(description=d)
parser.add_argument('mdfile', type=argparse.FileType('r'), nargs='?',
default=sys.stdin,
help='File to convert. Defaults to stdin.')
parser.add_argument('-o', '--out', type=argparse.FileType('w'),
default=sys.stdout,
help='Output file name. Defaults to stdout.')
return parser.parse_args(args)
def
|
(args=None):
args = parse_args(args)
md = args.mdfile.read()
extensions = ['extra', 'smarty']
html = markdown.markdown(md, extensions=extensions, output_format='html5')
doc = jinja2.Template(TEMPLATE).render(content=html)
args.out.write(doc)
if __name__ == '__main__':
sys.exit(main())
|
main
|
unit_complex_ops.rs
|
use std::ops::{Div, DivAssign, Mul, MulAssign};
use crate::base::allocator::Allocator;
use crate::base::dimension::{U1, U2};
use crate::base::storage::Storage;
use crate::base::{DefaultAllocator, Unit, Vector, Vector2};
use crate::geometry::{Isometry, Point2, Rotation, Similarity, Translation, UnitComplex};
use simba::simd::SimdRealField;
/*
* This file provides:
* ===================
*
* UnitComplex × UnitComplex
* UnitComplex × Rotation -> UnitComplex
* Rotation × UnitComplex -> UnitComplex
*
* UnitComplex ÷ UnitComplex
* UnitComplex ÷ Rotation -> UnitComplex
* Rotation ÷ UnitComplex -> UnitComplex
*
*
* UnitComplex × Point
* UnitComplex × Vector
* UnitComplex × Unit<T>
*
* UnitComplex × Isometry<UnitComplex>
* UnitComplex × Similarity<UnitComplex>
* UnitComplex × Translation -> Isometry<UnitComplex>
*
* (Assignment Operators)
*
* UnitComplex ×= UnitComplex
* UnitComplex ×= Rotation
*
* UnitComplex ÷= UnitComplex
* UnitComplex ÷= Rotation
*
* Rotation ×= UnitComplex
* Rotation ÷= UnitComplex
*
*/
// UnitComplex × UnitComplex
impl<N: SimdRealField> Mul<Self> for UnitComplex<N> {
type Output = Self;
#[inline]
fn mul(self, rhs: Self) -> Self {
Unit::new_unchecked(self.into_inner() * rhs.into_inner())
}
}
impl<'a, N: SimdRealField> Mul<UnitComplex<N>> for &'a UnitComplex<N>
where
N::Element: SimdRealField,
{
type Output = UnitComplex<N>;
#[inline]
fn mul(self, rhs: UnitComplex<N>) -> Self::Output {
Unit::new_unchecked(self.complex() * rhs.into_inner())
}
}
impl<'b, N: SimdRealField> Mul<&'b UnitComplex<N>> for UnitComplex<N>
where
N::Element: SimdRealField,
{
type Output = Self;
#[inline]
fn mul(self, rhs: &'b UnitComplex<N>) -> Self::Output {
Unit::new_unchecked(self.into_inner() * rhs.as_ref())
}
}
impl<'a, 'b, N: SimdRealField> Mul<&'b UnitComplex<N>> for &'a UnitComplex<N>
where
N::Element: SimdRealField,
{
type Output = UnitComplex<N>;
#[inline]
fn mul(self, rhs: &'b UnitComplex<N>) -> Self::Output {
Unit::new_unchecked(self.complex() * rhs.as_ref())
}
}
// UnitComplex ÷ UnitComplex
impl<N: SimdRealField> Div<Self> for UnitComplex<N>
where
N::Element: SimdRealField,
{
type Output = Self;
#[inline]
fn div(self, rhs: Self) -> Self::Output {
Unit::new_unchecked(self.into_inner() * rhs.conjugate().into_inner())
}
}
impl<'a, N: SimdRealField> Div<UnitComplex<N>> for &'a UnitComplex<N>
where
N::Element: SimdRealField,
{
type Output = UnitComplex<N>;
#[inline]
fn div(self, rhs: UnitComplex<N>) -> Self::Output {
Unit::new_unchecked(self.complex() * rhs.conjugate().into_inner())
}
}
impl<'b, N: SimdRealField> Div<&'b UnitComplex<N>> for UnitComplex<N>
where
N::Element: SimdRealField,
{
type Output = Self;
#[inline]
fn div(self, rhs: &'b UnitComplex<N>) -> Self::Output {
Unit::new_unchecked(self.into_inner() * rhs.conjugate().into_inner())
}
}
impl<'a, 'b, N: SimdRealField> Div<&'b UnitComplex<N>> for &'a UnitComplex<N>
where
N::Element: SimdRealField,
{
type Output = UnitComplex<N>;
#[inline]
fn div(self, rhs: &'b UnitComplex<N>) -> Self::Output {
Unit::new_unchecked(self.complex() * rhs.conjugate().into_inner())
}
}
macro_rules! complex_op_impl(
($Op: ident, $op: ident;
($RDim: ident, $CDim: ident) $(for $Storage: ident: $StoragesBound: ident $(<$($BoundParam: ty),*>)*),*;
$lhs: ident: $Lhs: ty, $rhs: ident: $Rhs: ty, Output = $Result: ty;
$action: expr; $($lives: tt),*) => {
impl<$($lives ,)* N: SimdRealField $(, $Storage: $StoragesBound $(<$($BoundParam),*>)*)*> $Op<$Rhs> for $Lhs
where N::Element: SimdRealField,
DefaultAllocator: Allocator<N, $RDim, $CDim> {
type Output = $Result;
#[inline]
fn $op($lhs, $rhs: $Rhs) -> Self::Output {
$action
}
}
}
);
macro_rules! complex_op_impl_all(
($Op: ident, $op: ident;
($RDim: ident, $CDim: ident) $(for $Storage: ident: $StoragesBound: ident $(<$($BoundParam: ty),*>)*),*;
$lhs: ident: $Lhs: ty, $rhs: ident: $Rhs: ty, Output = $Result: ty;
[val val] => $action_val_val: expr;
[ref val] => $action_ref_val: expr;
[val ref] => $action_val_ref: expr;
[ref ref] => $action_ref_ref: expr;) => {
complex_op_impl!($Op, $op;
($RDim, $CDim) $(for $Storage: $StoragesBound $(<$($BoundParam),*>)*),*;
$lhs: $Lhs, $rhs: $Rhs, Output = $Result;
$action_val_val; );
complex_op_impl!($Op, $op;
($RDim, $CDim) $(for $Storage: $StoragesBound $(<$($BoundParam),*>)*),*;
$lhs: &'a $Lhs, $rhs: $Rhs, Output = $Result;
$action_ref_val; 'a);
complex_op_impl!($Op, $op;
($RDim, $CDim) $(for $Storage: $StoragesBound $(<$($BoundParam),*>)*),*;
$lhs: $Lhs, $rhs: &'b $Rhs, Output = $Result;
$action_val_ref; 'b);
complex_op_impl!($Op, $op;
($RDim, $CDim) $(for $Storage: $StoragesBound $(<$($BoundParam),*>)*),*;
$lhs: &'a $Lhs, $rhs: &'b $Rhs, Output = $Result;
$action_ref_ref; 'a, 'b);
}
);
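// complex_op_impl_all expands one operator definition into the four
// value/reference receiver combinations (val/val, ref/val, val/ref,
// ref/ref) so owned and borrowed operands can be mixed freely.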
// UnitComplex × Rotation
complex_op_impl_all!(
Mul, mul;
(U2, U2);
self: UnitComplex<N>, rhs: Rotation<N, U2>, Output = UnitComplex<N>;
[val val] => &self * &rhs;
[ref val] => self * &rhs;
[val ref] => &self * rhs;
[ref ref] => self * UnitComplex::from_rotation_matrix(rhs);
);
// UnitComplex ÷ Rotation
complex_op_impl_all!(
Div, div;
(U2, U2);
self: UnitComplex<N>, rhs: Rotation<N, U2>, Output = UnitComplex<N>;
[val val] => &self / &rhs;
[ref val] => self / &rhs;
[val ref] => &self / rhs;
[ref ref] => self * UnitComplex::from_rotation_matrix(rhs).inverse();
);
// Rotation × UnitComplex
complex_op_impl_all!(
Mul, mul;
(U2, U2);
self: Rotation<N, U2>, rhs: UnitComplex<N>, Output = UnitComplex<N>;
[val val] => &self * &rhs;
[ref val] => self * &rhs;
[val ref] => &self * rhs;
[ref ref] => UnitComplex::from_rotation_matrix(self) * rhs;
);
// Rotation ÷ UnitComplex
complex_op_impl_all!(
Div, div;
(U2, U2);
self: Rotation<N, U2>, rhs: UnitComplex<N>, Output = UnitComplex<N>;
[val val] => &self / &rhs;
[ref val] => self / &rhs;
[val ref] => &self / rhs;
[ref ref] => UnitComplex::from_rotation_matrix(self) * rhs.inverse();
);
// UnitComplex × Point
complex_op_impl_all!(
Mul, mul;
(U2, U1);
self: UnitComplex<N>, rhs: Point2<N>, Output = Point2<N>;
[val val] => &self * &rhs;
[ref val] => self * &rhs;
[val ref] => &self * rhs;
[ref ref] => Point2::from(self * &rhs.coords);
);
// UnitComplex × Vector
complex_op_impl_all!(
Mul, mul;
(U2, U1) for S: Storage<N, U2>;
self: UnitComplex<N>, rhs: Vector<N, U2, S>, Output = Vector2<N>;
[val val] => &self * &rhs;
[ref val] => self * &rhs;
[val ref] => &self * rhs;
[ref ref] => {
let i = self.as_ref().im;
let r = self.as_ref().re;
Vector2::new(r * rhs[0] - i * rhs[1], i * rhs[0] + r * rhs[1])
};
);
// UnitComplex × Unit<Vector>
complex_op_impl_all!(
Mul, mul;
(U2, U1) for S: Storage<N, U2>;
self: UnitComplex<N>, rhs: Unit<Vector<N, U2, S>>, Output = Unit<Vector2<N>>;
[val val] => &self * &rhs;
[ref val] => self * &rhs;
[val ref] => &self * rhs;
[ref ref] => Unit::new_unchecked(self * rhs.as_ref());
);
// UnitComplex × Isometry<UnitComplex>
complex_op_impl_all!(
Mul, mul;
(U2, U1);
self: UnitComplex<N>, rhs: Isometry<N, U2, UnitComplex<N>>,
Output = Isometry<N, U2, UnitComplex<N>>;
[val val] => &self * &rhs;
[ref val] => self * &rhs;
[val ref] => &self * rhs;
[ref ref] => {
let shift = self * &rhs.translation.vector;
Isometry::from_parts(Translation::from(shift), self * &rhs.rotation)
};
);
// UnitComplex × Similarity<UnitComplex>
complex_op_impl_all!(
Mul, mul;
(U2, U1);
self: UnitComplex<N>, rhs: Similarity<N, U2, UnitComplex<N>>,
Output = Similarity<N, U2, UnitComplex<N>>;
[val val] => &self * &rhs;
[ref val] => self * &rhs;
[val ref] => &self * rhs;
[ref ref] => Similarity::from_isometry(self * &rhs.isometry, rhs.scaling());
);
// UnitComplex × Translation
complex_op_impl_all!(
Mul, mul;
(U2, U1);
self: UnitComplex<N>, rhs: Translation<N, U2>,
Output = Isometry<N, U2, UnitComplex<N>>;
[val val] => Isometry::from_parts(Translation::from(&self * rhs.vector), self);
[ref val] => Isometry::from_parts(Translation::from( self * rhs.vector), self.clone());
[val ref] => Isometry::from_parts(Translation::from(&self * &rhs.vector), self);
[ref ref] => Isometry::from_parts(Translation::from( self * &rhs.vector), self.clone());
);
// Translation × UnitComplex
complex_op_impl_all!(
Mul, mul;
(U2, U1);
self: Translation<N, U2>, right: UnitComplex<N>,
Output = Isometry<N, U2, UnitComplex<N>>;
[val val] => Isometry::from_parts(self, right);
[ref val] => Isometry::from_parts(self.clone(), right);
[val ref] => Isometry::from_parts(self, right.clone());
[ref ref] => Isometry::from_parts(self.clone(), right.clone());
);
// UnitComplex ×= UnitComplex
impl<N: SimdRealField> MulAssign<UnitComplex<N>> for UnitComplex<N>
where
N::Element: SimdRealField,
{
#[inline]
fn mul_assign(&mut self, rhs: UnitComplex<N>) {
*self = &*self * rhs
}
}
impl<'b, N: SimdRealField> MulAssign<&'b UnitComplex<N>> for UnitComplex<N>
where
N::Element: SimdRealField,
{
#[inline]
fn mul_assign(&mut self, rhs: &'b UnitComplex<N>) {
*self = &*self * rhs
}
}
// UnitComplex /= UnitComplex
impl<N: SimdRealField> DivAssign<UnitComplex<N>> for UnitComplex<N>
where
N::Element: SimdRealField,
{
#[inline]
fn div_assign(&mut self, rhs: UnitComplex<N>) {
*self = &*self / rhs
}
}
impl<'b, N: SimdRealField> DivAssign<&'b UnitComplex<N>> for UnitComplex<N>
where
N::Element: SimdRealField,
{
#[inline]
fn div_assign(&mut self, rhs: &'b UnitComplex<N>) {
*self = &*self / rhs
}
}
// UnitComplex ×= Rotation
impl<N: SimdRealField> MulAssign<Rotation<N, U2>> for UnitComplex<N>
where
N::Element: SimdRealField,
DefaultAllocator: Allocator<N, U2, U2>,
{
#[inline]
fn mul_assign(&mut self, rhs: Rotation<N, U2>) {
*self = &*self * rhs
}
}
impl<'b, N: SimdRealField> MulAssign<&'b Rotation<N, U2>> for UnitComplex<N>
|
{
#[inline]
fn mul_assign(&mut self, rhs: &'b Rotation<N, U2>) {
*self = &*self * rhs
}
}
// UnitComplex ÷= Rotation
impl<N: SimdRealField> DivAssign<Rotation<N, U2>> for UnitComplex<N>
where
N::Element: SimdRealField,
DefaultAllocator: Allocator<N, U2, U2>,
{
#[inline]
fn div_assign(&mut self, rhs: Rotation<N, U2>) {
*self = &*self / rhs
}
}
impl<'b, N: SimdRealField> DivAssign<&'b Rotation<N, U2>> for UnitComplex<N>
where
N::Element: SimdRealField,
DefaultAllocator: Allocator<N, U2, U2>,
{
#[inline]
fn div_assign(&mut self, rhs: &'b Rotation<N, U2>) {
*self = &*self / rhs
}
}
// Rotation ×= UnitComplex
impl<N: SimdRealField> MulAssign<UnitComplex<N>> for Rotation<N, U2>
where
N::Element: SimdRealField,
DefaultAllocator: Allocator<N, U2, U2>,
{
#[inline]
fn mul_assign(&mut self, rhs: UnitComplex<N>) {
self.mul_assign(rhs.to_rotation_matrix())
}
}
impl<'b, N: SimdRealField> MulAssign<&'b UnitComplex<N>> for Rotation<N, U2>
where
N::Element: SimdRealField,
DefaultAllocator: Allocator<N, U2, U2>,
{
#[inline]
fn mul_assign(&mut self, rhs: &'b UnitComplex<N>) {
self.mul_assign(rhs.to_rotation_matrix())
}
}
// Rotation ÷= UnitComplex
impl<N: SimdRealField> DivAssign<UnitComplex<N>> for Rotation<N, U2>
where
N::Element: SimdRealField,
DefaultAllocator: Allocator<N, U2, U2>,
{
#[inline]
fn div_assign(&mut self, rhs: UnitComplex<N>) {
self.div_assign(rhs.to_rotation_matrix())
}
}
impl<'b, N: SimdRealField> DivAssign<&'b UnitComplex<N>> for Rotation<N, U2>
where
N::Element: SimdRealField,
DefaultAllocator: Allocator<N, U2, U2>,
{
#[inline]
fn div_assign(&mut self, rhs: &'b UnitComplex<N>) {
self.div_assign(rhs.to_rotation_matrix())
}
}
|
where
N::Element: SimdRealField,
DefaultAllocator: Allocator<N, U2, U2>,
|
getSecretIamPolicy.go
|
// *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v1beta1
import (
"context"
"reflect"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// Gets the access control policy for a secret. Returns empty policy if the secret exists and does not have a policy set.
func LookupSecretIamPolicy(ctx *pulumi.Context, args *LookupSecretIamPolicyArgs, opts ...pulumi.InvokeOption) (*LookupSecretIamPolicyResult, error) {
var rv LookupSecretIamPolicyResult
err := ctx.Invoke("google-native:secretmanager/v1beta1:getSecretIamPolicy", args, &rv, opts...)
if err != nil
|
return &rv, nil
}
type LookupSecretIamPolicyArgs struct {
OptionsRequestedPolicyVersion *string `pulumi:"optionsRequestedPolicyVersion"`
Project *string `pulumi:"project"`
SecretId string `pulumi:"secretId"`
}
type LookupSecretIamPolicyResult struct {
// Specifies cloud audit logging configuration for this policy.
AuditConfigs []AuditConfigResponse `pulumi:"auditConfigs"`
// Associates a list of `members`, or principals, with a `role`. Optionally, may specify a `condition` that determines how and when the `bindings` are applied. Each of the `bindings` must contain at least one principal. The `bindings` in a `Policy` can refer to up to 1,500 principals; up to 250 of these principals can be Google groups. Each occurrence of a principal counts towards these limits. For example, if the `bindings` grant 50 different roles to `user:[email protected]`, and not to any other principal, then you can add another 1,450 principals to the `bindings` in the `Policy`.
Bindings []BindingResponse `pulumi:"bindings"`
// `etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other. It is strongly suggested that systems make use of the `etag` in the read-modify-write cycle to perform policy updates in order to avoid race conditions: An `etag` is returned in the response to `getIamPolicy`, and systems are expected to put that etag in the request to `setIamPolicy` to ensure that their change will be applied to the same version of the policy. **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost.
Etag string `pulumi:"etag"`
// Specifies the format of the policy. Valid values are `0`, `1`, and `3`. Requests that specify an invalid value are rejected. Any operation that affects conditional role bindings must specify version `3`. This requirement applies to the following operations: * Getting a policy that includes a conditional role binding * Adding a conditional role binding to a policy * Changing a conditional role binding in a policy * Removing any role binding, with or without a condition, from a policy that includes conditions **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost. If a policy does not include any conditions, operations on that policy may specify any valid version or leave the field unset. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
Version int `pulumi:"version"`
}
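// LookupSecretIamPolicyOutput is the Output-returning variant of LookupSecretIamPolicy,
// for use when the arguments are only available as Pulumi outputs.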
func LookupSecretIamPolicyOutput(ctx *pulumi.Context, args LookupSecretIamPolicyOutputArgs, opts ...pulumi.InvokeOption) LookupSecretIamPolicyResultOutput {
return pulumi.ToOutputWithContext(context.Background(), args).
ApplyT(func(v interface{}) (LookupSecretIamPolicyResult, error) {
args := v.(LookupSecretIamPolicyArgs)
r, err := LookupSecretIamPolicy(ctx, &args, opts...)
return *r, err
}).(LookupSecretIamPolicyResultOutput)
}
type LookupSecretIamPolicyOutputArgs struct {
OptionsRequestedPolicyVersion pulumi.StringPtrInput `pulumi:"optionsRequestedPolicyVersion"`
Project pulumi.StringPtrInput `pulumi:"project"`
SecretId pulumi.StringInput `pulumi:"secretId"`
}
func (LookupSecretIamPolicyOutputArgs) ElementType() reflect.Type {
return reflect.TypeOf((*LookupSecretIamPolicyArgs)(nil)).Elem()
}
type LookupSecretIamPolicyResultOutput struct{ *pulumi.OutputState }
func (LookupSecretIamPolicyResultOutput) ElementType() reflect.Type {
return reflect.TypeOf((*LookupSecretIamPolicyResult)(nil)).Elem()
}
func (o LookupSecretIamPolicyResultOutput) ToLookupSecretIamPolicyResultOutput() LookupSecretIamPolicyResultOutput {
return o
}
func (o LookupSecretIamPolicyResultOutput) ToLookupSecretIamPolicyResultOutputWithContext(ctx context.Context) LookupSecretIamPolicyResultOutput {
return o
}
// Specifies cloud audit logging configuration for this policy.
func (o LookupSecretIamPolicyResultOutput) AuditConfigs() AuditConfigResponseArrayOutput {
return o.ApplyT(func(v LookupSecretIamPolicyResult) []AuditConfigResponse { return v.AuditConfigs }).(AuditConfigResponseArrayOutput)
}
// Associates a list of `members`, or principals, with a `role`. Optionally, may specify a `condition` that determines how and when the `bindings` are applied. Each of the `bindings` must contain at least one principal. The `bindings` in a `Policy` can refer to up to 1,500 principals; up to 250 of these principals can be Google groups. Each occurrence of a principal counts towards these limits. For example, if the `bindings` grant 50 different roles to `user:[email protected]`, and not to any other principal, then you can add another 1,450 principals to the `bindings` in the `Policy`.
func (o LookupSecretIamPolicyResultOutput) Bindings() BindingResponseArrayOutput {
return o.ApplyT(func(v LookupSecretIamPolicyResult) []BindingResponse { return v.Bindings }).(BindingResponseArrayOutput)
}
// `etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other. It is strongly suggested that systems make use of the `etag` in the read-modify-write cycle to perform policy updates in order to avoid race conditions: An `etag` is returned in the response to `getIamPolicy`, and systems are expected to put that etag in the request to `setIamPolicy` to ensure that their change will be applied to the same version of the policy. **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost.
func (o LookupSecretIamPolicyResultOutput) Etag() pulumi.StringOutput {
return o.ApplyT(func(v LookupSecretIamPolicyResult) string { return v.Etag }).(pulumi.StringOutput)
}
// Specifies the format of the policy. Valid values are `0`, `1`, and `3`. Requests that specify an invalid value are rejected. Any operation that affects conditional role bindings must specify version `3`. This requirement applies to the following operations: * Getting a policy that includes a conditional role binding * Adding a conditional role binding to a policy * Changing a conditional role binding in a policy * Removing any role binding, with or without a condition, from a policy that includes conditions **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost. If a policy does not include any conditions, operations on that policy may specify any valid version or leave the field unset. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
func (o LookupSecretIamPolicyResultOutput) Version() pulumi.IntOutput {
return o.ApplyT(func(v LookupSecretIamPolicyResult) int { return v.Version }).(pulumi.IntOutput)
}
func init() {
pulumi.RegisterOutputType(LookupSecretIamPolicyResultOutput{})
}
|
{
return nil, err
}
|
day18.py
|
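# Snailfish-number arithmetic (Advent of Code 2021, day 18): numbers are nested
# [left, right] pairs, reduced by repeatedly "exploding" pairs nested four deep
# and "splitting" values of 10 or more.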
from __future__ import annotations
import itertools
import math
from dataclasses import dataclass
from typing import Any
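# A tiny zipper over nested Python lists: `inner` is the root structure and
# `path` is the list of 0/1 indices leading from the root to the focused node.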
@dataclass
class TreeZipper:
inner: Any
path: list[int]
def up(self):
if self.path:
return TreeZipper(self.inner, self.path[:-1]), self.path[-1]
return None
def get(self):
v = self.inner
for p in self.path:
v = v[p]
return v
def set(self, x):
v = self.inner
for p in self.path[:-1]:
v = v[p]
v[self.path[-1]] = x
def try_left(self):
v = self.get()
if isinstance(v, list):
return TreeZipper(self.inner, self.path + [0])
return None
def try_right(self):
v = self.get()
if isinstance(v, list):
return TreeZipper(self.inner, self.path + [1])
return None
class Whoop(Exception):
pass
def do_reduce_exp(v: TreeZipper, depth):
if depth == 4 and isinstance(v.get(), list):
# print("exploding")
l, r = v.get()
v.set(0)
l_v = v
came_from_left = False
dont_go = False
while True:
# print("left", l_v, l_v.get())
            if (l_v_n := l_v.try_left()) is not None and not came_from_left:
                l_v = l_v_n
                break
            elif (l_v_n_v := l_v.up()) is not None:
                # otherwise climb, remembering which side we came from
                l_v = l_v_n_v[0]
                came_from_left = l_v_n_v[1] == 0
            else:
                dont_go = True
                # reached the root without finding a left branch: no left neighbour
                break
if not dont_go:
while True:
                if (l_v_n := l_v.try_right()) is not None:
                    l_v = l_v_n
                # descend to the rightmost leaf
                if isinstance(l_v.get(), int):
                    # nearest number to the left: add the exploded left value and stop
                    l_v.set(l_v.get() + l)
                    break
l_v = v
came_from_right = False
dont_go = False
while True:
# print("right", l_v, l_v.get())
            if (l_v_n := l_v.try_right()) is not None and not came_from_right:
                l_v = l_v_n
                break
            elif (l_v_n_v := l_v.up()) is not None:
                # otherwise climb, remembering which side we came from
                l_v = l_v_n_v[0]
                came_from_right = l_v_n_v[1] == 1
            else:
                # reached the root without finding a right branch: no right neighbour
                dont_go = True
                break
if not dont_go:
while True:
                if (l_v_n := l_v.try_left()) is not None:
                    l_v = l_v_n
                # descend to the leftmost leaf
                if isinstance(l_v.get(), int):
                    # nearest number to the right: add the exploded right value and stop
                    l_v.set(l_v.get() + r)
                    break
raise Whoop()
    if (l_v := v.try_left()) is not None:
        do_reduce_exp(l_v, depth + 1)
    if (r_v := v.try_right()) is not None:
        do_reduce_exp(r_v, depth + 1)
def do_reduce_splt(v: TreeZipper):
n_v = v.get()
if isinstance(n_v, int):
if n_v >= 10:
# print("splitting")
l_v = math.floor(n_v / 2)
r_v = math.ceil(n_v / 2)
v.set([l_v, r_v])
raise Whoop()
# otherwise, go and reduce both sides
    if (l_v := v.try_left()) is not None:
        do_reduce_splt(l_v)
    if (r_v := v.try_right()) is not None:
        do_reduce_splt(r_v)
def iter_red(l):
# print("doing", l)
|
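# Magnitude of a snailfish number: 3x the left child's magnitude plus 2x the
# right child's, applied recursively.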
def do_mag(v: TreeZipper):
if isinstance(v.get(), int):
return v.get()
return 3 * do_mag(v.try_left()) + 2 * do_mag(v.try_right())
inp = [
[[[[7,1],[0,0]],[6,[8,2]]],[8,[3,8]]],
[[[3,6],[9,4]],[[[5,9],5],[8,0]]],
[[[2,2],2],[1,[[1,6],7]]],
[[[[0,9],7],[[3,2],8]],[6,[7,9]]],
[[[[4,1],6],[[7,6],[2,2]]],[[[1,1],9],4]],
[[[8,[3,7]],3],[[4,4],[[9,1],[3,5]]]],
[[4,[8,2]],[1,[0,5]]],
[8,[8,7]],
[[[[2,2],7],[3,[4,5]]],[[4,6],[[2,5],4]]],
[[[5,5],[[5,1],3]],[[2,[8,2]],[[6,9],[1,5]]]],
[0,7],
[[[[5,1],3],[8,[5,3]]],7],
[[5,[2,[0,6]]],[[[5,5],2],[9,[8,0]]]],
[[[[3,4],2],0],4],
[[[[5,3],[2,7]],6],[[4,0],[9,[7,2]]]],
[[[3,[2,5]],[3,3]],7],
[[[[5,1],1],[4,8]],[[5,[8,3]],2]],
[[4,[[8,1],[8,5]]],[[[4,1],0],6]],
[[[5,5],[5,9]],[0,[[6,8],[0,1]]]],
[4,[[[7,9],4],0]],
[[[[0,1],7],[[3,6],5]],[8,[5,[6,1]]]],
[[[7,7],[8,0]],[6,[8,[7,9]]]],
[[[9,2],1],6],
[[[4,4],[2,[5,0]]],[[[2,6],6],[5,[4,3]]]],
[[2,[[4,7],5]],1],
[[8,7],[[[2,0],7],[1,[0,3]]]],
[[9,[[9,3],[9,5]]],[[8,7],[[4,1],[6,5]]]],
[[3,4],[[9,4],5]],
[[5,[[8,3],5]],1],
[[0,[[9,0],[3,2]]],[2,[7,[5,1]]]],
[[9,[[9,5],[8,6]]],[[4,4],[[3,8],[1,6]]]],
[[[1,[5,2]],9],[[4,6],[3,[8,0]]]],
[[1,7],[[1,7],9]],
[[[[3,4],3],[[7,5],[9,1]]],[[[5,0],[3,0]],[[7,9],6]]],
[[[7,2],[[1,0],[5,6]]],[[[3,7],[8,9]],6]],
[[[[1,1],1],[[8,6],[9,8]]],[[[1,8],4],[8,9]]],
[[[8,9],0],3],
[[[1,7],[1,[3,9]]],[6,[0,[8,5]]]],
[[0,5],[6,5]],
[[[[6,8],[4,5]],[[7,4],6]],[[3,6],5]],
[[8,[[0,9],8]],[9,[7,[7,9]]]],
[0,[[[7,1],2],[[0,4],4]]],
[[0,[[9,1],5]],[1,4]],
[3,4],
[[[9,3],[1,3]],[[[4,8],3],[[1,3],[9,0]]]],
[[[[5,1],7],[[9,2],8]],[[[6,8],[5,4]],[0,1]]],
[8,[[1,[3,0]],[[7,9],4]]],
[[[6,4],[[2,9],[9,0]]],[7,[[0,0],3]]],
[[3,[[9,6],6]],2],
[[5,[[3,1],[7,5]]],[[[6,7],9],[[4,6],[5,2]]]],
[[[4,[6,5]],8],[[6,[8,0]],[[9,3],3]]],
[[[[4,9],[2,8]],9],[[[5,0],0],[[3,4],[2,8]]]],
[[3,[7,1]],[9,[[1,8],7]]],
[[9,1],[0,[[0,7],[7,1]]]],
[[7,[0,[7,6]]],[[[5,3],1],[6,[4,5]]]],
[8,[[[2,1],[6,9]],[[3,3],[4,6]]]],
[0,[7,[3,0]]],
[[[[1,6],3],[5,[8,0]]],[[[6,6],7],1]],
[[[7,[8,3]],3],[[[2,8],5],[0,[9,5]]]],
[[[[5,1],4],[[1,2],1]],7],
[[[3,[7,5]],7],3],
[[9,[6,[1,1]]],[[[4,1],[2,2]],[[9,5],[7,7]]]],
[2,7],
[[[9,[8,6]],[[9,0],[6,5]]],[[[6,7],5],[[7,7],[2,3]]]],
[[[0,[6,4]],2],[4,[7,[7,5]]]],
[[[[6,1],[9,1]],[[6,1],9]],[[2,6],0]],
[[0,[[1,8],[3,5]]],[4,[[8,2],[4,2]]]],
[[[[9,3],[4,2]],2],[[[2,1],[7,1]],[4,8]]],
[[[3,[0,2]],3],8],
[[[4,[4,9]],9],[[[4,4],5],9]],
[[[[8,2],7],9],[[[1,0],[3,8]],[[7,7],0]]],
[[[3,2],[9,7]],[[9,[8,2]],[[5,5],3]]],
[[[7,[3,1]],[[8,3],1]],[[[8,6],[7,0]],4]],
[[9,[[9,1],5]],[[4,[1,1]],2]],
[[[[7,4],[0,3]],7],[8,[6,[3,3]]]],
[5,5],
[[6,7],[1,[7,[8,1]]]],
[[1,[0,4]],7],
[[[4,0],[[0,1],[2,2]]],[9,[[9,9],[3,0]]]],
[[[6,0],[[8,6],3]],[[5,1],[[8,1],[2,7]]]],
[[[[8,3],7],5],[9,[[5,1],8]]],
[[[[4,0],[5,2]],[[0,0],7]],2],
[[[[0,1],6],2],[[8,2],6]],
[[[[2,4],1],[[6,7],9]],[[[1,6],9],3]],
[[5,5],[[8,[7,7]],[5,8]]],
[[6,[[9,2],[9,7]]],[[[8,5],[4,4]],7]],
[[[9,[7,7]],[6,0]],[7,[[8,7],[1,2]]]],
[[7,[6,2]],[[9,[5,2]],[1,4]]],
[[[7,[5,9]],[[3,9],[4,5]]],[0,6]],
[[9,[8,[2,2]]],[[9,7],[1,1]]],
[[[[2,3],4],[[4,8],9]],[[9,[8,6]],[[0,9],0]]],
[[0,[[9,3],0]],[8,8]],
[[[[2,9],6],[[2,8],9]],[[[0,5],6],[[6,1],7]]],
[[9,[[8,3],[5,8]]],[[7,[3,0]],3]],
[[[4,[4,2]],0],1],
[[[[9,6],[5,8]],[6,2]],[[[8,0],[7,0]],[[5,6],4]]],
[[[8,0],[[4,3],[7,4]]],[[3,[7,9]],[[7,3],6]]],
[[3,[5,[0,3]]],[5,4]],
[[[[1,2],[6,3]],1],[[7,[5,2]],[[8,8],7]]],
[[4,[[8,0],[7,1]]],[[8,[8,0]],[[1,5],3]]]
]
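# NOTE: the example input below immediately overwrites the puzzle input above,
# so part 1 as written runs on the worked example.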
inp = [
[[[0,[5,8]],[[1,7],[9,6]]],[[4,[1,2]],[[1,4],2]]],
[[[5,[2,8]],4],[5,[[9,9],0]]],
[6,[[[6,2],[5,6]],[[7,6],[4,7]]]],
[[[6,[0,7]],[0,9]],[4,[9,[9,0]]]],
[[[7,[6,4]],[3,[1,3]]],[[[5,5],1],9]],
[[6,[[7,3],[3,2]]],[[[3,8],[5,7]],4]],
[[[[5,4],[7,7]],8],[[8,3],8]],
[[9,3],[[9,9],[6,[4,9]]]],
[[2,[[7,7],7]],[[5,8],[[9,3],[0,2]]]],
[[[[5,2],5],[8,[3,7]]],[[5,[7,5]],[4,4]]]
]
# inp = [
# [[[[[7,0],[7,7]],[[7,7],[7,8]]],[[[7,7],[8,8]],[[7,7],[8,7]]]],[7,[5,[[3,8],[1,4]]]]]
# ]
def do_add(l):
it = iter(l)
x = next(it)
iter_red(x)
for y in it:
x = [x, y]
iter_red(x)
return x
out = do_add(inp)
print(out)
print(do_mag(TreeZipper(out, [])))
import copy
inp = [
[[[[7,1],[0,0]],[6,[8,2]]],[8,[3,8]]],
[[[3,6],[9,4]],[[[5,9],5],[8,0]]],
[[[2,2],2],[1,[[1,6],7]]],
[[[[0,9],7],[[3,2],8]],[6,[7,9]]],
[[[[4,1],6],[[7,6],[2,2]]],[[[1,1],9],4]],
[[[8,[3,7]],3],[[4,4],[[9,1],[3,5]]]],
[[4,[8,2]],[1,[0,5]]],
[8,[8,7]],
[[[[2,2],7],[3,[4,5]]],[[4,6],[[2,5],4]]],
[[[5,5],[[5,1],3]],[[2,[8,2]],[[6,9],[1,5]]]],
[0,7],
[[[[5,1],3],[8,[5,3]]],7],
[[5,[2,[0,6]]],[[[5,5],2],[9,[8,0]]]],
[[[[3,4],2],0],4],
[[[[5,3],[2,7]],6],[[4,0],[9,[7,2]]]],
[[[3,[2,5]],[3,3]],7],
[[[[5,1],1],[4,8]],[[5,[8,3]],2]],
[[4,[[8,1],[8,5]]],[[[4,1],0],6]],
[[[5,5],[5,9]],[0,[[6,8],[0,1]]]],
[4,[[[7,9],4],0]],
[[[[0,1],7],[[3,6],5]],[8,[5,[6,1]]]],
[[[7,7],[8,0]],[6,[8,[7,9]]]],
[[[9,2],1],6],
[[[4,4],[2,[5,0]]],[[[2,6],6],[5,[4,3]]]],
[[2,[[4,7],5]],1],
[[8,7],[[[2,0],7],[1,[0,3]]]],
[[9,[[9,3],[9,5]]],[[8,7],[[4,1],[6,5]]]],
[[3,4],[[9,4],5]],
[[5,[[8,3],5]],1],
[[0,[[9,0],[3,2]]],[2,[7,[5,1]]]],
[[9,[[9,5],[8,6]]],[[4,4],[[3,8],[1,6]]]],
[[[1,[5,2]],9],[[4,6],[3,[8,0]]]],
[[1,7],[[1,7],9]],
[[[[3,4],3],[[7,5],[9,1]]],[[[5,0],[3,0]],[[7,9],6]]],
[[[7,2],[[1,0],[5,6]]],[[[3,7],[8,9]],6]],
[[[[1,1],1],[[8,6],[9,8]]],[[[1,8],4],[8,9]]],
[[[8,9],0],3],
[[[1,7],[1,[3,9]]],[6,[0,[8,5]]]],
[[0,5],[6,5]],
[[[[6,8],[4,5]],[[7,4],6]],[[3,6],5]],
[[8,[[0,9],8]],[9,[7,[7,9]]]],
[0,[[[7,1],2],[[0,4],4]]],
[[0,[[9,1],5]],[1,4]],
[3,4],
[[[9,3],[1,3]],[[[4,8],3],[[1,3],[9,0]]]],
[[[[5,1],7],[[9,2],8]],[[[6,8],[5,4]],[0,1]]],
[8,[[1,[3,0]],[[7,9],4]]],
[[[6,4],[[2,9],[9,0]]],[7,[[0,0],3]]],
[[3,[[9,6],6]],2],
[[5,[[3,1],[7,5]]],[[[6,7],9],[[4,6],[5,2]]]],
[[[4,[6,5]],8],[[6,[8,0]],[[9,3],3]]],
[[[[4,9],[2,8]],9],[[[5,0],0],[[3,4],[2,8]]]],
[[3,[7,1]],[9,[[1,8],7]]],
[[9,1],[0,[[0,7],[7,1]]]],
[[7,[0,[7,6]]],[[[5,3],1],[6,[4,5]]]],
[8,[[[2,1],[6,9]],[[3,3],[4,6]]]],
[0,[7,[3,0]]],
[[[[1,6],3],[5,[8,0]]],[[[6,6],7],1]],
[[[7,[8,3]],3],[[[2,8],5],[0,[9,5]]]],
[[[[5,1],4],[[1,2],1]],7],
[[[3,[7,5]],7],3],
[[9,[6,[1,1]]],[[[4,1],[2,2]],[[9,5],[7,7]]]],
[2,7],
[[[9,[8,6]],[[9,0],[6,5]]],[[[6,7],5],[[7,7],[2,3]]]],
[[[0,[6,4]],2],[4,[7,[7,5]]]],
[[[[6,1],[9,1]],[[6,1],9]],[[2,6],0]],
[[0,[[1,8],[3,5]]],[4,[[8,2],[4,2]]]],
[[[[9,3],[4,2]],2],[[[2,1],[7,1]],[4,8]]],
[[[3,[0,2]],3],8],
[[[4,[4,9]],9],[[[4,4],5],9]],
[[[[8,2],7],9],[[[1,0],[3,8]],[[7,7],0]]],
[[[3,2],[9,7]],[[9,[8,2]],[[5,5],3]]],
[[[7,[3,1]],[[8,3],1]],[[[8,6],[7,0]],4]],
[[9,[[9,1],5]],[[4,[1,1]],2]],
[[[[7,4],[0,3]],7],[8,[6,[3,3]]]],
[5,5],
[[6,7],[1,[7,[8,1]]]],
[[1,[0,4]],7],
[[[4,0],[[0,1],[2,2]]],[9,[[9,9],[3,0]]]],
[[[6,0],[[8,6],3]],[[5,1],[[8,1],[2,7]]]],
[[[[8,3],7],5],[9,[[5,1],8]]],
[[[[4,0],[5,2]],[[0,0],7]],2],
[[[[0,1],6],2],[[8,2],6]],
[[[[2,4],1],[[6,7],9]],[[[1,6],9],3]],
[[5,5],[[8,[7,7]],[5,8]]],
[[6,[[9,2],[9,7]]],[[[8,5],[4,4]],7]],
[[[9,[7,7]],[6,0]],[7,[[8,7],[1,2]]]],
[[7,[6,2]],[[9,[5,2]],[1,4]]],
[[[7,[5,9]],[[3,9],[4,5]]],[0,6]],
[[9,[8,[2,2]]],[[9,7],[1,1]]],
[[[[2,3],4],[[4,8],9]],[[9,[8,6]],[[0,9],0]]],
[[0,[[9,3],0]],[8,8]],
[[[[2,9],6],[[2,8],9]],[[[0,5],6],[[6,1],7]]],
[[9,[[8,3],[5,8]]],[[7,[3,0]],3]],
[[[4,[4,2]],0],1],
[[[[9,6],[5,8]],[6,2]],[[[8,0],[7,0]],[[5,6],4]]],
[[[8,0],[[4,3],[7,4]]],[[3,[7,9]],[[7,3],6]]],
[[3,[5,[0,3]]],[5,4]],
[[[[1,2],[6,3]],1],[[7,[5,2]],[[8,8],7]]],
[[4,[[8,0],[7,1]]],[[8,[8,0]],[[1,5],3]]]
]
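# Part 2: largest magnitude obtainable by adding any ordered pair of distinct
# numbers (snailfish addition is not commutative, hence permutations).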
m_v = 0
for l, r in itertools.permutations(inp, 2):
l = copy.deepcopy(l)
r = copy.deepcopy(r)
v = [l, r]
print(f"{l=} {r=}")
do_add(v)
m_v = max(do_mag(TreeZipper(v, [])), m_v)
print(m_v)
|
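    # `Whoop` signals that an explode or split fired; repeat until a full
    # pass raises nothing, i.e. the number is fully reduced.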
while True:
t = TreeZipper(l, [])
try:
# print(l)
do_reduce_exp(t, 0)
do_reduce_splt(t)
except Whoop:
pass
else:
print("did nothing")
return
|
Bullet.js
|
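// A pooled Phaser 3 arcade-physics bullet: `fire` re-arms it at a position with
// an upward velocity, and it deactivates itself once it leaves the top of the screen.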
class
|
extends Phaser.Physics.Arcade.Sprite {
constructor (scene, x, y) {
super(scene, x, y, 'bullet');
}
fire (x, y, velocity) {
this.body.reset(x, y);
this.setActive(true);
this.setVisible(true);
this.setVelocityY(-velocity);
}
deactivate () {
this.setActive(false);
this.setVisible(false);
}
preUpdate (time, delta) {
super.preUpdate(time, delta);
if (this.y <= -16) { this.deactivate() }
}
}
export default Bullet
|
Bullet
|
ctl.rs
|
#[doc = "Reader of register CTL"]
pub type R = crate::R<u32, super::CTL>;
#[doc = "Writer for register CTL"]
pub type W = crate::W<u32, super::CTL>;
#[doc = "Register CTL `reset()`'s with value 0"]
impl crate::ResetValue for super::CTL {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `STARTUP_CYCLES`"]
pub type STARTUP_CYCLES_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `STARTUP_CYCLES`"]
pub struct STARTUP_CYCLES_W<'a> {
w: &'a mut W,
}
impl<'a> STARTUP_CYCLES_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !(0xffff << 16)) | (((value as u32) & 0xffff) << 16);
self.w
}
}
#[doc = "Reader of field `TRNG_EN`"]
pub type TRNG_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TRNG_EN`"]
pub struct TRNG_EN_W<'a> {
w: &'a mut W,
}
impl<'a> TRNG_EN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
self.w
}
}
#[doc = "Reader of field `NO_LFSR_FB`"]
pub type NO_LFSR_FB_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `NO_LFSR_FB`"]
pub struct NO_LFSR_FB_W<'a> {
w: &'a mut W,
}
impl<'a> NO_LFSR_FB_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "Reader of field `TEST_MODE`"]
pub type TEST_MODE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TEST_MODE`"]
pub struct TEST_MODE_W<'a> {
w: &'a mut W,
}
impl<'a> TEST_MODE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W
|
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
impl R {
#[doc = "Bits 16:31 - STARTUP_CYCLES"]
#[inline(always)]
pub fn startup_cycles(&self) -> STARTUP_CYCLES_R {
STARTUP_CYCLES_R::new(((self.bits >> 16) & 0xffff) as u16)
}
#[doc = "Bit 10 - TRNG_EN"]
#[inline(always)]
pub fn trng_en(&self) -> TRNG_EN_R {
TRNG_EN_R::new(((self.bits >> 10) & 0x01) != 0)
}
#[doc = "Bit 2 - NO_LFSR_FB"]
#[inline(always)]
pub fn no_lfsr_fb(&self) -> NO_LFSR_FB_R {
NO_LFSR_FB_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 1 - TEST_MODE"]
#[inline(always)]
pub fn test_mode(&self) -> TEST_MODE_R {
TEST_MODE_R::new(((self.bits >> 1) & 0x01) != 0)
}
}
impl W {
#[doc = "Bits 16:31 - STARTUP_CYCLES"]
#[inline(always)]
pub fn startup_cycles(&mut self) -> STARTUP_CYCLES_W {
STARTUP_CYCLES_W { w: self }
}
#[doc = "Bit 10 - TRNG_EN"]
#[inline(always)]
pub fn trng_en(&mut self) -> TRNG_EN_W {
TRNG_EN_W { w: self }
}
#[doc = "Bit 2 - NO_LFSR_FB"]
#[inline(always)]
pub fn no_lfsr_fb(&mut self) -> NO_LFSR_FB_W {
NO_LFSR_FB_W { w: self }
}
#[doc = "Bit 1 - TEST_MODE"]
#[inline(always)]
pub fn test_mode(&mut self) -> TEST_MODE_W {
TEST_MODE_W { w: self }
}
}
|
{
self.bit(false)
}
|
util.py
|
import re
|
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()
|
def camel_to_snake(phrase):
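    """Convert CamelCase to snake_case: insert underscores before capitalised
    words, then at lower/digit-to-upper boundaries, and lowercase the result."""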
s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", phrase)
|
arbitrary_self_types_raw_pointer_trait.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(arbitrary_self_types)]
use std::ptr;
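// Exercises raw-pointer `self` receivers (`self: *const Self`) through both
// thin and fat (trait-object) pointers; calling `foo` on null pointers is fine
// here because it never dereferences its receiver.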
trait Foo {
fn foo(self: *const Self) -> &'static str;
unsafe fn bar(self: *const Self) -> i64;
unsafe fn complicated(self: *const *const Self) -> i64 where Self: Sized {
(*self).bar()
}
}
impl Foo for i32 {
fn foo(self: *const Self) -> &'static str {
"I'm an i32!"
}
unsafe fn bar(self: *const Self) -> i64
|
}
impl Foo for u32 {
fn foo(self: *const Self) -> &'static str {
"I'm a u32!"
}
unsafe fn bar(self: *const Self) -> i64 {
*self as i64
}
}
fn main() {
let null_i32 = ptr::null::<i32>() as *const Foo;
let null_u32 = ptr::null::<u32>() as *const Foo;
assert_eq!("I'm an i32!", null_i32.foo());
assert_eq!("I'm a u32!", null_u32.foo());
let valid_i32 = 5i32;
let valid_i32_thin = &valid_i32 as *const i32;
assert_eq!("I'm an i32!", valid_i32_thin.foo());
assert_eq!(5, unsafe { valid_i32_thin.bar() });
assert_eq!(5, unsafe { (&valid_i32_thin as *const *const i32).complicated() });
let valid_i32_fat = valid_i32_thin as *const Foo;
assert_eq!("I'm an i32!", valid_i32_fat.foo());
assert_eq!(5, unsafe { valid_i32_fat.bar() });
let valid_u32 = 18u32;
let valid_u32_thin = &valid_u32 as *const u32;
assert_eq!("I'm a u32!", valid_u32_thin.foo());
assert_eq!(18, unsafe { valid_u32_thin.bar() });
assert_eq!(18, unsafe { (&valid_u32_thin as *const *const u32).complicated() });
let valid_u32_fat = valid_u32_thin as *const Foo;
assert_eq!("I'm a u32!", valid_u32_fat.foo());
assert_eq!(18, unsafe { valid_u32_fat.bar() });
}
|
{
*self as i64
}
|
generisiCsv.ts
|
/* eslint-disable max-len */
/* eslint-disable @typescript-eslint/no-explicit-any */
import moment from 'moment';
import { State } from '@/models/store/state';
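// Builds a semicolon-separated VAT e-filing export: one header record (type '1'),
// one detail record per invoice (type '2'), and a trailing totals record (type '3').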
function sumirajKolone(kolone: Array<number>) {
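  // Sum in integer "cents" (two fixed decimal places) to avoid accumulating
  // floating-point error across rows.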
return kolone.reduce((result: number, add: number) => {
const parsed = parseInt(add.toFixed(2).replace('.', ''), 10);
return result + parsed;
}, 0) / 100;
}
function obradiUlaz(red: any, index: number, zaglavlje: any) {
const kolone: Array<string> = [];
kolone.push('2');
kolone.push(zaglavlje?.poreskiPeriod || '');
kolone.push((index + 1).toString());
kolone.push(red.tipDokumenta);
kolone.push(red.brojFakture);
kolone.push(moment(red.datumFakture).format('YYYY-MM-DD'));
kolone.push(red.datumPrijema ? moment(red.datumPrijema).format('YYYY-MM-DD') : '');
kolone.push(red.dobavljac.naziv);
kolone.push(red.dobavljac.sjediste);
kolone.push(red.dobavljac.pdvBroj || '');
kolone.push(red.dobavljac.jib || '');
kolone.push(red.iznos.bezPDV.toFixed(2));
kolone.push(red.iznos.saPDV.toFixed(2));
kolone.push(red.iznos.pausalnaNaknada.toFixed(2));
kolone.push(red.iznos.ulazniPDV.toFixed(2));
kolone.push(red.iznos.ulazniPDVo.toFixed(2));
kolone.push(red.iznos.ulazniPDVno.toFixed(2));
kolone.push(red.iznos.ulazniPDVo32.toFixed(2));
kolone.push(red.iznos.ulazniPDVno33.toFixed(2));
kolone.push(red.iznos.ulazniPDVno34.toFixed(2));
return kolone;
}
function obradiIzlaz(red: any, index: number, zaglavlje: any) {
const kolone: Array<string> = [];
kolone.push('2');
kolone.push(zaglavlje?.poreskiPeriod || '');
kolone.push((index + 1).toString());
kolone.push(red.tipDokumenta);
kolone.push(red.brojFakture);
kolone.push(moment(red.datumFakture).format('YYYY-MM-DD'));
kolone.push(red.nabavljac.naziv);
kolone.push(red.nabavljac.sjediste);
kolone.push(red.nabavljac.pdvBroj || '');
kolone.push(red.nabavljac.jib || '');
kolone.push(red.iznos.ukupno.toFixed(2));
kolone.push(red.iznos.internaFaktura.toFixed(2));
kolone.push(red.iznos.izvoznaIsporuka.toFixed(2));
kolone.push(red.iznos.ostaleIsporukeOslobodjeno.toFixed(2));
kolone.push(red.iznos.osnovicaZaPDVObvezniku.toFixed(2));
kolone.push(red.iznos.izlazniPDVObvezniku.toFixed(2));
kolone.push(red.iznos.osnovicaNeObvavezniku.toFixed(2));
kolone.push(red.iznos.izlazniPDVNeObavezniku.toFixed(2));
kolone.push(red.iznos.izlazniPDVo32.toFixed(2));
kolone.push(red.iznos.izlazniPDVno33.toFixed(2));
kolone.push(red.iznos.izlazniPDVno34.toFixed(2));
return kolone;
}
function generisiPrateciSlog(state: State, tipDatoteke: string) {
if (tipDatoteke === '2') {
return [
'3',
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.ukupno || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.internaFaktura || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.izvoznaIsporuka || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.ostaleIsporukeOslobodjeno || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.osnovicaZaPDVObvezniku || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.izlazniPDVObvezniku || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.osnovicaNeObvavezniku || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.izlazniPDVNeObavezniku || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.izlazniPDVo32 || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.izlazniPDVno33 || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.izlazniPDVno34 || 0)).toFixed(2),
state.tsvIzvjestaj.length.toString(),
];
}
return [
'3',
|
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.bezPDV || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.saPDV || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.pausalnaNaknada || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.ulazniPDV || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.ulazniPDVo || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.ulazniPDVno || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.ulazniPDVo32 || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.ulazniPDVno33 || 0)).toFixed(2),
sumirajKolone(state.tsvIzvjestaj.map((red: any) => red.iznos.ulazniPDVno34 || 0)).toFixed(2),
state.tsvIzvjestaj.length.toString(),
];
}
const generisiCsv = (state: State): string|undefined => {
const csvRedovi: Array<string> = [];
const { zaglavlje } = state;
if (!state.tsvIzvjestaj) {
return;
}
const slogZaglavlja = [
'1',
zaglavlje?.pdvBrojObveznika,
zaglavlje?.poreskiPeriod,
zaglavlje?.tipDatoteke,
    zaglavlje?.redniBrojDatoteke ? `0${zaglavlje.redniBrojDatoteke}` : '',
moment().format('YYYY-MM-DD'),
    moment().format('HH:mm:ss'),
];
csvRedovi.push(slogZaglavlja.join(';'));
state.tsvIzvjestaj.forEach((red: any, index: number) => {
const kolone: Array<string> = zaglavlje?.tipDatoteke === '1' ? obradiUlaz(red, index, zaglavlje) : obradiIzlaz(red, index, zaglavlje);
csvRedovi.push(kolone.join(';'));
});
const prateciSlog: Array<string> = generisiPrateciSlog(state, zaglavlje?.tipDatoteke || '1');
csvRedovi.push(prateciSlog.join(';'));
// eslint-disable-next-line consistent-return
return csvRedovi.join('\n');
};
export default generisiCsv;
| |
DBN.py
|
import numpy
import theano
import theano.tensor as T
from deeplearning import rbm
class DBN():
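    """A stack of RBMs where each layer's hidden units feed the next
    layer's visible units."""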
def
|
(self, vsize=None, hsizes=[], lr=None, bsize=10, seed=123):
assert vsize and hsizes and lr
input = T.dmatrix('global_input')
self.layers = []
for hsize in hsizes:
r = rbm.RBM(input=input, vsize=vsize, hsize=hsize, bsize=bsize,
lr=lr, seed=seed)
self.layers.append(r)
# configure inputs for subsequent layer
input = self.layers[-1].hid
vsize = hsize
|
__init__
|
mod.rs
|
//! A module to contain Rust representations of ASTs in a format that Sapling can work with.
pub mod display_token;
pub mod json;
use std::error::Error;
use crate::arena::Arena;
use crate::core::Size;
use display_token::{write_tokens, DisplayToken, RecTok};
/// The possible ways an insertion could fail
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum InsertError {
/// Inserting the node would cause the child count to exceed the limit for that node type
TooManyChildren {
        /// The name of the node whose child count has been exceeded
name: String,
/// The maximum number of children that the node being inserted into could have
max_children: usize,
},
}
impl std::fmt::Display for InsertError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
InsertError::TooManyChildren { name, max_children } => write!(
f,
"Can't exceed child count limit of {} in {}",
max_children, name
),
}
}
}
impl Error for InsertError {}
/// The possible ways a deletion could fail
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum DeleteError {
/// Deleting the requested node(s) would cause the parent to have too few children
TooFewChildren {
        /// The [`display_name`](Ast::display_name) of the node whose minimum child count
/// constraint has been violated
name: String,
/// The minimum number of children that the node in question could have had
min_children: usize,
},
    /// The requested node doesn't exist. This shouldn't be able to occur in practice, because it
/// would require selecting a non-existent node - but nevertheless I don't think Sapling should
/// panic in this situation.
IndexOutOfRange {
/// The length of the current child array
len: usize,
/// The index of the child that was attempted to be removed
index: usize,
},
}
impl std::fmt::Display for DeleteError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
DeleteError::TooFewChildren { name, min_children } => write!(
f,
"Node type {} can't have fewer than {} children.",
name, min_children
),
DeleteError::IndexOutOfRange { len, index } => write!(
f,
"Deleting child index {} is out of range 0..{}",
index, len
),
}
}
}
/// A function that recursively writes the tree view of a node and all its children to a given
/// [`String`]. To avoid allocations, this function modifies a [`String`] buffer
/// `indentation_string`, which will be appended to the front of every line, and will cause the
/// indentation levels to increase.
fn write_tree_view_recursive<'arena, Node>(
node: &'arena Node,
string: &mut String,
indentation_string: &mut String,
) where
Node: Ast<'arena>,
{
// Push the node's display name with indentation and a newline
string.push_str(indentation_string);
string.push_str(&node.display_name());
string.push('\n');
// Indent by two spaces
    indentation_string.push_str("  ");
// Write all the children
for child in node.children().iter() {
write_tree_view_recursive(*child, string, indentation_string);
}
// Reset indentation
for _ in 0..2 {
indentation_string.pop();
}
}
/// A macro to generate an implementation of [`AstClass`] called `Class` and automatically fill the
/// required information.
#[macro_export]
macro_rules! ast_class {
($( $variant_name: ident => $c: expr, $name: literal );+) => {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[allow(missing_docs)]
pub enum Class {
$( $variant_name ),+
}
impl AstClass for Class {
fn to_char(self) -> char {
match self {
$( Class::$variant_name => $c ),+
}
}
fn name(self) -> &'static str {
match self {
$( Class::$variant_name => $name ),+
}
}
fn from_char(c: char) -> Option<Self> {
match c {
$( $c => Some(Class::$variant_name), )+
_ => None,
}
}
}
};
}
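// A hypothetical invocation, defining a two-variant `Class`:
//
//     ast_class!(True => 't', "true"; False => 'f', "false");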
/// All the possible types which [`Ast`] nodes can take.
pub trait AstClass: Copy + std::fmt::Debug + Eq + std::hash::Hash {
/// Gets the [`char`] that would have been used to create this value
fn to_char(self) -> char;
/// Returns the name of this value
fn name(self) -> &'static str;
/// Creates a `AstClass` from a [`char`], returning [`None`] if invalid.
fn from_char(c: char) -> Option<Self>;
}
/// The specification of an AST that sapling can edit
pub trait Ast<'arena>: std::fmt::Debug + Clone + Eq + Default + std::hash::Hash {
/// A type parameter that will represent the different ways this AST can be rendered
type FormatStyle;
/// A type parameter that will represent the different node types this AST can use
type Class: AstClass;
/// The error type for ways that parsing can fail
type ParseErr: std::error::Error;
/* FORMATTING FUNCTIONS */
/// Returns an iterator of all the items that need to be rendered to the screen to make up this
/// node, along with their on-screen locations.
fn display_tokens_rec(
&'arena self,
format_style: &Self::FormatStyle,
) -> Vec<RecTok<'arena, Self>>;
/// Parses from text and adds to an arena, return a pointer to the allocated root node.
fn parse_to_arena(
text: impl std::io::Read,
arena: &'arena mut Arena<Self>,
) -> Result<&'arena Self, Self::ParseErr>;
/// Uses [`display_tokens_rec`](Self::display_tokens_rec) to build a stream of
/// [`DisplayToken`]s representing this node, but where each [`DisplayToken`] is paired with a
/// reference to the node that owns it. This extra data is used by the rendering code to
/// determine which pieces of text correspond to nodes that are selected.
fn display_tokens(
&'arena self,
format_style: &Self::FormatStyle,
) -> Vec<(&'arena Self, DisplayToken)> {
let mut tok_pairs: Vec<(&'arena Self, DisplayToken)> = Vec::new();
for i in self.display_tokens_rec(format_style) {
match i {
RecTok::Tok(t) => {
tok_pairs.push((self, t));
}
RecTok::Child(c) => {
tok_pairs.extend(c.display_tokens(format_style));
}
}
}
tok_pairs
}
/// Determine the space on the screen occupied by this node in an AST
fn size(&self, format_style: &Self::FormatStyle) -> Size;
/// Write the textual representation of this AST to a string
fn write_text(&'arena self, string: &mut String, format_style: &Self::FormatStyle) {
write_tokens(self, string, format_style);
}
/// Make a [`String`] representing this AST.
/// Same as [`write_text`](Ast::write_text) but creates a new [`String`].
fn to_text(&'arena self, format_style: &Self::FormatStyle) -> String {
let mut s = String::new();
self.write_text(&mut s, format_style);
s
}
/* DEBUG VIEW FUNCTIONS */
/// Get a slice over the direct children of this node. This operation is expected to be cheap
/// - it will be used a lot of times without caching the results.
fn children<'s>(&'s self) -> &'s [&'arena Self];
/// Get a mutable slice over the direct children of this node. Like
/// [`children`](Ast::children), this operation is expected to be cheap - it will be used a lot
/// of times without caching the results.
fn children_mut<'s>(&'s mut self) -> &'s mut [&'arena Self];
/// Replaces the `index`th child of this node with a reference to a `new_node`
fn replace_child(&mut self, index: usize, new_node: &'arena Self) {
self.children_mut()[index] = new_node;
}
/// Removes the child at a given index from the children of this node, if possible. If the
/// removal was not possible, then we return a custom error type.
fn delete_child(&mut self, index: usize) -> Result<(), DeleteError>;
/// Insert a given pre-allocated node as a new child of this node. This can involve allocating
/// extra nodes (usually as ancestors of `new_node` but descendants of `self`). This is
/// required for cases like inserting into JSON objects (e.g. inserting true into the empty
/// object will correspond to two extra nodes being allocated (an empty string and a field):
/// `{}` -> `{"": true}`).
fn insert_child(
&mut self,
new_node: &'arena Self,
arena: &'arena Arena<Self>,
index: usize,
) -> Result<(), InsertError>;
/// Get the display name of this node
fn display_name(&self) -> String;
/// Append a debug-style tree view of this node to a [`String`], similar to the output of the
/// Unix command 'tree'
fn write_tree_view(&'arena self, string: &mut String) {
let mut indentation_string = String::new();
write_tree_view_recursive(self, string, &mut indentation_string);
// Pop the unnecessary newline at the end
let popped_char = string.pop();
debug_assert_eq!(Some('\n'), popped_char);
}
    /// Build a string containing the tree view of this node, similar to the output of the Unix command
/// 'tree'. This is the same as [`write_tree_view`](Self::write_tree_view), except that it
/// returns a new [`String`] rather than appending to an existing [`String`].
fn tree_view(&'arena self) -> String
|
/* AST EDITING FUNCTIONS */
    /// Generate a new node from an [`AstClass`].
fn from_class(node_type: Self::Class) -> Self;
    /// Returns whether or not a given node type is a valid child at the given index
fn is_valid_child(&self, index: usize, node_type: Self::Class) -> bool;
    /// Returns whether or not a given node type is a valid root
fn is_valid_root(&self, node_type: Self::Class) -> bool;
/// The name of this node as should be displayed in the DAG debug graph
fn debug_name(&self) -> String;
}
|
{
let mut s = String::new();
self.write_tree_view(&mut s);
s
}
|
quote.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast;
use codemap::Span;
use ext::base::ExtCtxt;
use ext::base;
use ext::build::AstBuilder;
use parse::token::*;
use parse::token;
use ptr::P;
/**
*
* Quasiquoting works via token trees.
*
 * This is registered as a set of expression syntax extensions called quote!
* that lifts its argument token-tree to an AST representing the
* construction of the same token tree, with ast::TtNonterminal nodes
* interpreted as antiquotes (splices).
*
*/
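// For example, `quote_expr!(cx, 1 + $x)` expands to code that rebuilds the
// quoted token tree at the call site, splicing in the tokens of `x`.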
pub mod rt {
use ast;
use codemap::Spanned;
use ext::base::ExtCtxt;
use parse::token;
use parse;
use print::pprust;
use ptr::P;
use ast::{TokenTree, Generics, Expr};
pub use parse::new_parser_from_tts;
pub use codemap::{BytePos, Span, dummy_spanned};
pub trait ToTokens {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> ;
}
impl ToTokens for TokenTree {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
vec!(self.clone())
}
}
impl<T: ToTokens> ToTokens for Vec<T> {
fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
let a = self.iter().flat_map(|t| t.to_tokens(cx).into_iter());
FromIterator::from_iter(a)
}
}
impl<T: ToTokens> ToTokens for Spanned<T> {
fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
// FIXME: use the span?
self.node.to_tokens(cx)
}
}
impl<T: ToTokens> ToTokens for Option<T> {
fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
match self {
&Some(ref t) => t.to_tokens(cx),
&None => Vec::new(),
}
}
}
/* Should be (when bugs in default methods are fixed):
trait ToSource : ToTokens {
// Takes a thing and generates a string containing rust code for it.
pub fn to_source() -> String;
// If you can make source, you can definitely make tokens.
pub fn to_tokens(cx: &ExtCtxt) -> ~[TokenTree] {
cx.parse_tts(self.to_source())
}
}
*/
// FIXME: Move this trait to pprust and get rid of *_to_str?
pub trait ToSource {
// Takes a thing and generates a string containing rust code for it.
fn to_source(&self) -> String;
}
// FIXME (Issue #16472): This should go away after ToToken impls
// are revised to go directly to token-trees.
trait ToSourceWithHygiene : ToSource {
// Takes a thing and generates a string containing rust code
// for it, encoding Idents as special byte sequences to
// maintain hygiene across serialization and deserialization.
fn to_source_with_hygiene(&self) -> String;
}
macro_rules! impl_to_source(
(P<$t:ty>, $pp:ident) => (
impl ToSource for P<$t> {
fn to_source(&self) -> String {
pprust::$pp(&**self)
}
}
impl ToSourceWithHygiene for P<$t> {
fn to_source_with_hygiene(&self) -> String {
pprust::with_hygiene::$pp(&**self)
}
}
);
($t:ty, $pp:ident) => (
impl ToSource for $t {
fn to_source(&self) -> String {
pprust::$pp(self)
}
}
impl ToSourceWithHygiene for $t {
fn to_source_with_hygiene(&self) -> String {
pprust::with_hygiene::$pp(self)
}
}
);
)
fn slice_to_source<'a, T: ToSource>(sep: &'static str, xs: &'a [T]) -> String {
xs.iter()
.map(|i| i.to_source())
.collect::<Vec<String>>()
.connect(sep)
.to_string()
}
fn slice_to_source_with_hygiene<'a, T: ToSourceWithHygiene>(
sep: &'static str, xs: &'a [T]) -> String {
xs.iter()
.map(|i| i.to_source_with_hygiene())
.collect::<Vec<String>>()
.connect(sep)
.to_string()
}
macro_rules! impl_to_source_slice(
($t:ty, $sep:expr) => (
impl<'a> ToSource for &'a [$t] {
fn to_source(&self) -> String {
slice_to_source($sep, *self)
}
}
impl<'a> ToSourceWithHygiene for &'a [$t] {
fn to_source_with_hygiene(&self) -> String {
slice_to_source_with_hygiene($sep, *self)
}
}
)
)
impl ToSource for ast::Ident {
fn to_source(&self) -> String {
token::get_ident(*self).get().to_string()
}
}
impl ToSourceWithHygiene for ast::Ident {
fn to_source_with_hygiene(&self) -> String {
self.encode_with_hygiene()
}
}
impl_to_source!(ast::Ty, ty_to_string)
impl_to_source!(ast::Block, block_to_string)
impl_to_source!(ast::Arg, arg_to_string)
impl_to_source!(Generics, generics_to_string)
impl_to_source!(P<ast::Item>, item_to_string)
impl_to_source!(P<ast::Method>, method_to_string)
impl_to_source!(P<ast::Stmt>, stmt_to_string)
impl_to_source!(P<ast::Expr>, expr_to_string)
impl_to_source!(P<ast::Pat>, pat_to_string)
impl_to_source!(ast::Arm, arm_to_string)
impl_to_source_slice!(ast::Ty, ", ")
impl_to_source_slice!(P<ast::Item>, "\n\n")
impl ToSource for ast::Attribute_ {
fn to_source(&self) -> String {
pprust::attribute_to_string(&dummy_spanned(self.clone()))
}
}
impl ToSourceWithHygiene for ast::Attribute_ {
fn to_source_with_hygiene(&self) -> String {
self.to_source()
}
}
impl<'a> ToSource for &'a str {
fn to_source(&self) -> String {
let lit = dummy_spanned(ast::LitStr(
token::intern_and_get_ident(*self), ast::CookedStr));
pprust::lit_to_string(&lit)
}
}
impl<'a> ToSourceWithHygiene for &'a str {
fn to_source_with_hygiene(&self) -> String {
self.to_source()
}
}
impl ToSource for () {
fn to_source(&self) -> String {
"()".to_string()
}
}
impl ToSourceWithHygiene for () {
fn to_source_with_hygiene(&self) -> String {
self.to_source()
}
}
impl ToSource for bool {
fn to_source(&self) -> String {
let lit = dummy_spanned(ast::LitBool(*self));
pprust::lit_to_string(&lit)
}
}
impl ToSourceWithHygiene for bool {
fn to_source_with_hygiene(&self) -> String {
self.to_source()
}
}
impl ToSource for char {
fn to_source(&self) -> String {
let lit = dummy_spanned(ast::LitChar(*self));
pprust::lit_to_string(&lit)
}
}
impl ToSourceWithHygiene for char {
fn to_source_with_hygiene(&self) -> String {
self.to_source()
}
}
macro_rules! impl_to_source_int(
(signed, $t:ty, $tag:ident) => (
impl ToSource for $t {
fn to_source(&self) -> String {
let lit = ast::LitInt(*self as u64, ast::SignedIntLit(ast::$tag,
ast::Sign::new(*self)));
pprust::lit_to_string(&dummy_spanned(lit))
}
}
impl ToSourceWithHygiene for $t {
fn to_source_with_hygiene(&self) -> String {
self.to_source()
}
}
);
(unsigned, $t:ty, $tag:ident) => (
impl ToSource for $t {
fn to_source(&self) -> String {
let lit = ast::LitInt(*self as u64, ast::UnsignedIntLit(ast::$tag));
pprust::lit_to_string(&dummy_spanned(lit))
}
}
impl ToSourceWithHygiene for $t {
fn to_source_with_hygiene(&self) -> String {
self.to_source()
}
}
);
)
impl_to_source_int!(signed, int, TyI)
impl_to_source_int!(signed, i8, TyI8)
impl_to_source_int!(signed, i16, TyI16)
impl_to_source_int!(signed, i32, TyI32)
impl_to_source_int!(signed, i64, TyI64)
impl_to_source_int!(unsigned, uint, TyU)
impl_to_source_int!(unsigned, u8, TyU8)
impl_to_source_int!(unsigned, u16, TyU16)
impl_to_source_int!(unsigned, u32, TyU32)
impl_to_source_int!(unsigned, u64, TyU64)
// Alas ... we write these out instead. All redundant.
macro_rules! impl_to_tokens(
($t:ty) => (
impl ToTokens for $t {
fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
cx.parse_tts_with_hygiene(self.to_source_with_hygiene())
}
}
)
)
macro_rules! impl_to_tokens_lifetime(
($t:ty) => (
impl<'a> ToTokens for $t {
fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
cx.parse_tts_with_hygiene(self.to_source_with_hygiene())
}
}
)
)
impl_to_tokens!(ast::Ident)
impl_to_tokens!(P<ast::Item>)
impl_to_tokens!(P<ast::Pat>)
impl_to_tokens!(ast::Arm)
impl_to_tokens!(P<ast::Method>)
impl_to_tokens_lifetime!(&'a [P<ast::Item>])
impl_to_tokens!(ast::Ty)
impl_to_tokens_lifetime!(&'a [ast::Ty])
impl_to_tokens!(Generics)
impl_to_tokens!(P<ast::Stmt>)
impl_to_tokens!(P<ast::Expr>)
impl_to_tokens!(ast::Block)
impl_to_tokens!(ast::Arg)
impl_to_tokens!(ast::Attribute_)
impl_to_tokens_lifetime!(&'a str)
impl_to_tokens!(())
impl_to_tokens!(char)
impl_to_tokens!(bool)
impl_to_tokens!(int)
impl_to_tokens!(i8)
impl_to_tokens!(i16)
impl_to_tokens!(i32)
impl_to_tokens!(i64)
impl_to_tokens!(uint)
impl_to_tokens!(u8)
impl_to_tokens!(u16)
impl_to_tokens!(u32)
impl_to_tokens!(u64)
pub trait ExtParseUtils {
fn parse_item(&self, s: String) -> P<ast::Item>;
fn parse_expr(&self, s: String) -> P<ast::Expr>;
fn parse_stmt(&self, s: String) -> P<ast::Stmt>;
fn parse_tts(&self, s: String) -> Vec<ast::TokenTree>;
}
trait ExtParseUtilsWithHygiene {
// FIXME (Issue #16472): This should go away after ToToken impls
// are revised to go directly to token-trees.
fn parse_tts_with_hygiene(&self, s: String) -> Vec<ast::TokenTree>;
}
impl<'a> ExtParseUtils for ExtCtxt<'a> {
fn parse_item(&self, s: String) -> P<ast::Item> {
let res = parse::parse_item_from_source_str(
"<quote expansion>".to_string(),
s,
self.cfg(),
self.parse_sess());
match res {
Some(ast) => ast,
None => {
error!("parse error");
panic!()
}
}
}
fn parse_stmt(&self, s: String) -> P<ast::Stmt> {
parse::parse_stmt_from_source_str("<quote expansion>".to_string(),
s,
self.cfg(),
Vec::new(),
self.parse_sess())
}
fn parse_expr(&self, s: String) -> P<ast::Expr> {
parse::parse_expr_from_source_str("<quote expansion>".to_string(),
s,
self.cfg(),
self.parse_sess())
}
fn parse_tts(&self, s: String) -> Vec<ast::TokenTree> {
parse::parse_tts_from_source_str("<quote expansion>".to_string(),
s,
self.cfg(),
self.parse_sess())
}
}
impl<'a> ExtParseUtilsWithHygiene for ExtCtxt<'a> {
fn parse_tts_with_hygiene(&self, s: String) -> Vec<ast::TokenTree> {
use parse::with_hygiene::parse_tts_from_source_str;
parse_tts_from_source_str("<quote expansion>".to_string(),
s,
self.cfg(),
self.parse_sess())
}
}
}
pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree])
-> Box<base::MacResult+'cx> {
let (cx_expr, expr) = expand_tts(cx, sp, tts);
let expanded = expand_wrapper(cx, sp, cx_expr, expr);
base::MacExpr::new(expanded)
}
pub fn expand_quote_expr<'cx>(cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree])
-> Box<base::MacResult+'cx> {
let expanded = expand_parse_call(cx, sp, "parse_expr", Vec::new(), tts);
base::MacExpr::new(expanded)
}
pub fn expand_quote_item<'cx>(cx: &mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree])
-> Box<base::MacResult+'cx> {
let expanded = expand_parse_call(cx, sp, "parse_item_with_outer_attributes",
vec!(), tts);
base::MacExpr::new(expanded)
}
pub fn expand_quote_pat<'cx>(cx: &'cx mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree])
-> Box<base::MacResult+'cx> {
let expanded = expand_parse_call(cx, sp, "parse_pat", vec!(), tts);
base::MacExpr::new(expanded)
}
pub fn expand_quote_arm(cx: &mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
let expanded = expand_parse_call(cx, sp, "parse_arm", vec!(), tts);
base::MacExpr::new(expanded)
}
pub fn expand_quote_ty(cx: &mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
let e_param_colons = cx.expr_lit(sp, ast::LitBool(false));
let expanded = expand_parse_call(cx, sp, "parse_ty",
vec!(e_param_colons), tts);
base::MacExpr::new(expanded)
}
pub fn expand_quote_method(cx: &mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
let expanded = expand_parse_call(cx, sp, "parse_method_with_outer_attributes",
vec!(), tts);
base::MacExpr::new(expanded)
}
pub fn expand_quote_stmt(cx: &mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
let e_attrs = cx.expr_vec_ng(sp);
let expanded = expand_parse_call(cx, sp, "parse_stmt",
vec!(e_attrs), tts);
base::MacExpr::new(expanded)
}
fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> {
strs.iter().map(|str| str_to_ident((*str).as_slice())).collect()
}
fn id_ext(str: &str) -> ast::Ident {
str_to_ident(str)
}
// Lift an ident to the expr that evaluates to that ident.
fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> {
let e_str = cx.expr_str(sp, token::get_ident(ident));
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("ident_of"),
vec!(e_str))
}
// Lift a name to the expr that evaluates to that name
fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> {
let e_str = cx.expr_str(sp, token::get_ident(ident));
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("name_of"),
vec!(e_str))
}
fn mk_ast_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> {
let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext(name));
cx.expr_path(cx.path_global(sp, idents))
}
fn mk_token_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> {
let idents = vec!(id_ext("syntax"), id_ext("parse"), id_ext("token"), id_ext(name));
cx.expr_path(cx.path_global(sp, idents))
}
fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOpToken) -> P<ast::Expr> {
let name = match bop {
token::Plus => "Plus",
token::Minus => "Minus",
token::Star => "Star",
token::Slash => "Slash",
token::Percent => "Percent",
token::Caret => "Caret",
token::And => "And",
token::Or => "Or",
token::Shl => "Shl",
token::Shr => "Shr"
};
mk_token_path(cx, sp, name)
}
fn mk_delim(cx: &ExtCtxt, sp: Span, delim: token::DelimToken) -> P<ast::Expr> {
let name = match delim {
token::Paren => "Paren",
token::Bracket => "Bracket",
token::Brace => "Brace",
};
mk_token_path(cx, sp, name)
}
#[allow(non_upper_case_globals)]
fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
match *tok {
token::BinOp(binop) => {
return cx.expr_call(sp, mk_token_path(cx, sp, "BinOp"), vec!(mk_binop(cx, sp, binop)));
}
token::BinOpEq(binop) => {
return cx.expr_call(sp, mk_token_path(cx, sp, "BinOpEq"),
vec!(mk_binop(cx, sp, binop)));
}
token::OpenDelim(delim) => {
return cx.expr_call(sp, mk_token_path(cx, sp, "OpenDelim"),
vec![mk_delim(cx, sp, delim)]);
}
token::CloseDelim(delim) => {
return cx.expr_call(sp, mk_token_path(cx, sp, "CloseDelim"),
vec![mk_delim(cx, sp, delim)]);
}
token::LitByte(i) => {
let e_byte = mk_name(cx, sp, i.ident());
return cx.expr_call(sp, mk_token_path(cx, sp, "LitByte"), vec!(e_byte));
}
token::LitChar(i) => {
let e_char = mk_name(cx, sp, i.ident());
return cx.expr_call(sp, mk_token_path(cx, sp, "LitChar"), vec!(e_char));
}
token::LitInteger(i) => {
let e_int = mk_name(cx, sp, i.ident());
return cx.expr_call(sp, mk_token_path(cx, sp, "LitInteger"), vec!(e_int));
}
token::LitFloat(fident) => {
let e_fident = mk_name(cx, sp, fident.ident());
return cx.expr_call(sp, mk_token_path(cx, sp, "LitFloat"), vec!(e_fident));
}
token::LitStr(ident) => {
return cx.expr_call(sp,
mk_token_path(cx, sp, "LitStr"),
vec!(mk_name(cx, sp, ident.ident())));
}
token::LitStrRaw(ident, n) => {
return cx.expr_call(sp,
mk_token_path(cx, sp, "LitStrRaw"),
vec!(mk_name(cx, sp, ident.ident()), cx.expr_uint(sp, n)));
}
token::Ident(ident, style) => {
return cx.expr_call(sp,
mk_token_path(cx, sp, "Ident"),
vec![mk_ident(cx, sp, ident),
match style {
ModName => mk_token_path(cx, sp, "ModName"),
Plain => mk_token_path(cx, sp, "Plain"),
}]);
}
token::Lifetime(ident) =>
|
token::DocComment(ident) => {
return cx.expr_call(sp,
mk_token_path(cx, sp, "DocComment"),
vec!(mk_name(cx, sp, ident.ident())));
}
token::Interpolated(_) => panic!("quote! with interpolated token"),
_ => ()
}
let name = match *tok {
token::Eq => "Eq",
token::Lt => "Lt",
token::Le => "Le",
token::EqEq => "EqEq",
token::Ne => "Ne",
token::Ge => "Ge",
token::Gt => "Gt",
token::AndAnd => "AndAnd",
token::OrOr => "OrOr",
token::Not => "Not",
token::Tilde => "Tilde",
token::At => "At",
token::Dot => "Dot",
token::DotDot => "DotDot",
token::Comma => "Comma",
token::Semi => "Semi",
token::Colon => "Colon",
token::ModSep => "ModSep",
token::RArrow => "RArrow",
token::LArrow => "LArrow",
token::FatArrow => "FatArrow",
token::Pound => "Pound",
token::Dollar => "Dollar",
token::Underscore => "Underscore",
token::Eof => "Eof",
_ => panic!(),
};
mk_token_path(cx, sp, name)
}
fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
match *tt {
ast::TtToken(sp, ref tok) => {
let e_sp = cx.expr_ident(sp, id_ext("_sp"));
let e_tok = cx.expr_call(sp,
mk_ast_path(cx, sp, "TtToken"),
vec!(e_sp, mk_token(cx, sp, tok)));
let e_push =
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("tt")),
id_ext("push"),
vec!(e_tok));
vec!(cx.stmt_expr(e_push))
},
ast::TtDelimited(sp, ref delimed) => {
mk_tt(cx, sp, &delimed.open_tt()).into_iter()
.chain(delimed.tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()))
.chain(mk_tt(cx, sp, &delimed.close_tt()).into_iter())
.collect()
},
ast::TtSequence(..) => panic!("TtSequence in quote!"),
ast::TtNonterminal(sp, ident) => {
// tt.extend($ident.to_tokens(ext_cx).into_iter())
let e_to_toks =
cx.expr_method_call(sp,
cx.expr_ident(sp, ident),
id_ext("to_tokens"),
vec!(cx.expr_ident(sp, id_ext("ext_cx"))));
let e_to_toks =
cx.expr_method_call(sp, e_to_toks, id_ext("into_iter"), vec![]);
let e_push =
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("tt")),
id_ext("extend"),
vec!(e_to_toks));
vec!(cx.stmt_expr(e_push))
},
}
}
fn mk_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Vec<P<ast::Stmt>> {
let mut ss = Vec::new();
for tt in tts.iter() {
ss.extend(mk_tt(cx, sp, tt).into_iter());
}
ss
}
fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> (P<ast::Expr>, P<ast::Expr>) {
// NB: It appears that the main parser loses its mind if we consider
// $foo as a TtNonterminal during the main parse, so we have to re-parse
// under quote_depth > 0. This is silly and should go away; the _guess_ is
// it has to do with transition away from supporting old-style macros, so
// try removing it when enough of them are gone.
let mut p = cx.new_parser_from_tts(tts);
p.quote_depth += 1u;
let cx_expr = p.parse_expr();
if !p.eat(&token::Comma) {
p.fatal("expected token `,`");
}
let tts = p.parse_all_token_trees();
p.abort_if_errors();
// We also bind a single value, sp, to ext_cx.call_site()
//
// This causes every span in a token-tree quote to be attributed to the
// call site of the extension using the quote. We can't really do much
// better since the source of the quote may well be in a library that
// was not even parsed by this compilation run, that the user has no
// source code for (eg. in libsyntax, which they're just _using_).
//
// The old quasiquoter had an elaborate mechanism for denoting input
// file locations from which quotes originated; unfortunately this
// relied on feeding the source string of the quote back into the
// compiler (which we don't really want to do) and, in any case, only
// pushed the problem a very small step further back: an error
// resulting from a parse of the resulting quote is still attributed to
// the site the string literal occurred, which was in a source file
// _other_ than the one the user has control over. For example, an
// error in a quote from the protocol compiler, invoked in user code
// using macro_rules! for example, will be attributed to the macro_rules.rs
// file in libsyntax, which the user might not even have source to (unless
// they happen to have a compiler on hand). Over all, the phase distinction
// just makes quotes "hard to attribute". Possibly this could be fixed
// by recreating some of the original qq machinery in the tt regime
// (pushing fake FileMaps onto the parser to account for original sites
// of quotes, for example) but at this point it seems not likely to be
// worth the hassle.
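    // Roughly, the statements assembled below expand to (a sketch, not the
    // literal generated AST):
    //
    //     let _sp = ext_cx.call_site();
    //     let mut tt = Vec::new();
    //     tt.push(...); // or tt.extend(...), once per token tree
    //     tt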
let e_sp = cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("call_site"),
Vec::new());
let stmt_let_sp = cx.stmt_let(sp, false,
id_ext("_sp"),
e_sp);
let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
let mut vector = vec!(stmt_let_sp, stmt_let_tt);
vector.extend(mk_tts(cx, sp, tts.as_slice()).into_iter());
let block = cx.expr_block(
cx.block_all(sp,
Vec::new(),
vector,
Some(cx.expr_ident(sp, id_ext("tt")))));
(cx_expr, block)
}
fn expand_wrapper(cx: &ExtCtxt,
sp: Span,
cx_expr: P<ast::Expr>,
expr: P<ast::Expr>) -> P<ast::Expr> {
let uses = [
&["syntax", "ext", "quote", "rt"],
].iter().map(|path| {
let path = path.iter().map(|s| s.to_string()).collect();
cx.view_use_glob(sp, ast::Inherited, ids_ext(path))
}).collect();
// Explicitly borrow to avoid moving from the invoker (#16992)
let cx_expr_borrow = cx.expr_addr_of(sp, cx.expr_deref(sp, cx_expr));
let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr_borrow);
cx.expr_block(cx.block_all(sp, uses, vec!(stmt_let_ext_cx), Some(expr)))
}
fn expand_parse_call(cx: &ExtCtxt,
sp: Span,
parse_method: &str,
arg_exprs: Vec<P<ast::Expr>> ,
tts: &[ast::TokenTree]) -> P<ast::Expr> {
let (cx_expr, tts_expr) = expand_tts(cx, sp, tts);
let cfg_call = || cx.expr_method_call(
sp, cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("cfg"), Vec::new());
let parse_sess_call = || cx.expr_method_call(
sp, cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("parse_sess"), Vec::new());
let new_parser_call =
cx.expr_call(sp,
cx.expr_ident(sp, id_ext("new_parser_from_tts")),
vec!(parse_sess_call(), cfg_call(), tts_expr));
let expr = cx.expr_method_call(sp, new_parser_call, id_ext(parse_method),
arg_exprs);
expand_wrapper(cx, sp, cx_expr, expr)
}
|
{
return cx.expr_call(sp,
mk_token_path(cx, sp, "Lifetime"),
vec!(mk_ident(cx, sp, ident)));
}
|
toasts_api.test.ts
|
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { take } from 'rxjs/operators';
import { ToastsApi } from './toasts_api';
import { uiSettingsServiceMock } from '../../ui_settings/ui_settings_service.mock';
import { i18nServiceMock } from '../../i18n/i18n_service.mock';
async function
|
(toasts: ToastsApi) {
return await toasts
.get$()
.pipe(take(1))
.toPromise();
}
function uiSettingsMock() {
const mock = uiSettingsServiceMock.createSetupContract();
  (mock.get as jest.Mock<typeof mock['get']>).mockImplementation((config: string) => {
switch (config) {
case 'notifications:lifetime:info':
return 5000;
case 'notifications:lifetime:warning':
return 10000;
      case 'notifications:lifetime:error':
return 30000;
default:
throw new Error(`Accessing ${config} is not supported in the mock.`);
}
});
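  // Presumably ToastsApi consults these lifetime settings when scheduling
  // automatic toast dismissal; unknown keys throw so typos in tests surface immediately.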
return mock;
}
function toastDeps() {
return {
uiSettings: uiSettingsMock(),
i18n: i18nServiceMock.createStartContract(),
};
}
describe('#get$()', () => {
it('returns observable that emits NEW toast list when something added or removed', () => {
const toasts = new ToastsApi(toastDeps());
const onToasts = jest.fn();
toasts.get$().subscribe(onToasts);
const foo = toasts.add('foo');
const bar = toasts.add('bar');
toasts.remove(foo);
expect(onToasts).toHaveBeenCalledTimes(4);
const initial = onToasts.mock.calls[0][0];
expect(initial).toEqual([]);
const afterFoo = onToasts.mock.calls[1][0];
expect(afterFoo).not.toBe(initial);
expect(afterFoo).toEqual([foo]);
const afterFooAndBar = onToasts.mock.calls[2][0];
expect(afterFooAndBar).not.toBe(afterFoo);
expect(afterFooAndBar).toEqual([foo, bar]);
const afterRemoveFoo = onToasts.mock.calls[3][0];
expect(afterRemoveFoo).not.toBe(afterFooAndBar);
expect(afterRemoveFoo).toEqual([bar]);
});
it('does not emit a new toast list when unknown toast is passed to remove()', () => {
const toasts = new ToastsApi(toastDeps());
const onToasts = jest.fn();
toasts.get$().subscribe(onToasts);
toasts.add('foo');
onToasts.mockClear();
toasts.remove({ id: 'bar' });
expect(onToasts).not.toHaveBeenCalled();
});
});
describe('#add()', () => {
it('returns toast objects with auto assigned id', () => {
const toasts = new ToastsApi(toastDeps());
const toast = toasts.add({ title: 'foo' });
expect(toast).toHaveProperty('id');
expect(toast).toHaveProperty('title', 'foo');
});
it('adds the toast to toasts list', async () => {
const toasts = new ToastsApi(toastDeps());
const toast = toasts.add({});
const currentToasts = await getCurrentToasts(toasts);
expect(currentToasts).toHaveLength(1);
expect(currentToasts[0]).toBe(toast);
});
it('increments the toast ID for each additional toast', () => {
const toasts = new ToastsApi(toastDeps());
expect(toasts.add({})).toHaveProperty('id', '0');
expect(toasts.add({})).toHaveProperty('id', '1');
expect(toasts.add({})).toHaveProperty('id', '2');
});
it('accepts a string, uses it as the title', async () => {
const toasts = new ToastsApi(toastDeps());
expect(toasts.add('foo')).toHaveProperty('title', 'foo');
});
});
describe('#remove()', () => {
it('removes a toast', async () => {
const toasts = new ToastsApi(toastDeps());
toasts.remove(toasts.add('Test'));
expect(await getCurrentToasts(toasts)).toHaveLength(0);
});
it('ignores unknown toast', async () => {
const toasts = new ToastsApi(toastDeps());
toasts.add('Test');
toasts.remove({ id: 'foo' });
const currentToasts = await getCurrentToasts(toasts);
expect(currentToasts).toHaveLength(1);
});
});
describe('#addSuccess()', () => {
it('adds a success toast', async () => {
const toasts = new ToastsApi(toastDeps());
expect(toasts.addSuccess({})).toHaveProperty('color', 'success');
});
it('returns the created toast', async () => {
const toasts = new ToastsApi(toastDeps());
const toast = toasts.addSuccess({});
const currentToasts = await getCurrentToasts(toasts);
expect(currentToasts[0]).toBe(toast);
});
});
describe('#addWarning()', () => {
it('adds a warning toast', async () => {
const toasts = new ToastsApi(toastDeps());
expect(toasts.addWarning({})).toHaveProperty('color', 'warning');
});
it('returns the created toast', async () => {
const toasts = new ToastsApi(toastDeps());
const toast = toasts.addWarning({});
const currentToasts = await getCurrentToasts(toasts);
expect(currentToasts[0]).toBe(toast);
});
});
describe('#addDanger()', () => {
it('adds a danger toast', async () => {
const toasts = new ToastsApi(toastDeps());
expect(toasts.addDanger({})).toHaveProperty('color', 'danger');
});
it('returns the created toast', async () => {
const toasts = new ToastsApi(toastDeps());
const toast = toasts.addDanger({});
const currentToasts = await getCurrentToasts(toasts);
expect(currentToasts[0]).toBe(toast);
});
});
describe('#addError()', () => {
it('adds an error toast', async () => {
const toasts = new ToastsApi(toastDeps());
const toast = toasts.addError(new Error('unexpected error'), { title: 'Something went wrong' });
expect(toast).toHaveProperty('color', 'danger');
expect(toast).toHaveProperty('title', 'Something went wrong');
});
it('returns the created toast', async () => {
const toasts = new ToastsApi(toastDeps());
const toast = toasts.addError(new Error('unexpected error'), { title: 'Something went wrong' });
const currentToasts = await getCurrentToasts(toasts);
expect(currentToasts[0]).toBe(toast);
});
});
|
getCurrentToasts
|
test_unique_with_counts_op_base.py
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append('..')
from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place
import numpy as np
from functools import partial
from typing import Optional, List, Callable, Dict, Any, Set
import unittest
import hypothesis
import hypothesis.strategies as st
def sample_program_configs(draw):
in_shape = draw(
st.lists(
st.integers(
min_value=2, max_value=100), min_size=1, max_size=1))
def generate_IndexTensor():
|
unique_with_counts_op = OpConfig(
type="unique_with_counts",
inputs={"X": ["input_data"]},
outputs={
"Out": ["output_data"],
"Index": ["Index_data"],
"Count": ["Count_data"]
},
attrs={"dtype": 2})
program_config = ProgramConfig(
ops=[unique_with_counts_op],
weights={
"Index_data": TensorConfig(data_gen=partial(generate_IndexTensor))
},
inputs={"input_data": TensorConfig(shape=in_shape), },
outputs=["output_data", "Index_data", "Count_data"])
return program_config
|
return np.random.randint(1, 5, size=in_shape).astype(np.int32)
|
dep.go
|
//
// Copyright 2018-present Sonatype Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package packages
import (
"strings"
"github.com/Masterminds/semver"
"github.com/golang/dep"
)
func ExtractPurlsUsingDep(project *dep.Project) ([]string, []string)
|
{
lockedProjects := project.Lock.P
var purls []string
var invalidPurls []string
for _, lockedProject := range lockedProjects {
var version string
i := lockedProject.Version().String()
		version = strings.TrimPrefix(i, "v")
if len(version) > 0 { // There must be a version we can use
name := lockedProject.Ident().String()
packageName := convertGopkgNameToPurl(string(name))
var purl = "pkg:" + packageName + "@" + version
_, err := semver.NewVersion(version)
if err != nil {
invalidPurls = append(invalidPurls, purl)
} else {
purls = append(purls, purl)
}
}
}
return purls, invalidPurls
}
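// A minimal sketch of what convertGopkgNameToPurl (defined elsewhere in this
// package) is assumed to do: prefix the import path with the purl type so the
// result reads like "golang/github.com/foo/bar". Shown commented out since the
// real implementation lives in another file:
//
//	func convertGopkgNameToPurl(name string) string {
//		return "golang/" + name
//	}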
|
|
copy.py
|
def copy(x):
|
pass
|
|
sub_test.go
|
package ptest
import (
"bytes"
"fmt"
"html/template"
"os"
"strconv"
"testing"
"time"
)
// https://go.dev/blog/subtests
// https://go.dev/blog/fuzz-beta
func TestMain(m *testing.M) {
// Setup code
//fmt.Println("Setup")
exitCode := m.Run()
// Tear down
//fmt.Println("Tear down")
os.Exit(exitCode)
}
func BenchmarkTmplExecute(b *testing.B) {
b.ReportAllocs()
templ := template.Must(template.New("test").Parse("Hello, {{.}}!"))
b.RunParallel(func(pb *testing.PB) {
// Each goroutine has its own bytes.Buffer.
var buf bytes.Buffer
for pb.Next() {
// The loop body is executed b.N times total across all goroutines.
buf.Reset()
templ.Execute(&buf, "World")
}
})
}
func BenchmarkTemplateParallel(b *testing.B) {
templ := template.Must(template.New("test").Parse("Hello, {{.}}!"))
b.RunParallel(func(pb *testing.PB) {
		// Each goroutine has its own bytes.Buffer.
var buf bytes.Buffer
for pb.Next() {
			// The loop body is executed b.N times total across all goroutines.
buf.Reset()
templ.Execute(&buf, "World")
}
})
}
/*
func BenchmarkFib3(b *testing.B) { benchmarkFib(3, b) }
func BenchmarkFib5(b *testing.B) { benchmarkFib(5, b) }
func BenchmarkFib7(b *testing.B) { benchmarkFib(7, b) }
func BenchmarkFib9(b *testing.B) { benchmarkFib(9, b) }
*/
func BenchmarkFib(b *testing.B) {
benchmarks := []struct {
name string
value int
}{
{"Fib3", 3},
{"Fib7", 7},
{"Fib10", 10},
}
for _, bm := range benchmarks {
b.Run(bm.name, func(b *testing.B) {
benchmarkFib(bm.value, b)
})
}
}
func benchmarkFib(i int, b *testing.B) {
for n := 0; n < b.N; n++ {
Fib(i)
}
}
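// For reference, a minimal Fib the tests and benchmarks above assume (the real
// implementation lives elsewhere in this package; shown commented out to avoid
// a duplicate definition). It satisfies TestFib: Fib(3)=2, Fib(7)=13.
//
//	func Fib(n int) int {
//		if n < 2 {
//			return n
//		}
//		return Fib(n-1) + Fib(n-2)
//	}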
func TestFib(t *testing.T) {
tc := []struct {
value int
want int
}{
{3, 2},
{7, 13},
}
for _, s := range tc {
got := Fib(s.value)
//log.Printf("Fib(%d)=%d\n", s.value, got)
if got != s.want {
t.Errorf("got %d; want %d", got, s.want)
}
}
}
func BenchmarkAppendFloat(b *testing.B) {
benchmarks := []struct {
name string
float float64
fmt byte
prec int
bitSize int
}{
{"Decimal", 33909, 'g', -1, 64},
{"Float", 339.7784, 'g', -1, 64},
{"Exp", -5.09e75, 'g', -1, 64},
{"NegExp", -5.11e-95, 'g', -1, 64},
{"Big", 123456789123456789123456789, 'g', -1, 64},
}
//fmt.Println("Setup AppendFloat")
dst := make([]byte, 30)
for _, bm := range benchmarks {
b.Run(bm.name, func(b *testing.B) {
for i := 0; i < b.N; i++ {
strconv.AppendFloat(dst[:0], bm.float, bm.fmt, bm.prec, bm.bitSize)
}
})
}
//fmt.Println("Setup AppendFloat")
}
func TestTime(t *testing.T) {
testCases := [
|
{
gmt string
loc string
want string
}{
{"12:31", "Europe/Zurich", "13:05"},
{"12:31", "America/New_York", "07:34"},
{"08:08", "Australia/Sydney", "18:12"},
}
//fmt.Println("Setup TestTime")
for _, tc := range testCases {
t.Run(fmt.Sprintf("%s in %s", tc.gmt, tc.loc), func(t *testing.T) {
loc, err := time.LoadLocation(tc.loc)
if err != nil {
t.Fatal("could not load location")
}
gmt, _ := time.Parse("15:04", tc.gmt)
if got := gmt.In(loc).Format("15:04"); got != tc.want {
t.Errorf("got %s; want %s", got, tc.want)
}
})
}
//fmt.Println("Teardown TestTime")
}
|
]struct
|
db_utils.py
|
import sqlite3
|
def get_connection():
conn = sqlite3.connect('fridge.db')
conn.row_factory = sqlite3.Row
return conn
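
# Hedged usage sketch (the 'items' table is illustrative, not part of this module):
#
#   conn = get_connection()
#   try:
#       rows = conn.execute('SELECT * FROM items').fetchall()
#   finally:
#       conn.close()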
| |
shared_fs_craft_puzzle_decryption_chip.py
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
|
result = Tangible()
result.template = "object/tangible/item/quest/force_sensitive/shared_fs_craft_puzzle_decryption_chip.iff"
result.attribute_template_id = -1
result.stfName("quest_item_n","fs_craft_puzzle_decryption_chip")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
|
abstract-hd-wallet.ts
|
import { LegacyWallet } from './legacy-wallet';
import * as bip39 from 'bip39';
import { BIP32Interface } from 'bip32';
import BlueElectrum from '../../blue_modules/BlueElectrum';
import { Transaction } from './types';
type AbstractHDWalletStatics = {
derivationPath?: string;
};
/**
* @deprecated
*/
export class AbstractHDWallet extends LegacyWallet {
static type = 'abstract';
static typeReadable = 'abstract';
next_free_address_index: number; // eslint-disable-line camelcase
next_free_change_address_index: number; // eslint-disable-line camelcase
internal_addresses_cache: Record<number, string>; // eslint-disable-line camelcase
external_addresses_cache: Record<number, string>; // eslint-disable-line camelcase
_xpub: string;
usedAddresses: string[];
_address_to_wif_cache: Record<string, string>; // eslint-disable-line camelcase
gap_limit: number; // eslint-disable-line camelcase
passphrase?: string;
_node0?: BIP32Interface;
  _node1?: BIP32Interface;
  _derivationPath?: string;
constructor() {
super();
const Constructor = this.constructor as unknown as AbstractHDWalletStatics;
this.next_free_address_index = 0;
this.next_free_change_address_index = 0;
this.internal_addresses_cache = {}; // index => address
this.external_addresses_cache = {}; // index => address
this._xpub = ''; // cache
this.usedAddresses = [];
this._address_to_wif_cache = {};
this.gap_limit = 20;
this._derivationPath = Constructor.derivationPath;
}
getNextFreeAddressIndex(): number {
return this.next_free_address_index;
}
getNextFreeChangeAddressIndex(): number {
return this.next_free_change_address_index;
}
prepareForSerialization(): void {
    // deleting structures that can't be serialized
delete this._node0;
delete this._node1;
}
generate(): Promise<void> {
throw new Error('Not implemented');
}
allowSend(): boolean {
return false;
}
getTransactions(): Transaction[] {
throw new Error('Not implemented');
}
/**
* @return {Buffer} wallet seed
*/
_getSeed(): Buffer {
const mnemonic = this.secret;
const passphrase = this.passphrase;
return bip39.mnemonicToSeedSync(mnemonic, passphrase);
}
setSecret(newSecret: string): this {
this.secret = newSecret.trim().toLowerCase();
this.secret = this.secret.replace(/[^a-zA-Z0-9]/g, ' ').replace(/\s+/g, ' ');
// Try to match words to the default bip39 wordlist and complete partial words
const wordlist = bip39.wordlists[bip39.getDefaultWordlist()];
const lookupMap = wordlist.reduce((map, word) => {
const prefix3 = word.substr(0, 3);
const prefix4 = word.substr(0, 4);
map.set(prefix3, !map.has(prefix3) ? word : false);
map.set(prefix4, !map.has(prefix4) ? word : false);
return map;
}, new Map<string, string | false>());
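    // lookupMap now maps each unambiguous 3- and 4-letter prefix to its full
    // word; prefixes shared by several words map to false, so ambiguous
    // abbreviations are left as typed.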
this.secret = this.secret
.split(' ')
.map(word => lookupMap.get(word) || word)
.join(' ');
return this;
}
setPassphrase(passphrase: string): void {
this.passphrase = passphrase;
}
getPassphrase(): string | undefined {
return this.passphrase;
}
/**
* @return {Boolean} is mnemonic in `this.secret` valid
*/
validateMnemonic(): boolean {
return bip39.validateMnemonic(this.secret);
}
/**
* Derives from hierarchy, returns next free address
* (the one that has no transactions). Looks for several,
* gives up if none found, and returns the used one
*
* @return {Promise.<string>}
*/
async getAddressAsync(): Promise<string> {
// looking for free external address
let freeAddress = '';
let c;
for (c = 0; c < this.gap_limit + 1; c++) {
if (this.next_free_address_index + c < 0) continue;
const address = this._getExternalAddressByIndex(this.next_free_address_index + c);
      this.external_addresses_cache[this.next_free_address_index + c] = address; // update the cache just in case
let txs = [];
try {
txs = await BlueElectrum.getTransactionsByAddress(address);
} catch (Err: any) {
console.warn('BlueElectrum.getTransactionsByAddress()', Err.message);
}
if (txs.length === 0) {
// found free address
freeAddress = address;
this.next_free_address_index += c; // now points to _this one_
break;
}
}
if (!freeAddress) {
// could not find in cycle above, give up
      freeAddress = this._getExternalAddressByIndex(this.next_free_address_index + c); // we didn't check this one, maybe it's free
this.next_free_address_index += c; // now points to this one
}
this._address = freeAddress;
return freeAddress;
}
/**
* Derives from hierarchy, returns next free CHANGE address
* (the one that has no transactions). Looks for several,
* gives up if none found, and returns the used one
*
* @return {Promise.<string>}
*/
async getChangeAddressAsync(): Promise<string> {
// looking for free internal address
let freeAddress = '';
let c;
for (c = 0; c < this.gap_limit + 1; c++) {
if (this.next_free_change_address_index + c < 0) continue;
const address = this._getInternalAddressByIndex(this.next_free_change_address_index + c);
      this.internal_addresses_cache[this.next_free_change_address_index + c] = address; // update the cache just in case
let txs = [];
try {
txs = await BlueElectrum.getTransactionsByAddress(address);
} catch (Err: any) {
console.warn('BlueElectrum.getTransactionsByAddress()', Err.message);
|
freeAddress = address;
this.next_free_change_address_index += c; // now points to _this one_
break;
}
}
if (!freeAddress) {
// could not find in cycle above, give up
      freeAddress = this._getInternalAddressByIndex(this.next_free_change_address_index + c); // we didn't check this one, maybe it's free
this.next_free_change_address_index += c; // now points to this one
}
this._address = freeAddress;
return freeAddress;
}
/**
* Should not be used in HD wallets
*
* @deprecated
* @return {string}
*/
getAddress(): string | false {
return this._address;
}
_getExternalWIFByIndex(index: number): string {
throw new Error('Not implemented');
}
_getInternalWIFByIndex(index: number): string {
throw new Error('Not implemented');
}
_getExternalAddressByIndex(index: number): string {
throw new Error('Not implemented');
}
_getInternalAddressByIndex(index: number): string {
throw new Error('Not implemented');
}
getXpub(): string {
throw new Error('Not implemented');
}
/**
* Async function to fetch all transactions. Use getter to get actual txs.
* Also, sets internals:
* `this.internal_addresses_cache`
* `this.external_addresses_cache`
*
* @returns {Promise<void>}
*/
async fetchTransactions(): Promise<void> {
throw new Error('not implemented');
}
/**
* Given that `address` is in our HD hierarchy, try to find
* corresponding WIF
*
* @param address {String} In our HD hierarchy
* @return {String} WIF if found
*/
_getWifForAddress(address: string): string {
if (this._address_to_wif_cache[address]) return this._address_to_wif_cache[address]; // cache hit
    // fast approach: first let's iterate over all addresses we have in cache
for (const indexStr of Object.keys(this.internal_addresses_cache)) {
const index = parseInt(indexStr);
if (this._getInternalAddressByIndex(index) === address) {
return (this._address_to_wif_cache[address] = this._getInternalWIFByIndex(index));
}
}
for (const indexStr of Object.keys(this.external_addresses_cache)) {
const index = parseInt(indexStr);
if (this._getExternalAddressByIndex(index) === address) {
return (this._address_to_wif_cache[address] = this._getExternalWIFByIndex(index));
}
}
    // no luck - let's iterate over all addresses we have up to the first unused address index
for (let c = 0; c <= this.next_free_change_address_index + this.gap_limit; c++) {
const possibleAddress = this._getInternalAddressByIndex(c);
if (possibleAddress === address) {
return (this._address_to_wif_cache[address] = this._getInternalWIFByIndex(c));
}
}
for (let c = 0; c <= this.next_free_address_index + this.gap_limit; c++) {
const possibleAddress = this._getExternalAddressByIndex(c);
if (possibleAddress === address) {
return (this._address_to_wif_cache[address] = this._getExternalWIFByIndex(c));
}
}
throw new Error('Could not find WIF for ' + address);
}
async fetchBalance(): Promise<void> {
throw new Error('Not implemented');
}
/**
* @inheritDoc
*/
async fetchUtxo(): Promise<void> {
throw new Error('Not implemented');
}
_getDerivationPathByAddress(address: string): string | false {
throw new Error('Not implemented');
}
_getNodePubkeyByIndex(node: number, index: number): Buffer | undefined {
throw new Error('Not implemented');
}
/**
* @returns {string} Root derivation path for wallet if any
*/
getDerivationPath() {
return this._derivationPath;
}
/*
* Set derivation path for the wallet
*
* @param {String} path - path
*/
setDerivationPath(path: string) {
this._derivationPath = path;
}
}
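// Hedged usage sketch: concrete wallets subclass AbstractHDWallet and supply
// the derivation hooks it leaves unimplemented. The names below are purely
// illustrative (deriveAddress is hypothetical), so this stays commented out:
//
// class ExampleHDWallet extends AbstractHDWallet {
//   static type = 'example';
//   static typeReadable = 'Example HD';
//   _getExternalAddressByIndex(index: number): string {
//     return deriveAddress(this._getSeed(), index, /* change: */ 0);
//   }
//   _getInternalAddressByIndex(index: number): string {
//     return deriveAddress(this._getSeed(), index, /* change: */ 1);
//   }
// }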
|
}
if (txs.length === 0) {
// found free address
|
dataset.py
|
import logging
import os
import urllib
from markupsafe import escape
import paste.httpexceptions
from six import string_types, text_type
from sqlalchemy import false, true
from galaxy import datatypes, model, util, web
from galaxy import managers
from galaxy.datatypes.display_applications.util import decode_dataset_user, encode_dataset_user
from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings
from galaxy.util import inflector, smart_str
from galaxy.util.sanitize_html import sanitize_html
from galaxy.web.base.controller import BaseUIController, ERROR, SUCCESS, url_for, UsesExtendedMetadataMixin
from galaxy.web.framework.helpers import grids, iff, time_ago, to_unicode
from galaxy.tools.errors import EmailErrorReporter
log = logging.getLogger( __name__ )
comptypes = []
try:
import zlib # noqa: F401
comptypes.append( 'zip' )
except ImportError:
pass
class HistoryDatasetAssociationListGrid( grids.Grid ):
# Custom columns for grid.
class HistoryColumn( grids.GridColumn ):
def get_value( self, trans, grid, hda):
return escape(hda.history.name)
class StatusColumn( grids.GridColumn ):
def get_value( self, trans, grid, hda ):
if hda.deleted:
return "deleted"
return ""
def get_accepted_filters( self ):
""" Returns a list of accepted filters for this column. """
accepted_filter_labels_and_vals = { "Active" : "False", "Deleted" : "True", "All": "All" }
accepted_filters = []
for label, val in accepted_filter_labels_and_vals.items():
args = { self.key: val }
accepted_filters.append( grids.GridColumnFilter( label, args) )
return accepted_filters
# Grid definition
title = "Saved Datasets"
model_class = model.HistoryDatasetAssociation
template = '/dataset/grid.mako'
default_sort_key = "-update_time"
columns = [
grids.TextColumn( "Name", key="name",
# Link name to dataset's history.
link=( lambda item: iff( item.history.deleted, None, dict( operation="switch", id=item.id ) ) ), filterable="advanced", attach_popup=True ),
HistoryColumn( "History", key="history", sortable=False, target="inbound",
link=( lambda item: iff( item.history.deleted, None, dict( operation="switch_history", id=item.id ) ) ) ),
grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.HistoryDatasetAssociationTagAssociation, filterable="advanced", grid_name="HistoryDatasetAssocationListGrid" ),
StatusColumn( "Status", key="deleted", attach_popup=False ),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
]
columns.append(
grids.MulticolFilterColumn(
"Search",
cols_to_filter=[ columns[0], columns[2] ],
key="free-text-search", visible=False, filterable="standard" )
)
operations = [
grids.GridOperation( "Copy to current history", condition=( lambda item: not item.deleted ), async_compatible=True ),
]
standard_filters = []
default_filter = dict( name="All", deleted="False", tags="All" )
preserve_state = False
use_async = True
use_paging = True
num_rows_per_page = 50
def build_initial_query( self, trans, **kwargs ):
# Show user's datasets that are not deleted, not in deleted histories, and not hidden.
# To filter HDAs by user, need to join model class/HDA and History table so that it is
# possible to filter by user. However, for dictionary-based filtering to work, need a
# primary table for the query.
return trans.sa_session.query( self.model_class ).select_from( self.model_class.table.join( model.History.table ) ) \
.filter( model.History.user == trans.user ) \
.filter( self.model_class.deleted == false() ) \
.filter( model.History.deleted == false() ) \
.filter( self.model_class.visible == true() )
class DatasetInterface( BaseUIController, UsesAnnotations, UsesItemRatings, UsesExtendedMetadataMixin ):
stored_list_grid = HistoryDatasetAssociationListGrid()
def __init__( self, app ):
super( DatasetInterface, self ).__init__( app )
self.history_manager = managers.histories.HistoryManager( app )
self.hda_manager = managers.hdas.HDAManager( app )
def _get_job_for_dataset( self, trans, dataset_id ):
'''
Return the job for the given dataset. This will throw an error if the
dataset is either nonexistent or inaccessible to the user.
'''
hda = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( self.decode_id( dataset_id ) )
assert hda and self._can_access_dataset( trans, hda )
return hda.creating_job
def _can_access_dataset( self, trans, dataset_association, allow_admin=True, additional_roles=None ):
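        """Return True if the user may access the dataset: admins pass when
        allow_admin is True; otherwise access is decided by the user's (and any
        additional) roles against the dataset's access permissions."""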
roles = trans.get_current_user_roles()
if additional_roles:
roles = roles + additional_roles
return ( allow_admin and trans.user_is_admin() ) or trans.app.security_agent.can_access_dataset( roles, dataset_association.dataset )
@web.expose
def errors( self, trans, id ):
hda = trans.sa_session.query( model.HistoryDatasetAssociation ).get( self.decode_id( id ) )
if not hda or not self._can_access_dataset( trans, hda ):
return trans.show_error_message( "Either this dataset does not exist or you do not have permission to access it." )
return trans.fill_template( "dataset/errors.mako", hda=hda )
@web.expose
def stdout( self, trans, dataset_id=None, **kwargs ):
trans.response.set_content_type( 'text/plain' )
stdout = ""
try:
job = self._get_job_for_dataset( trans, dataset_id )
stdout = job.stdout
except:
stdout = "Invalid dataset ID or you are not allowed to access this dataset"
return smart_str( stdout )
@web.expose
# TODO: Migrate stderr and stdout to use _get_job_for_dataset; it wasn't tested.
def stderr( self, trans, dataset_id=None, **kwargs ):
trans.response.set_content_type( 'text/plain' )
stderr = ""
try:
job = self._get_job_for_dataset( trans, dataset_id )
stderr = job.stderr
except:
stderr = "Invalid dataset ID or you are not allowed to access this dataset"
return smart_str( stderr )
@web.expose
def exit_code( self, trans, dataset_id=None, **kwargs ):
trans.response.set_content_type( 'text/plain' )
exit_code = ""
try:
job = self._get_job_for_dataset( trans, dataset_id )
exit_code = job.exit_code
except:
exit_code = "Invalid dataset ID or you are not allowed to access this dataset"
return exit_code
@web.expose
def report_error( self, trans, id, email='', message="", **kwd ):
biostar_report = 'biostar' in str( kwd.get( 'submit_error_report') ).lower()
if biostar_report:
return trans.response.send_redirect( url_for( controller='biostar', action='biostar_tool_bug_report', hda=id, email=email, message=message ) )
try:
error_reporter = EmailErrorReporter( id, trans.app )
error_reporter.send_report( user=trans.user, email=email, message=message )
return trans.show_ok_message( "Your error report has been sent" )
except Exception as e:
return trans.show_error_message( "An error occurred sending the report by email: %s" % str( e ) )
@web.expose
def
|
(self, trans, dataset_id=None, **kwd):
return 'This link may not be followed from within Galaxy.'
@web.expose
def get_metadata_file(self, trans, hda_id, metadata_name):
""" Allows the downloading of metadata files associated with datasets (eg. bai index for bam files) """
data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( self.decode_id( hda_id ) )
if not data or not self._can_access_dataset( trans, data ):
return trans.show_error_message( "You are not allowed to access this dataset" )
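        # Build a filesystem-safe download name: any character outside
        # FILENAME_VALID_CHARS becomes '_' and the result is capped at 150 chars.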
fname = ''.join(c in util.FILENAME_VALID_CHARS and c or '_' for c in data.name)[0:150]
file_ext = data.metadata.spec.get(metadata_name).get("file_ext", metadata_name)
trans.response.headers["Content-Type"] = "application/octet-stream"
trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (data.hid, fname, file_ext)
return open(data.metadata.get(metadata_name).file_name)
def _check_dataset(self, trans, hda_id):
# DEPRECATION: We still support unencoded ids for backward compatibility
try:
data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( self.decode_id( hda_id) )
if data is None:
raise ValueError( 'Invalid reference dataset id: %s.' % hda_id)
except:
try:
data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( int( hda_id ) )
except:
data = None
if not data:
raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( hda_id ) )
if not self._can_access_dataset( trans, data ):
return trans.show_error_message( "You are not allowed to access this dataset" )
if data.purged:
return trans.show_error_message( "The dataset you are attempting to view has been purged." )
if data.deleted and not ( trans.user_is_admin() or ( data.history and trans.get_user() == data.history.user ) ):
return trans.show_error_message( "The dataset you are attempting to view has been deleted." )
if data.state == trans.model.Dataset.states.UPLOAD:
return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to view it." )
return data
@web.expose
@web.json
def transfer_status(self, trans, dataset_id, filename=None):
""" Primarily used for the S3ObjectStore - get the status of data transfer
if the file is not in cache """
data = self._check_dataset(trans, dataset_id)
if isinstance( data, string_types ):
return data
log.debug( "Checking transfer status for dataset %s..." % data.dataset.id )
# Pulling files in extra_files_path into cache is not handled via this
# method but that's primarily because those files are typically linked to
# through tool's output page anyhow so tying a JavaScript event that will
# call this method does not seem doable?
if data.dataset.external_filename:
return True
else:
return trans.app.object_store.file_ready(data.dataset)
@web.expose
def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, offset=None, ck_size=None, **kwd):
data = self._check_dataset(trans, dataset_id)
if not isinstance( data, trans.app.model.DatasetInstance ):
return data
# Ensure offset is an integer before passing through to datatypes.
if offset:
offset = int(offset)
# Ensure ck_size is an integer before passing through to datatypes.
if ck_size:
ck_size = int(ck_size)
return data.datatype.display_data(trans, data, preview, filename, to_ext, offset=offset, ck_size=ck_size, **kwd)
@web.expose
def edit(self, trans, dataset_id=None, filename=None, hid=None, **kwd):
"""Allows user to modify parameters of an HDA."""
message = None
status = 'done'
refresh_frames = []
error = False
def __ok_to_edit_metadata( dataset_id ):
# prevent modifying metadata when dataset is queued or running as input/output
# This code could be more efficient, i.e. by using mappers, but to prevent slowing down loading a History panel, we'll leave the code here for now
for job_to_dataset_association in trans.sa_session.query(
self.app.model.JobToInputDatasetAssociation ) \
.filter_by( dataset_id=dataset_id ) \
.all() \
+ trans.sa_session.query( self.app.model.JobToOutputDatasetAssociation ) \
.filter_by( dataset_id=dataset_id ) \
.all():
if job_to_dataset_association.job.state not in [ job_to_dataset_association.job.states.OK, job_to_dataset_association.job.states.ERROR, job_to_dataset_association.job.states.DELETED ]:
return False
return True
if hid is not None:
history = trans.get_history()
# TODO: hid handling
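            # hids shown in the UI are 1-based, hence the -1 when indexing history.datasets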
data = history.datasets[ int( hid ) - 1 ]
id = None
elif dataset_id is not None:
id = self.decode_id( dataset_id )
data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
else:
trans.log_event( "dataset_id and hid are both None, cannot load a dataset to edit" )
return trans.show_error_message( "You must provide a history dataset id to edit" )
if data is None:
trans.log_event( "Problem retrieving dataset (encoded: %s, decoded: %s) with history id %s." % ( str( dataset_id ), str( id ), str( hid ) ) )
return trans.show_error_message( "History dataset id is invalid" )
if dataset_id is not None and data.history.user is not None and data.history.user != trans.user:
trans.log_event( "User attempted to edit an HDA they do not own (encoded: %s, decoded: %s)" % ( dataset_id, id ) )
# Do not reveal the dataset's existence
return trans.show_error_message( "History dataset id is invalid" )
current_user_roles = trans.get_current_user_roles()
if data.history.user and not data.dataset.has_manage_permissions_roles( trans ):
# Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time,
# so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS
# permission. In this case, we'll reset this permission to the hda user's private role.
manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
permissions = { manage_permissions_action : [ trans.app.security_agent.get_private_user_role( data.history.user ) ] }
trans.app.security_agent.set_dataset_permission( data.dataset, permissions )
if self._can_access_dataset( trans, data ):
if data.state == trans.model.Dataset.states.UPLOAD:
return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." )
params = util.Params( kwd, sanitize=False )
if params.change:
# The user clicked the Save button on the 'Change data type' form
if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
# prevent modifying datatype when dataset is queued or running as input/output
if not __ok_to_edit_metadata( data.id ):
message = "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them."
error = True
else:
trans.app.datatypes_registry.change_datatype( data, params.datatype )
trans.sa_session.flush()
trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming={ 'input1': data }, overwrite=False ) # overwrite is False as per existing behavior
message = "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype )
refresh_frames = ['history']
else:
message = "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype )
error = True
elif params.save:
# The user clicked the Save button on the 'Edit Attributes' form
data.name = params.name if params.name else ''
data.info = params.info if params.info else ''
message = ''
if __ok_to_edit_metadata( data.id ):
# The following for loop will save all metadata_spec items
for name, spec in data.datatype.metadata_spec.items():
if spec.get("readonly"):
continue
optional = params.get("is_" + name, None)
other = params.get("or_" + name, None)
if optional and optional == '__NOTHING__':
# optional element... == '__NOTHING__' actually means it is NOT checked (and therefore omitted)
setattr(data.metadata, name, None)
else:
if other:
setattr( data.metadata, name, other )
else:
setattr( data.metadata, name, spec.unwrap( params.get(name, None) ) )
data.datatype.after_setting_metadata( data )
# Sanitize annotation before adding it.
if params.annotation:
annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' )
self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation )
# This block on controller code is inactive until the 'extended_metadata' edit box is added back into the UI
# Add or delete extended metadata
# if params.extended_metadata:
# em_string = params.extended_metadata
# if len(em_string):
# em_payload = None
# try:
# em_payload = loads(em_string)
# except Exception as e:
# message = 'Invalid JSON input'
# error = True
# if em_payload is not None:
# if data is not None:
# ex_obj = self.get_item_extended_metadata_obj(trans, data)
# if ex_obj is not None:
# self.unset_item_extended_metadata_obj(trans, data)
# self.delete_extended_metadata(trans, ex_obj)
# ex_obj = self.create_extended_metadata(trans, em_payload)
# self.set_item_extended_metadata_obj(trans, data, ex_obj)
# message = "Updated Extended metadata '%s'." % data.name
# status = 'done'
# else:
# message = "data not found"
# error = True
# else:
# if data is not None:
# ex_obj = self.get_item_extended_metadata_obj(trans, data)
# if ex_obj is not None:
# self.unset_item_extended_metadata_obj(trans, data)
# self.delete_extended_metadata(trans, ex_obj)
# message = "Deleted Extended metadata '%s'." % data.name
# status = 'done'
# If setting metadata previously failed and all required elements have now been set, clear the failed state.
if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta():
data._state = None
trans.sa_session.flush()
message = "Attributes updated%s" % message
refresh_frames = ['history']
else:
trans.sa_session.flush()
message = "Attributes updated, but metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata."
status = "warning"
refresh_frames = ['history']
elif params.detect:
# The user clicked the Auto-detect button on the 'Edit Attributes' form
# prevent modifying metadata when dataset is queued or running as input/output
if not __ok_to_edit_metadata( data.id ):
message = "This dataset is currently being used as input or output. You cannot change metadata until the jobs have completed or you have canceled them."
error = True
else:
for name, spec in data.metadata.spec.items():
# We need to be careful about the attributes we are resetting
if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
if spec.get( 'default' ):
setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
message = 'Attributes have been queued to be updated'
trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming={ 'input1': data } )
trans.sa_session.flush()
refresh_frames = ['history']
elif params.convert_data:
target_type = kwd.get("target_type", None)
if target_type:
message = data.datatype.convert_dataset(trans, data, target_type)
refresh_frames = ['history']
elif params.update_roles_button:
if not trans.user:
return trans.show_error_message( "You must be logged in if you want to change permissions." )
if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
access_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action )
manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
# The user associated the DATASET_ACCESS permission on the dataset with 1 or more roles. We
# need to ensure that they did not associate roles that would cause accessibility problems.
permissions, in_roles, error, message = \
trans.app.security_agent.derive_roles_from_access( trans, data.dataset.id, 'root', **kwd )
if error:
# Keep the original role associations for the DATASET_ACCESS permission on the dataset.
permissions[ access_action ] = data.dataset.get_access_roles( trans )
status = 'error'
else:
error = trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
if error:
message += error
status = 'error'
else:
message = 'Your changes completed successfully.'
trans.sa_session.refresh( data.dataset )
else:
message = "You are not authorized to change this dataset's permissions"
error = True
else:
if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey:
                # Copy dbkey into metadata, for backwards compatibility
# This looks like it does nothing, but getting the dbkey
# returns the metadata dbkey unless it is None, in which
# case it resorts to the old dbkey. Setting the dbkey
# sets it properly in the metadata
# This is likely no longer required, since the dbkey exists entirely within metadata (the old_dbkey field is gone): REMOVE ME?
data.metadata.dbkey = data.dbkey
# let's not overwrite the imported datatypes module with the variable datatypes?
# the built-in 'id' is overwritten in lots of places as well
ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
ldatatypes.sort()
all_roles = trans.app.security_agent.get_legitimate_roles( trans, data.dataset, 'root' )
if error:
status = 'error'
return trans.fill_template( "/dataset/edit_attributes.mako",
data=data,
data_annotation=self.get_item_annotation_str( trans.sa_session, trans.user, data ),
datatypes=ldatatypes,
current_user_roles=current_user_roles,
all_roles=all_roles,
message=message,
status=status,
dataset_id=dataset_id,
refresh_frames=refresh_frames )
else:
return trans.show_error_message( "You do not have permission to edit this dataset's ( id: %s ) information." % str( dataset_id ) )
@web.expose
@web.require_login( "see all available datasets" )
def list( self, trans, **kwargs ):
"""List all available datasets"""
status = message = None
if 'operation' in kwargs:
operation = kwargs['operation'].lower()
hda_ids = util.listify( kwargs.get( 'id', [] ) )
# Display no message by default
status, message = None, None
# Load the hdas and ensure they all belong to the current user
hdas = []
for encoded_hda_id in hda_ids:
hda_id = self.decode_id( encoded_hda_id )
hda = trans.sa_session.query( model.HistoryDatasetAssociation ).filter_by( id=hda_id ).first()
if hda:
# Ensure history is owned by current user
if hda.history.user_id is not None and trans.user:
                        assert trans.user.id == hda.history.user_id, "HistoryDatasetAssociation does not belong to current user"
hdas.append( hda )
else:
log.warning( "Invalid history_dataset_association id '%r' passed to list", hda_id )
if hdas:
if operation == "switch" or operation == "switch_history":
# Switch to a history that the HDA resides in.
# Convert hda to histories.
histories = []
for hda in hdas:
histories.append( hda.history )
# Use history controller to switch the history. TODO: is this reasonable?
status, message = trans.webapp.controllers['history']._list_switch( trans, histories )
# Current history changed, refresh history frame; if switching to a dataset, set hda seek.
trans.template_context['refresh_frames'] = ['history']
if operation == "switch":
hda_ids = [ trans.security.encode_id( hda.id ) for hda in hdas ]
trans.template_context[ 'seek_hda_ids' ] = hda_ids
elif operation == "copy to current history":
#
# Copy datasets to the current history.
#
target_histories = [ trans.get_history() ]
# Reverse HDAs so that they appear in the history in the order they are provided.
hda_ids.reverse()
status, message = self._copy_datasets( trans, hda_ids, target_histories )
# Current history changed, refresh history frame.
trans.template_context['refresh_frames'] = ['history']
# Render the list view
return self.stored_list_grid( trans, status=status, message=message, **kwargs )
@web.expose
def imp( self, trans, dataset_id=None, **kwd ):
""" Import another user's dataset via a shared URL; dataset is added to user's current history. """
# Set referer message.
referer = trans.request.referer
if referer:
referer_message = "<a href='%s'>return to the previous page</a>" % escape(referer)
else:
referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
# Error checking.
if not dataset_id:
return trans.show_error_message( "You must specify a dataset to import. You can %s." % referer_message, use_panels=True )
# Do import.
cur_history = trans.get_history( create=True )
status, message = self._copy_datasets( trans, [ dataset_id ], [ cur_history ], imported=True )
message = "Dataset imported. <br>You can <a href='%s'>start using the dataset</a> or %s." % ( url_for('/'), referer_message )
return trans.show_message( message, type=status, use_panels=True )
@web.expose
@web.json
@web.require_login( "use Galaxy datasets" )
def get_name_and_link_async( self, trans, id=None ):
""" Returns dataset's name and link. """
decoded_id = self.decode_id( id )
dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
dataset = self.hda_manager.error_if_uploading( dataset )
return_dict = { "name" : dataset.name, "link" : url_for( controller='dataset', action="display_by_username_and_slug", username=dataset.history.user.username, slug=trans.security.encode_id( dataset.id ) ) }
return return_dict
@web.expose
def get_embed_html_async( self, trans, id ):
""" Returns HTML for embedding a dataset in a page. """
decoded_id = self.decode_id( id )
dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
dataset = self.hda_manager.error_if_uploading( dataset )
if dataset:
return "Embedded Dataset '%s'" % dataset.name
@web.expose
@web.require_login( "use Galaxy datasets" )
def set_accessible_async( self, trans, id=None, accessible=False ):
""" Does nothing because datasets do not have an importable/accessible attribute. This method could potentially set another attribute. """
return
@web.expose
@web.require_login( "rate items" )
@web.json
def rate_async( self, trans, id, rating ):
""" Rate a dataset asynchronously and return updated community data. """
decoded_id = self.decode_id( id )
dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
dataset = self.hda_manager.error_if_uploading( dataset )
if not dataset:
return trans.show_error_message( "The specified dataset does not exist." )
# Rate dataset.
self.rate_item( trans.sa_session, trans.get_user(), dataset, rating )
return self.get_ave_item_rating_data( trans.sa_session, dataset )
@web.expose
def display_by_username_and_slug( self, trans, username, slug, filename=None, preview=True ):
""" Display dataset by username and slug; because datasets do not yet have slugs, the slug is the dataset's id. """
id = slug
decoded_id = self.decode_id( id )
dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
dataset = self.hda_manager.error_if_uploading( dataset )
if dataset:
# Filename used for composite types.
if filename:
return self.display( trans, dataset_id=slug, filename=filename)
truncated, dataset_data = self.hda_manager.text_data( dataset, preview )
dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset )
# If dataset is chunkable, get first chunk.
first_chunk = None
if dataset.datatype.CHUNKABLE:
first_chunk = dataset.datatype.get_chunk(trans, dataset, 0)
# If data is binary or an image, stream without template; otherwise, use display template.
# TODO: figure out a way to display images in display template.
if isinstance(dataset.datatype, datatypes.binary.Binary) or isinstance(dataset.datatype, datatypes.images.Image) or isinstance(dataset.datatype, datatypes.text.Html):
trans.response.set_content_type( dataset.get_mime() )
return open( dataset.file_name )
else:
# Get rating data.
user_item_rating = 0
if trans.get_user():
user_item_rating = self.get_user_item_rating( trans.sa_session, trans.get_user(), dataset )
if user_item_rating:
user_item_rating = user_item_rating.rating
else:
user_item_rating = 0
ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, dataset )
return trans.fill_template_mako( "/dataset/display.mako", item=dataset, item_data=dataset_data,
truncated=truncated, user_item_rating=user_item_rating,
ave_item_rating=ave_item_rating, num_ratings=num_ratings,
first_chunk=first_chunk )
else:
raise web.httpexceptions.HTTPNotFound()
@web.expose
def get_item_content_async( self, trans, id ):
""" Returns item content in HTML format. """
decoded_id = self.decode_id( id )
dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
dataset = self.hda_manager.error_if_uploading( dataset )
if dataset is None:
raise web.httpexceptions.HTTPNotFound()
truncated, dataset_data = self.hda_manager.text_data( dataset, preview=True )
# Get annotation.
dataset.annotation = self.get_item_annotation_str( trans.sa_session, trans.user, dataset )
return trans.stream_template_mako( "/dataset/item_content.mako", item=dataset, item_data=dataset_data, truncated=truncated )
@web.expose
def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
# TODO:?? why is this an access check only?
decoded_id = self.decode_id( id )
dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
dataset = self.hda_manager.error_if_uploading( dataset )
if not dataset:
            raise web.httpexceptions.HTTPNotFound()
if dataset and new_annotation:
# Sanitize annotation before adding it.
new_annotation = sanitize_html( new_annotation, 'utf-8', 'text/html' )
self.add_item_annotation( trans.sa_session, trans.get_user(), dataset, new_annotation )
trans.sa_session.flush()
return new_annotation
@web.expose
def get_annotation_async( self, trans, id ):
decoded_id = self.decode_id( id )
dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
dataset = self.hda_manager.error_if_uploading( dataset )
if not dataset:
            raise web.httpexceptions.HTTPNotFound()
annotation = self.get_item_annotation_str( trans.sa_session, trans.user, dataset )
if annotation and isinstance( annotation, text_type ):
annotation = annotation.encode( 'ascii', 'replace' ) # paste needs ascii here
return annotation
@web.expose
def display_at( self, trans, dataset_id, filename=None, **kwd ):
"""Sets up a dataset permissions so it is viewable at an external site"""
if not trans.app.config.enable_old_display_applications:
return trans.show_error_message( "This method of accessing external display applications has been disabled by a Galaxy administrator." )
site = filename
data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dataset_id )
if not data:
raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
if 'display_url' not in kwd or 'redirect_url' not in kwd:
return trans.show_error_message( 'Invalid parameters specified for "display at" link, please contact a Galaxy administrator' )
try:
redirect_url = kwd['redirect_url'] % urllib.quote_plus( kwd['display_url'] )
except:
redirect_url = kwd['redirect_url'] # not all will need custom text
if trans.app.security_agent.dataset_is_public( data.dataset ):
return trans.response.send_redirect( redirect_url ) # anon access already permitted by rbac
if self._can_access_dataset( trans, data ):
trans.app.host_security_agent.set_dataset_permissions( data, trans.user, site )
return trans.response.send_redirect( redirect_url )
else:
return trans.show_error_message( "You are not allowed to view this dataset at external sites. Please contact your Galaxy administrator to acquire management permissions for this dataset." )
@web.expose
def display_application( self, trans, dataset_id=None, user_id=None, app_name=None, link_name=None, app_action=None, action_param=None, action_param_extra=None, **kwds ):
"""Access to external display applications"""
# Build list of parameters to pass in to display application logic (app_kwds)
app_kwds = {}
for name, value in dict(kwds).iteritems(): # clone kwds because we remove stuff as we go.
if name.startswith( "app_" ):
app_kwds[ name[ len( "app_" ): ] ] = value
del kwds[ name ]
if kwds:
log.debug( "Unexpected Keywords passed to display_application: %s" % kwds ) # route memory?
# decode ids
data, user = decode_dataset_user( trans, dataset_id, user_id )
if not data:
raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
if user is None:
user = trans.user
if user:
user_roles = user.all_roles()
else:
user_roles = []
# Decode application name and link name
app_name = urllib.unquote_plus( app_name )
link_name = urllib.unquote_plus( link_name )
if None in [ app_name, link_name ]:
return trans.show_error_message( "A display application name and link name must be provided." )
if self._can_access_dataset( trans, data, additional_roles=user_roles ):
msg = []
preparable_steps = []
refresh = False
display_app = trans.app.datatypes_registry.display_applications.get( app_name )
if not display_app:
log.debug( "Unknown display application has been requested: %s", app_name )
return paste.httpexceptions.HTTPNotFound( "The requested display application (%s) is not available." % ( app_name ) )
dataset_hash, user_hash = encode_dataset_user( trans, data, user )
try:
display_link = display_app.get_link( link_name, data, dataset_hash, user_hash, trans, app_kwds )
except Exception as e:
log.debug( "Error generating display_link: %s", e )
# User can sometimes recover from, e.g. conversion errors by fixing input metadata, so use conflict
return paste.httpexceptions.HTTPConflict( "Error generating display_link: %s" % e )
if not display_link:
log.debug( "Unknown display link has been requested: %s", link_name )
return paste.httpexceptions.HTTPNotFound( "Unknown display link has been requested: %s" % link_name )
if data.state == data.states.ERROR:
msg.append( ( 'This dataset is in an error state, you cannot view it at an external display application.', 'error' ) )
elif data.deleted:
msg.append( ( 'This dataset has been deleted, you cannot view it at an external display application.', 'error' ) )
elif data.state != data.states.OK:
msg.append( ( 'You must wait for this dataset to be created before you can view it at an external display application.', 'info' ) )
refresh = True
else:
# We have permissions, dataset is not deleted and is in OK state, allow access
if display_link.display_ready():
if app_action in [ 'data', 'param' ]:
assert action_param, "An action param must be provided for a data or param action"
# 'data' is used for things with filenames that could be passed off to a proxy,
# in case some display app wants all files to be in the same 'directory'.
# 'data' can be forced to 'param', but not the other way around (there is no filename for the other direction).
# Get the param name from the URL param name.
try:
action_param = display_link.get_param_name_by_url( action_param )
except ValueError as e:
log.debug( e )
return paste.httpexceptions.HTTPNotFound( str( e ) )
value = display_link.get_param_value( action_param )
assert value, "An invalid parameter name was provided: %s" % action_param
assert value.parameter.viewable, "This parameter is not viewable."
if value.parameter.type == 'data':
try:
if action_param_extra:
assert value.parameter.allow_extra_files_access, "Extra file content requested (%s), but allow_extra_files_access is False." % ( action_param_extra )
file_name = os.path.join( value.extra_files_path, action_param_extra )
else:
file_name = value.file_name
content_length = os.path.getsize( file_name )
rval = open( file_name )
except OSError as e:
log.debug( "Unable to access requested file in display application: %s", e )
return paste.httpexceptions.HTTPNotFound( "This file is no longer available." )
else:
rval = str( value )
content_length = len( rval )
trans.response.set_content_type( value.mime_type( action_param_extra=action_param_extra ) )
trans.response.headers[ 'Content-Length' ] = content_length
return rval
elif app_action is None:
# redirect user to url generated by display link
# Fix for Safari caching display links, which can change if the underlying dataset has an attribute change, e.g. name, metadata, etc
trans.response.headers[ 'Cache-Control' ] = [ 'no-cache', 'max-age=0', 'no-store', 'must-revalidate' ]
return trans.response.send_redirect( display_link.display_url() )
else:
msg.append( ( 'Invalid action provided: %s' % app_action, 'error' ) )
else:
if app_action is None:
if trans.history != data.history:
msg.append( ( 'You must import this dataset into your current history before you can view it at the desired display application.', 'error' ) )
else:
refresh = True
msg.append( ( 'Launching this display application required additional datasets to be generated, you can view the status of these jobs below. ', 'info' ) )
if not display_link.preparing_display():
display_link.prepare_display()
preparable_steps = display_link.get_prepare_steps()
else:
raise Exception( 'Attempted a view action (%s) on a non-ready display application' % app_action )
return trans.fill_template_mako( "dataset/display_application/display.mako",
msg=msg,
display_app=display_app,
display_link=display_link,
refresh=refresh,
preparable_steps=preparable_steps )
return trans.show_error_message( 'You do not have permission to view this dataset at an external display application.' )
def _delete( self, trans, dataset_id ):
message = None
status = 'done'
id = None
try:
id = self.decode_id( dataset_id )
hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
assert hda, 'Invalid HDA: %s' % id
# Walk up parent datasets to find the containing history
topmost_parent = hda
while topmost_parent.parent:
topmost_parent = topmost_parent.parent
assert topmost_parent in trans.history.datasets, "Data does not belong to current history"
# Mark deleted and cleanup
hda.mark_deleted()
hda.clear_associated_files()
trans.log_event( "Dataset id %s marked as deleted" % str(id) )
self.hda_manager.stop_creating_job( hda )
trans.sa_session.flush()
except Exception as e:
msg = 'HDA deletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
log.exception( msg + ': ' + str( e ) )
trans.log_event( msg )
message = 'Dataset deletion failed'
status = 'error'
return ( message, status )
def _undelete( self, trans, dataset_id ):
message = None
status = 'done'
id = None
try:
id = self.decode_id( dataset_id )
history = trans.get_history()
hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
assert hda and hda.undeletable, 'Invalid HDA: %s' % id
# Walk up parent datasets to find the containing history
topmost_parent = hda
while topmost_parent.parent:
topmost_parent = topmost_parent.parent
assert topmost_parent in history.datasets, "Data does not belong to current history"
# Mark undeleted
hda.mark_undeleted()
trans.sa_session.flush()
trans.log_event( "Dataset id %s has been undeleted" % str(id) )
except Exception:
msg = 'HDA undeletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
log.exception( msg )
trans.log_event( msg )
message = 'Dataset undeletion failed'
status = 'error'
return ( message, status )
def _unhide( self, trans, dataset_id ):
try:
id = self.decode_id( dataset_id )
except:
return False
history = trans.get_history()
hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
if hda:
# Walk up parent datasets to find the containing history
topmost_parent = hda
while topmost_parent.parent:
topmost_parent = topmost_parent.parent
assert topmost_parent in history.datasets, "Data does not belong to current history"
# Mark undeleted
hda.mark_unhidden()
trans.sa_session.flush()
trans.log_event( "Dataset id %s has been unhidden" % str(id) )
return True
return False
def _purge( self, trans, dataset_id ):
message = None
status = 'done'
try:
id = self.decode_id( dataset_id )
user = trans.get_user()
hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
# Invalid HDA
assert hda, 'Invalid history dataset ID'
# Walk up parent datasets to find the containing history
topmost_parent = hda
while topmost_parent.parent:
topmost_parent = topmost_parent.parent
# If the user is anonymous, make sure the HDA is owned by the current session.
if not user:
current_history_id = trans.galaxy_session.current_history_id
assert topmost_parent.history.id == current_history_id, 'Data does not belong to current user'
# If the user is known, make sure the HDA is owned by the current user.
else:
assert topmost_parent.history.user == user, 'Data does not belong to current user'
# Ensure HDA is deleted
hda.deleted = True
# HDA is purgeable
# Decrease disk usage first
if user:
user.adjust_total_disk_usage(-hda.quota_amount(user))
# Mark purged
hda.purged = True
trans.sa_session.add( hda )
trans.log_event( "HDA id %s has been purged" % hda.id )
trans.sa_session.flush()
# Don't delete anything if there are active HDAs or any LDDAs, even if
# the LDDAs are deleted. Let the cleanup scripts get it in the latter
# case.
if hda.dataset.user_can_purge:
try:
hda.dataset.full_delete()
trans.log_event( "Dataset id %s has been purged upon the the purge of HDA id %s" % ( hda.dataset.id, hda.id ) )
trans.sa_session.add( hda.dataset )
except:
log.exception( 'Unable to purge dataset (%s) on purge of HDA (%s):' % ( hda.dataset.id, hda.id ) )
trans.sa_session.flush()
except Exception as exc:
msg = 'HDA purge failed (encoded: %s, decoded: %s): %s' % ( dataset_id, id, exc )
log.exception( msg )
trans.log_event( msg )
message = 'Dataset removal from disk failed'
status = 'error'
return ( message, status )
@web.expose
def delete( self, trans, dataset_id, filename, show_deleted_on_refresh=False ):
message, status = self._delete( trans, dataset_id )
return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )
@web.expose
def delete_async( self, trans, dataset_id, filename ):
message, status = self._delete( trans, dataset_id )
if status == 'done':
return "OK"
else:
raise Exception( message )
@web.expose
def undelete( self, trans, dataset_id, filename ):
message, status = self._undelete( trans, dataset_id )
return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=True, message=message, status=status ) )
@web.expose
def undelete_async( self, trans, dataset_id, filename ):
message, status = self._undelete( trans, dataset_id )
if status == 'done':
return "OK"
else:
raise Exception( message )
@web.expose
def unhide( self, trans, dataset_id, filename ):
if self._unhide( trans, dataset_id ):
return trans.response.send_redirect( web.url_for( controller='root', action='history', show_hidden=True ) )
raise Exception( "Error unhiding" )
@web.expose
def purge( self, trans, dataset_id, filename, show_deleted_on_refresh=False ):
if trans.app.config.allow_user_dataset_purge:
message, status = self._purge( trans, dataset_id )
else:
message = "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator."
status = 'error'
return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )
@web.expose
def purge_async( self, trans, dataset_id, filename ):
if trans.app.config.allow_user_dataset_purge:
message, status = self._purge( trans, dataset_id )
else:
message = "Removal of datasets by users is not allowed in this Galaxy instance. Please contact your Galaxy administrator."
status = 'error'
if status == 'done':
return "OK"
else:
raise Exception( message )
@web.expose
def show_params( self, trans, dataset_id=None, from_noframe=None, **kwd ):
"""
Show the parameters used for the job associated with an HDA
"""
try:
hda = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( self.decode_id( dataset_id ) )
except ValueError:
hda = None
if not hda:
raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % escape( str( dataset_id ) ) )
if not self._can_access_dataset( trans, hda ):
return trans.show_error_message( "You are not allowed to access this dataset" )
# Get the associated job, if any. If this hda was copied from another,
# we need to find the job that created the original dataset association.
params_objects = None
job = None
tool = None
upgrade_messages = {}
has_parameter_errors = False
inherit_chain = hda.source_dataset_chain
if inherit_chain:
job_dataset_association = inherit_chain[-1][0]
else:
job_dataset_association = hda
if job_dataset_association.creating_job_associations:
job = job_dataset_association.creating_job_associations[0].job
if job:
# Get the tool object
try:
# Load the tool
toolbox = self.get_toolbox()
tool = toolbox.get_tool( job.tool_id )
assert tool is not None, 'Requested tool has not been loaded.'
# Load parameter objects, if a parameter type has changed, it's possible for the value to no longer be valid
try:
params_objects = job.get_param_values( trans.app, ignore_errors=False )
except:
params_objects = job.get_param_values( trans.app, ignore_errors=True )
# use different param_objects in the following line, since we want to display original values as much as possible
upgrade_messages = tool.check_and_update_param_values( job.get_param_values( trans.app, ignore_errors=True ),
trans,
update_values=False )
has_parameter_errors = True
except:
pass
if job is None:
return trans.show_error_message( "Job information is not available for this dataset." )
# TODO: we should provide the basic values along with the objects, in order to better handle reporting of old values during upgrade
return trans.fill_template( "show_params.mako",
inherit_chain=inherit_chain,
history=trans.get_history(),
hda=hda,
job=job,
tool=tool,
params_objects=params_objects,
upgrade_messages=upgrade_messages,
has_parameter_errors=has_parameter_errors )
@web.expose
def copy_datasets( self, trans, source_history=None, source_content_ids="", target_history_id=None, target_history_ids="", new_history_name="", do_copy=False, **kwd ):
user = trans.get_user()
if source_history is not None:
decoded_source_history_id = self.decode_id( source_history )
history = self.history_manager.get_owned( decoded_source_history_id, trans.user, current_history=trans.history )
current_history = trans.get_history()
else:
history = current_history = trans.get_history()
refresh_frames = []
if source_content_ids:
if not isinstance( source_content_ids, list ):
source_content_ids = source_content_ids.split(",")
encoded_dataset_collection_ids = [ s[ len("dataset_collection|"): ] for s in source_content_ids if s.startswith("dataset_collection|") ]
encoded_dataset_ids = [ s[ len("dataset|"): ] for s in source_content_ids if s.startswith("dataset|") ]
decoded_dataset_collection_ids = set(map( self.decode_id, encoded_dataset_collection_ids ))
decoded_dataset_ids = set(map( self.decode_id, encoded_dataset_ids ))
else:
decoded_dataset_collection_ids = []
decoded_dataset_ids = []
if new_history_name:
target_history_ids = []
else:
if target_history_id:
target_history_ids = [ self.decode_id(target_history_id) ]
elif target_history_ids:
if not isinstance( target_history_ids, list ):
target_history_ids = target_history_ids.split(",")
target_history_ids = list(set([ self.decode_id(h) for h in target_history_ids if h ]))
else:
target_history_ids = []
done_msg = error_msg = ""
new_history = None
if do_copy:
invalid_contents = 0
if not ( decoded_dataset_ids or decoded_dataset_collection_ids ) or not ( target_history_ids or new_history_name ):
error_msg = "You must provide both source datasets and target histories. "
else:
if new_history_name:
new_history = trans.app.model.History()
new_history.name = new_history_name
new_history.user = user
trans.sa_session.add( new_history )
trans.sa_session.flush()
target_history_ids.append( new_history.id )
if user:
target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if hist is not None and hist.user == user ]
else:
target_histories = [ history ]
if len( target_histories ) != len( target_history_ids ):
error_msg = error_msg + "You do not have permission to add datasets to %i requested histories. " % ( len( target_history_ids ) - len( target_histories ) )
source_contents = map( trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get, decoded_dataset_ids )
source_contents.extend( map( trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get, decoded_dataset_collection_ids ) )
source_contents.sort(key=lambda content: content.hid)
for content in source_contents:
if content is None:
error_msg = error_msg + "You tried to copy a dataset that does not exist. "
invalid_contents += 1
elif content.history != history:
error_msg = error_msg + "You tried to copy a dataset which is not in your current history. "
invalid_contents += 1
else:
for hist in target_histories:
if content.history_content_type == "dataset":
hist.add_dataset( content.copy( copy_children=True ) )
else:
copy_collected_datasets = True
copy_kwds = {}
if copy_collected_datasets:
copy_kwds["element_destination"] = hist
hist.add_dataset_collection( content.copy( **copy_kwds ) )
if current_history in target_histories:
refresh_frames = ['history']
trans.sa_session.flush()
hist_names_str = ", ".join( ['<a href="%s" target="_top">%s</a>' %
( url_for( controller="history", action="switch_to_history",
hist_id=trans.security.encode_id( hist.id ) ), escape(hist.name) )
for hist in target_histories ] )
num_source = len( source_content_ids ) - invalid_contents
num_target = len(target_histories)
done_msg = "%i %s copied to %i %s: %s." % (num_source, inflector.cond_plural(num_source, "dataset"), num_target, inflector.cond_plural(num_target, "history"), hist_names_str )
trans.sa_session.refresh( history )
source_contents = history.active_contents
target_histories = [history]
if user:
target_histories = user.active_histories
return trans.fill_template( "/dataset/copy_view.mako",
source_history=history,
current_history=current_history,
source_content_ids=source_content_ids,
target_history_id=target_history_id,
target_history_ids=target_history_ids,
source_contents=source_contents,
target_histories=target_histories,
new_history_name=new_history_name,
done_msg=done_msg,
error_msg=error_msg,
refresh_frames=refresh_frames )
def _copy_datasets( self, trans, dataset_ids, target_histories, imported=False ):
""" Helper method for copying datasets. """
user = trans.get_user()
done_msg = error_msg = ""
invalid_datasets = 0
if not dataset_ids or not target_histories:
error_msg = "You must provide both source datasets and target histories."
else:
# User must own target histories to copy datasets to them.
for history in target_histories:
if user != history.user:
error_msg = error_msg + "You do not have permission to add datasets to %i requested histories. " % ( len( target_histories ) )
for dataset_id in dataset_ids:
decoded_id = self.decode_id( dataset_id )
data = self.hda_manager.get_accessible( decoded_id, trans.user )
data = self.hda_manager.error_if_uploading( data )
if data is None:
error_msg = error_msg + "You tried to copy a dataset that does not exist or that you do not have access to. "
invalid_datasets += 1
else:
for hist in target_histories:
dataset_copy = data.copy( copy_children=True )
if imported:
dataset_copy.name = "imported: " + dataset_copy.name
hist.add_dataset( dataset_copy )
trans.sa_session.flush()
num_datasets_copied = len( dataset_ids ) - invalid_datasets
done_msg = "%i dataset%s copied to %i histor%s." % \
( num_datasets_copied, iff( num_datasets_copied == 1, "", "s"), len( target_histories ), iff( len( target_histories ) == 1, "y", "ies") )
trans.sa_session.refresh( history )
if error_msg != "":
status = ERROR
message = error_msg
else:
status = SUCCESS
message = done_msg
return status, message
|
default
|
main.py
|
import markdown
|
input="resume.md",
output="index.html",
encoding="utf8"
)
|
markdown.markdownFromFile(
|
RedisPrompt.ts
|
// imports.
|
import * as types from 'lib/console/compose/builder/prompts/types';
// Class RedisPrompt.
export class RedisPrompt extends ServicePrompt {
// service / question name.
public name: string = 'Redis';
// service slug (lowercase, normalized name).
public slug: string = 'redis';
// docker image name.
public image: string = 'redis';
// tags.
public tags: string[] = ['5-alpine', '4-alpine'];
// port mappings.
public ports: types.IPromptPort[] = [
{ name: 'default', port: '6379' },
];
// list of mount points.
public mountPoints: types.IPromptMount[] = [
{ source: 'redis-data', target: '/data' },
];
// list of volumes.
public volumes: types.IPromptVolume[] = [
{ name: 'redis-data', driver: 'local' },
];
}
|
import { ServicePrompt } from 'lib/console/compose/builder/prompts';
|
root.go
|
package command
import (
"fmt"
"os"
"strings"
"github.com/kyma-project/control-plane/components/kyma-environment-broker/cmd/cli/credential"
"github.com/kyma-project/control-plane/components/kyma-environment-broker/cmd/cli/logger"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
// Version is the CLI version to be filled in by the build system
var Version string = "N/A"
// New constructs a new root command for the kcp CLI.
func New(log logger.Logger) *cobra.Command {
cobra.OnInitialize(initConfig)
description := fmt.Sprintf(`KCP CLI (Kyma Control Plane CLI) is a day-two operations tool for Kyma Runtimes, which allows you to view and manage the Runtimes at scale.
You can list and observe the attributes and state of each Kyma Runtime, and perform various operations on them, such as upgrading the Kyma version.
You can find the complete list of possible operations as commands below.
The CLI supports a configuration file for the common (global) options needed by all commands. The config file is looked up in this order:
- --config {PATH} option
- KCPCONFIG environment variable which contains the path
- $HOME/.kcp/config.yaml (default path).
The configuration file is in YAML format and supports the following global options: %s, %s, %s, %s, %s, %s.
See the **Global Options** section of each command for the description of these options.`, GlobalOpts.oidcIssuerURL, GlobalOpts.oidcClientID, GlobalOpts.oidcClientSecret, GlobalOpts.kebAPIURL, GlobalOpts.kubeconfigAPIURL, GlobalOpts.gardenerKubeconfig)
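// A sketch of what such a config file might look like (the option names below are
// illustrative; the authoritative names are the GlobalOpts flag names interpolated above):
//
//	oidc-issuer-url: https://oidc.example.com
//	oidc-client-id: kcp-cli
//	keb-api-url: https://keb.example.com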
cmd := &cobra.Command{
Use: "kcp",
Short: "Day-two operations tool for Kyma Runtimes.",
Long: description,
Version: Version,
PersistentPreRunE: func(cmd *cobra.Command, _ []string) error {
if cmd.CalledAs() != "help" {
return ValidateGlobalOpts()
}
return nil
},
SilenceUsage: true,
}
cmd.PersistentFlags().StringVar(&configPath, "config", os.Getenv(configEnv), "Path to the KCP CLI config file. Can also be set using the KCPCONFIG environment variable. Defaults to $HOME/.kcp/config.yaml.")
SetGlobalOpts(cmd)
log.AddFlags(cmd.PersistentFlags())
cmd.PersistentFlags().BoolP("help", "h", false, "Option that displays help for the CLI.")
cmd.AddCommand(
NewLoginCmd(log),
NewRuntimeCmd(log),
NewOrchestrationCmd(log),
NewKubeconfigCmd(log),
NewUpgradeCmd(log),
NewTaskRunCmd(log),
)
return cmd
}
func initConfig() {
// If config file is set via flags or ENV, use that path,
// otherwise try to load the config from $HOME/{configDir}/config.yaml
if configPath != "" {
viper.SetConfigFile(configPath)
} else {
home, err := os.UserHomeDir()
|
configPath = fmt.Sprintf("%s/%s", home, configDir)
viper.AddConfigPath(configPath)
viper.SetConfigName("config")
}
viper.SetConfigType("yaml")
replacer := strings.NewReplacer("-", "_")
viper.SetEnvKeyReplacer(replacer)
viper.SetEnvPrefix("KCP")
viper.AutomaticEnv()
err := viper.ReadInConfig()
// Ignore a missing config file so that config parameters can still be passed as flags or environment variables.
// Exit with an error otherwise.
if _, ok := err.(viper.ConfigFileNotFoundError); err != nil && !ok {
fmt.Println("Error:", err)
os.Exit(1)
}
}
// CLICredentialManager returns a credential.Manager configured using the CLI global options
func CLICredentialManager(logger logger.Logger) credential.Manager {
return credential.NewManager(GlobalOpts.OIDCIssuerURL(), GlobalOpts.OIDCClientID(), GlobalOpts.OIDCClientSecret(), logger)
}
|
if err != nil {
fmt.Println("Error:", err)
os.Exit(1)
}
|
cboe.rs
|
use super::Range;
use byteorder::{BigEndian, ByteOrder};
pub fn get_sequence(data: &[u8]) -> Range {
if data.len() < 8 {
// Packet smaller than the header; this should 'never' happen.
// If it does, return zero sequence numbers: if this was a corrupted packet,
// it will, appropriately, show up as a gap.
return Range { begin: 0, count: 0 };
}
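// Assumed wire layout: byte 2 holds the message count; bytes 4..8 hold the big-endian sequence number.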
let count = data[2];
let seq_num = BigEndian::read_u32(&data[4..]);
if seq_num == 0
|
Range {
begin: seq_num as u64,
count: count as u64,
}
}
|
{
return Range { begin: 0, count: 0 };
}
|
SegmentTitle.js
|
import React from "react";
// reactstrap components
import { Container, Row, Col } from "reactstrap";
// core components
function
|
(props) {
return (
<>
<div className="section">
<Container className="text-center">
<Row className="justify-content-md-center">
<Col lg="8" md="12">
<h2 className="title">{props.text}</h2>
</Col>
</Row>
</Container>
</div>
</>
);
}
export default SegmentTitle;
|
SegmentTitle
|
ConsigneeDashBoard.js
|
import React from 'react';
import {Table, Button} from "semantic-ui-react";
const ConsigneeDashBoard = ({consignees}) => {
return (
<Table striped celled compact selectable >
|
<Table.HeaderCell>Company Name</Table.HeaderCell>
<Table.HeaderCell>Address</Table.HeaderCell>
<Table.HeaderCell>Location</Table.HeaderCell>
<Table.HeaderCell>Action</Table.HeaderCell>
</Table.Row>
</Table.Header>
<Table.Body>
{consignees.map((consignee, index) => {
const {id, company_name, address, city, country_code} = consignee
return (
<Table.Row key={index}>
<Table.Cell>{company_name}</Table.Cell>
<Table.Cell>{address}</Table.Cell>
<Table.Cell>{city}, {country_code}</Table.Cell>
<Table.Cell>
{/*<Link href={`/consignee/${id}`}>*/}
<Button as={'a'} size={'mini'} primary>
update
</Button>
{/*</Link>*/}
{/*<Link href={`/consignee/${id}`}>*/}
<Button as={'a'} size={'mini'} basic disabled>
delete
</Button>
{/*</Link>*/}
</Table.Cell>
</Table.Row>
)
})}
</Table.Body>
</Table>
);
};
export default ConsigneeDashBoard;
|
<Table.Header>
<Table.Row>
|
default.py
|
class DefaultColor(object):
"""
This class should have the default colors for every segment.
Please test every new segment with this theme first.
"""
# RESET is not a real color code. It is used as an indicator
# within the code that any foreground / background color should
# be cleared
RESET = -1
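# The remaining values are (presumably) xterm 256-color palette indexes.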
USERNAME_FG = 250
USERNAME_BG = 240
USERNAME_ROOT_FG = 250
USERNAME_ROOT_BG = 124
HOSTNAME_FG = 250
HOSTNAME_BG = 238
HOME_SPECIAL_DISPLAY = True
HOME_BG = 31 # blueish
HOME_FG = 15 # white
PATH_BG = 237 # dark grey
PATH_FG = 250 # light grey
CWD_FG = 254 # nearly-white grey
SEPARATOR_FG = 244
READONLY_BG = 124
READONLY_FG = 254
SSH_BG = 166 # medium orange
SSH_FG = 254
REPO_CLEAN_BG = 148 # a light green color
REPO_CLEAN_FG = 0 # black
REPO_DIRTY_BG = 161 # pink/red
REPO_DIRTY_FG = 15 # white
JOBS_FG = 39
JOBS_BG = 238
CMD_PASSED_BG = 236
CMD_PASSED_FG = 15
CMD_FAILED_BG = 161
|
SVN_CHANGES_FG = 22 # dark green
GIT_AHEAD_BG = 240
GIT_AHEAD_FG = 250
GIT_BEHIND_BG = 240
GIT_BEHIND_FG = 250
GIT_STAGED_BG = 22
GIT_STAGED_FG = 15
GIT_NOTSTAGED_BG = 130
GIT_NOTSTAGED_FG = 15
GIT_UNTRACKED_BG = 52
GIT_UNTRACKED_FG = 15
GIT_CONFLICTED_BG = 9
GIT_CONFLICTED_FG = 15
GIT_STASH_BG = 221
GIT_STASH_FG = 0
VIRTUAL_ENV_BG = 35 # a mid-tone green
VIRTUAL_ENV_FG = 0
BATTERY_NORMAL_BG = 22
BATTERY_NORMAL_FG = 7
BATTERY_LOW_BG = 196
BATTERY_LOW_FG = 7
AWS_PROFILE_FG = 39
AWS_PROFILE_BG = 238
TIME_FG = 250
TIME_BG = 238
CONST_FG = 15
CONST_BG = 0
class Color(DefaultColor):
"""
This subclass is required when the user chooses to use 'default' theme.
Because the segments require a 'Color' class for every theme.
"""
pass
|
CMD_FAILED_FG = 15
SVN_CHANGES_BG = 148
|
core.rs
|
extern crate chrono;
extern crate serde;
use serde::{Serialize, Deserialize};
use chrono::{DateTime, Local, Timelike, Weekday};
/// Returns current DateTime<Local> with (nano)seconds set to zero.
pub fn now_rounded() -> DateTime<Local> {
Local::now().with_second(0).unwrap().with_nanosecond(0).unwrap()
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Person {
pub name: String,
}
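/// How a periodic task recurs. With `rename_all="snake_case"` the variants
/// serialize as e.g. `daily`, `weekly: Mon`, or `monthly: { week: 3, day: Fri }`
/// (compare the deserialization tests below).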
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all="snake_case")]
pub enum Recurrence {
Daily,
Weekly(Weekday),
Monthly { week: u32, day: Weekday },
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(tag="type")]
pub enum Occurrence {
OneTime,
Periodic { recurrence: Recurrence },
}
/// The Task struct.
///
/// Occurrence can be either OneTime or Periodic.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all="snake_case")]
pub struct Task {
pub title: String,
pub description: String,
//tags: Option<Vec<Tag>>,
pub occurrence: Occurrence,
pub effort: Vec<f64>,
#[serde(default)]
pub done: f64,
pub created_at: DateTime<Local>,
#[serde(default)]
pub due_at: Option<DateTime<Local>>,
#[serde(default)]
pub relates_to: Option<Vec<Task>>,
#[serde(default)]
pub depends_on: Option<Vec<Task>>,
#[serde(default)]
pub started_at: Option<DateTime<Local>>,
#[serde(default)]
pub paused_at: Option<Vec<DateTime<Local>>>,
#[serde(default)]
pub resumed_at: Option<Vec<DateTime<Local>>>,
#[serde(default)]
pub finished_at: Option<DateTime<Local>>,
#[serde(default)]
pub cancelled_at: Option<DateTime<Local>>,
#[serde(default)]
pub people: Option<Vec<Person>>,
//pub notes: Option<Vec<Note>>,
//pub attachments: Option<Vec<Attachment>>,
}
impl Task {
pub fn is_valid(&self) -> bool {
! self.is_invalid()
}
fn is_invalid(&self) -> bool {
self.title.is_empty() ||
self.description.is_empty()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn create_task() -> Result<(), String> {
let mut task = Task {
title: String::from("Title"),
description: String::from("Description"),
created_at: Local::now(),
done: 0.0,
effort: vec![5.0],
occurrence: Occurrence::Periodic {
recurrence: Recurrence::Weekly(Weekday::Mon) },
due_at: None,
relates_to: None,
depends_on: None,
started_at: None,
paused_at: None,
resumed_at: None,
finished_at: None,
cancelled_at: None,
people: None,
};
assert_eq!("Title", task.title);
assert_eq!("Description", task.description);
assert_eq!(1, task.effort.len());
assert!(task.is_valid());
task.title.clear();
assert!(! task.is_valid());
Ok(())
}
#[test]
fn serialize_a_task() -> Result<(), String> {
let mut task = Task {
title: String::from("Title"),
description: String::from("Description"),
created_at: now_rounded(),
done: 0.0,
effort: vec![5.0],
occurrence: Occurrence::Periodic {
recurrence: Recurrence::Weekly(Weekday::Mon) },
due_at: None,
relates_to: None,
depends_on: None,
started_at: None,
paused_at: None,
resumed_at: None,
finished_at: None,
cancelled_at: None,
people: None,
};
match serde_yaml::to_string(&task) {
Ok(y) => println!("{}", y),
Err(reason) => return Err(format!("{}", reason)),
};
task.occurrence = Occurrence::OneTime;
match serde_yaml::to_string(&task) {
Ok(y) => println!("{}", y),
Err(reason) => return Err(format!("{}", reason)),
};
Ok(())
}
#[test]
fn deserialize_a_task() -> Result<(), String> {
let task_str = r#"---
title: Title
description: Description
created_at: 2019-10-09T13:00:00+02:00
occurrence:
type: Periodic
recurrence:
monthly:
week: 3
day: Fri
effort: [10.0]"#;
match serde_yaml::from_str::<Task>(task_str) {
Ok(task) => println!("task: {:?}", task),
Err(reason) => return Err(format!("{}", reason)),
}
Ok(())
}
#[test]
fn
|
() -> Result<(), String> {
let task = Task {
title: String::from("Title"),
description: String::from("Description"),
created_at: now_rounded(),
done: 0.0,
effort: vec![5.0],
occurrence: Occurrence::Periodic {
recurrence: Recurrence::Weekly(Weekday::Mon) },
due_at: None,
relates_to: None,
depends_on: None,
started_at: None,
paused_at: None,
resumed_at: None,
finished_at: None,
cancelled_at: None,
people: None,
};
match serde_yaml::to_string(&task) {
Ok(y) => println!("{}", y),
Err(reason) => return Err(format!("{}", reason)),
};
Ok(())
}
#[test]
fn deserialize_a_task_with_multiline_text() -> Result<(), String> {
let task_str = r#"---
title: Title
description: |
This
is a
multi
line
description.
created_at: 2019-10-09T13:00:00+02:00
occurrence:
type: Periodic
recurrence:
monthly:
week: 3
day: Fri
effort: [10.0]"#;
match serde_yaml::from_str::<Task>(task_str) {
Ok(task) => println!("task: {:?}", task),
Err(reason) => return Err(format!("{}", reason)),
}
Ok(())
}
}
|
serialize_a_task_with_multiline_text
|
test_bot.py
|
import unittest.mock
from programy.brain import Brain
from programy.bot import BrainFactory
from programy.bot import Bot
from programy.config.bot.bot import BotConfiguration
from programy.config.programy import ProgramyConfiguration
from programy.clients.events.console.config import ConsoleConfiguration
from programy.context import ClientContext
from programy.config.bot.spelling import BotSpellingConfiguration
from programytest.client import TestClient
class MockBrain(Brain):
def __init__(self, bot, configuration):
Brain.__init__(self, bot, configuration)
self._response = ""
def ask_question(self, clientid, sentence, srai=False):
return self._response
class MockBot(Bot):
def __init__(self, config: BotConfiguration, client):
Bot.__init__(self, config, client)
def loads_brains(self, bot):
self._brains["mock"] = MockBrain(self, self.configuration.configurations[0])
class BrainFactoryTests(unittest.TestCase):
def test_empty_config_init(self):
configuration = BotConfiguration()
configuration._bot_selector = "programy.clients.client.DefaultBrainSelector"
client = TestClient()
bot = Bot(configuration, client)
factory = BrainFactory(bot)
self.assertIsNotNone(factory)
brain = factory.select_brain()
self.assertIsNotNone(brain)
self.assertIsInstance(brain, Brain)
class BotTests(unittest.TestCase):
def setUp(self):
client = TestClient()
self._client_context = client.create_client_context("testid")
def test_bot_init_blank(self):
client = TestClient()
bot = Bot(BotConfiguration(), client)
self.assertIsNotNone(bot.brain)
self.assertEqual("bot", bot.ylogger_type())
self.assertIsNone(bot.spell_checker)
self.assertIsNotNone(bot.sentence_splitter)
self.assertIsNotNone(bot.sentence_joiner)
self.assertIsNotNone(bot.conversations)
self.assertIsNotNone(bot.default_response)
self.assertIsNotNone(bot.exit_response)
self.assertIsNotNone(bot.initial_question)
self.assertTrue(bot.override_properties)
self.assertIsNotNone(bot.get_version_string)
def test_bot_init_with_config(self):
bot_config = BotConfiguration()
bot_config._bot_root = BotConfiguration.DEFAULT_ROOT
bot_config._default_response = BotConfiguration.DEFAULT_RESPONSE
bot_config._exit_response = BotConfiguration.DEFAULT_EXIT_RESPONSE
bot_config._initial_question = BotConfiguration.DEFAULT_INITIAL_QUESTION
bot_config._empty_string = BotConfiguration.DEFAULT_EMPTY_STRING
bot_config._override_properties = BotConfiguration.DEFAULT_OVERRIDE_PREDICATES
bot_config._max_question_recursion = 1000
bot_config._max_question_timeout = 60
bot_config._max_search_depth = 100
bot_config._max_search_timeout = 60
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot.brain)
self.assertIsNone(bot.spell_checker)
self.assertIsNotNone(bot.sentence_splitter)
self.assertIsNotNone(bot.sentence_joiner)
self.assertIsNotNone(bot.conversations)
self.assertIsNotNone(bot.default_response)
self.assertIsNotNone(bot.exit_response)
self.assertIsNotNone(bot.initial_question)
self.assertTrue(bot.override_properties)
self.assertIsNotNone(bot.get_version_string)
def test_bot_old_version(self):
bot_config = BotConfiguration()
client = TestClient()
bot = Bot(bot_config, client)
self._client_context.brain.properties.add_property("name", 'bot'),
self._client_context.brain.properties.add_property("version", "1.9.3"),
self._client_context.brain.properties.add_property("birthdate", "1st January 2019")
version = bot.get_version_string(self._client_context)
self.assertIsNotNone(version)
self.assertEqual("bot, v1.9.3, initiated 1st January 2019", version)
def test_bot_new_version(self):
bot_config = BotConfiguration()
client = TestClient()
bot = Bot(bot_config, client)
self._client_context.brain.properties.add_property("name", 'bot'),
self._client_context.brain.properties.add_property("app_version", "1.9.3"),
self._client_context.brain.properties.add_property("grammar_version", "37"),
|
version = bot.get_version_string(self._client_context)
self.assertIsNotNone(version)
self.assertEqual("bot, App: v1.9.3 Grammar v37, initiated 1st January 2019", version)
def test_bot_init_no_spellchecker_configuration(self):
bot_config = BotConfiguration()
bot_config._spelling = None
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.spell_checker)
def test_bot_init_no_spellchecker(self):
bot_config = BotConfiguration()
bot_config.spelling._classname = None
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.spell_checker)
def test_bot_init_with_invalid_spellchecker(self):
bot_config = BotConfiguration()
bot_config.spelling._classname = "programy.spelling.checker.SpellingCheckerX"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.spell_checker)
def test_bot_init_with_valid_spellchecker(self):
bot_config = BotConfiguration()
bot_config.spelling._classname = "programy.spelling.textblob_spelling.TextBlobSpellingChecker"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNotNone(bot.spell_checker)
def test_bot_init_no_splitter_configuration(self):
bot_config = BotConfiguration()
bot_config._splitter = None
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.sentence_splitter)
def test_bot_init_no_splitter(self):
bot_config = BotConfiguration()
bot_config.splitter._classname = None
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.sentence_splitter)
def test_bot_init_with_invalid_splitter(self):
bot_config = BotConfiguration()
bot_config.splitter._classname = "programy.spelling.checker.SpellingCheckerX"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.sentence_splitter)
def test_bot_init_with_valid_splitter(self):
bot_config = BotConfiguration()
bot_config.splitter._classname = "programy.dialog.splitter.splitter.SentenceSplitter"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNotNone(bot.sentence_splitter)
def test_bot_init_no_joiner_configuration(self):
bot_config = BotConfiguration()
bot_config._joiner = None
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.sentence_joiner)
def test_bot_init_no_joiner(self):
bot_config = BotConfiguration()
bot_config.joiner._classname = None
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.sentence_joiner)
def test_bot_init_with_invalid_joiner(self):
bot_config = BotConfiguration()
bot_config.joiner._classname = "programy.spelling.checker.SpellingCheckerX"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.sentence_joiner)
def test_bot_init_with_valid_joiner(self):
bot_config = BotConfiguration()
bot_config.joiner._classname = "programy.dialog.joiner.joiner.SentenceJoiner"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNotNone(bot.sentence_joiner)
def test_bot_init_no_translator_configuration(self):
bot_config = BotConfiguration()
bot_config._from_translator = None
bot_config._to_translator = None
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.from_translator)
self.assertIsNone(bot.to_translator)
def test_bot_init_no_translator(self):
bot_config = BotConfiguration()
bot_config.from_translator._classname = None
bot_config.to_translator._classname = None
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.from_translator)
self.assertIsNone(bot.to_translator)
def test_bot_init_with_invalid_translator(self):
bot_config = BotConfiguration()
bot_config.from_translator._classname = "programy.spelling.checker.SpellingCheckerX"
bot_config.to_translator._classname = "programy.spelling.checker.SpellingCheckerX"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.from_translator)
self.assertIsNone(bot.to_translator)
def test_bot_init_with_valid_translator(self):
bot_config = BotConfiguration()
bot_config.from_translator._classname = "programy.translate.textblob_translator.TextBlobTranslator"
bot_config.to_translator._classname = "programy.translate.textblob_translator.TextBlobTranslator"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNotNone(bot.from_translator)
self.assertIsNotNone(bot.to_translator)
def test_bot_init_no_sentiment_analyser_configuration(self):
bot_config = BotConfiguration()
bot_config._sentiment = None
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.sentiment_analyser)
def test_bot_init_no_sentiment_analyser(self):
bot_config = BotConfiguration()
bot_config.sentiment_analyser._classname = None
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.sentiment_analyser)
def test_bot_init_with_invalid_sentiment_analyser(self):
bot_config = BotConfiguration()
bot_config.sentiment_analyser._classname = "programy.spelling.checker.SpellingCheckerX"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNone(bot.sentiment_analyser)
def test_bot_init_with_valid_sentiment_analyser(self):
bot_config = BotConfiguration()
bot_config.sentiment_analyser._classname = "programy.sentiment.textblob_sentiment.TextBlobSentimentAnalyser"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertIsNotNone(bot.sentiment_analyser)
def test_bot_init_default_brain(self):
client = TestClient()
bot = Bot(BotConfiguration(), client)
self.assertIsNotNone(bot)
self.assertIsNotNone(bot.brain)
def test_bot_init_supplied_brain(self):
client = TestClient()
bot = Bot(BotConfiguration(), client)
self.assertIsNotNone(bot)
self.assertIsNotNone(bot.brain)
def test_bot_defaultresponses(self):
client = TestClient()
bot = Bot(BotConfiguration(), client)
self.assertIsNotNone(bot)
self.assertEqual(bot.default_response, "")
self.assertEqual(bot.exit_response, "Bye!")
def test_bot_with_config(self):
configuration = ProgramyConfiguration(ConsoleConfiguration())
self.assertIsNotNone(configuration)
self.assertIsNotNone(configuration.client_configuration.configurations[0])
self.assertIsNotNone(configuration.client_configuration.configurations[0].configurations[0])
configuration.client_configuration.configurations[0].prompt = ":"
configuration.client_configuration.configurations[0].default_response = "No answer for that"
configuration.client_configuration.configurations[0].exit_response = "See ya!"
client = TestClient()
bot = Bot(configuration.client_configuration.configurations[0], client)
self.assertIsNotNone(bot)
self.assertEqual(bot.default_response, "No answer for that")
self.assertEqual(bot.exit_response, "See ya!")
def test_bot_with_conversation(self):
client = TestClient()
bot = Bot(BotConfiguration(), client)
self.assertIsNotNone(bot)
self.assertFalse(bot.has_conversation(self._client_context))
response = bot.ask_question(self._client_context, "hello")
self.assertIsNotNone(response)
self.assertTrue(bot.has_conversation(self._client_context))
response = bot.ask_question(self._client_context, "hello")
self.assertIsNotNone(response)
self.assertTrue(bot.has_conversation(self._client_context))
client_context2 = ClientContext(TestClient(), "testid2")
client_context2._bot = bot
client_context2._brain = self._client_context.bot.brain
response = bot.ask_question(client_context2, "hello")
self.assertIsNotNone(response)
self.assertTrue(bot.has_conversation(client_context2))
def test_bot_chat_loop(self):
client = TestClient()
bot = Bot(BotConfiguration(), client)
self.assertIsNotNone(bot)
self.assertIsInstance(bot, Bot)
bot.configuration._default_response = "Sorry, I don't have an answer for that right now"
response = bot.ask_question(self._client_context, "hello")
self.assertIsNotNone(response)
self.assertEqual(response, "Sorry, I don't have an answer for that right now.")
response = bot.ask_question(self._client_context, "hello again")
self.assertIsNotNone(response)
self.assertEqual(response, "Sorry, I don't have an answer for that right now.")
response = bot.ask_question(self._client_context, "goodbye")
self.assertIsNotNone(response)
self.assertEqual(response, "Sorry, I don't have an answer for that right now.")
conversation = bot.get_conversation(self._client_context)
self.assertIsNotNone(conversation)
self.assertEqual(conversation.previous_nth_question(2).sentence(0).text(), "hello")
self.assertEqual(conversation.previous_nth_question(2).sentence(0).response, "Sorry, I don't have an answer for that right now")
self.assertEqual(conversation.previous_nth_question(1).sentence(0).text(), "hello again")
self.assertEqual(conversation.previous_nth_question(1).sentence(0).response, "Sorry, I don't have an answer for that right now")
self.assertEqual(conversation.previous_nth_question(0).sentence(0).text(), "goodbye")
self.assertEqual(conversation.previous_nth_question(0).sentence(0).response, "Sorry, I don't have an answer for that right now")
def test_max_recursion(self):
client = TestClient()
bot = Bot(BotConfiguration(), client)
self.assertIsNotNone(bot)
bot.configuration._default_response = "Sorry, I don't have an answer for that right now"
bot.configuration._max_question_recursion = 0
with self.assertRaises(Exception):
bot.ask_question(self._client_context, "hello")
def test_get_default_response_empty_string(self):
bot_config = BotConfiguration()
self.assertIsNotNone(bot_config)
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertEqual("", bot.get_default_response(self._client_context))
def test_get_default_response_default_response_only(self):
bot_config = BotConfiguration()
self.assertIsNotNone(bot_config)
bot_config.default_response = "Default response!"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertEqual("Default response!", bot.get_default_response(self._client_context))
def test_get_default_response_default_response_srai_no_match(self):
bot_config = BotConfiguration()
self.assertIsNotNone(bot_config)
bot_config.default_response_srai = "YDEFAULTRESPONSE"
bot_config.default_response = "Default response!"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertEqual("Default response!", bot.get_default_response(self._client_context))
def test_get_default_response_default_response_srai_match(self):
bot_config = BotConfiguration()
self.assertIsNotNone(bot_config)
bot_config.default_response_srai = "YDEFAULTRESPONSE"
bot_config.default_response = "Default response!"
client = TestClient()
bot = MockBot(bot_config, client)
self.assertIsNotNone(bot)
client_context2 = ClientContext(TestClient(), "testid2")
client_context2._bot = bot
client_context2._brain = MockBrain(bot, bot.configuration.configurations[0])
client_context2._brain._response = "Y DEFAULT RESPONSE"
response = bot.get_default_response(client_context2)
self.assertIsNotNone(response)
self.assertEqual("Y DEFAULT RESPONSE", response)
def test_get_default_response_no_srai(self):
bot_config = BotConfiguration()
bot_config._default_response_srai = None
bot_config._default_response = "Test This"
self.assertIsNotNone(bot_config)
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertEqual("Test This", bot.get_default_response(self._client_context))
def test_get_initial_question_empty_string(self):
bot_config = BotConfiguration()
self.assertIsNotNone(bot_config)
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertEqual("Hello", bot.get_initial_question(self._client_context))
def test_get_initial_question_initial_question_only(self):
bot_config = BotConfiguration()
self.assertIsNotNone(bot_config)
bot_config.initial_question = "Default response!"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertEqual("Default response!", bot.get_initial_question(self._client_context))
def test_get_initial_question_initial_question_srai_no_match(self):
bot_config = BotConfiguration()
self.assertIsNotNone(bot_config)
bot_config.initial_question_srai = "YDEFAULTRESPONSE"
bot_config.initial_question = "Default response!"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertEqual("Default response!", bot.get_initial_question(self._client_context))
def test_get_initial_question_initial_question_srai_match(self):
bot_config = BotConfiguration()
self.assertIsNotNone(bot_config)
client = TestClient()
bot = MockBot(bot_config, client)
self.assertIsNotNone(bot)
client_context2 = ClientContext(TestClient(), "testid2")
client_context2._bot = bot
client_context2._brain = MockBrain(bot, bot.configuration.configurations[0])
client_context2._brain._response = "Y DEFAULT RESPONSE"
self.assertEqual("Y DEFAULT RESPONSE", bot.get_initial_question(client_context2))
def test_get_initial_question_no_srai(self):
bot_config = BotConfiguration()
bot_config._initial_question = "Test This"
bot_config._initial_question_srai = None
self.assertIsNotNone(bot_config)
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertEqual("Test This", bot.get_initial_question(self._client_context))
def test_get_exit_response_empty_string(self):
bot_config = BotConfiguration()
self.assertIsNotNone(bot_config)
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertEqual("Bye!", bot.get_exit_response(self._client_context))
def test_get_exit_response_exit_response_only(self):
bot_config = BotConfiguration()
self.assertIsNotNone(bot_config)
bot_config.exit_response = "Default response!"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertEqual("Default response!", bot.get_exit_response(self._client_context))
def test_get_exit_response_exit_response_srai_no_match(self):
bot_config = BotConfiguration()
self.assertIsNotNone(bot_config)
bot_config.exit_response_srai = "YDEFAULTRESPONSE"
bot_config.exit_response = "Default response!"
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertEqual("Default response!", bot.get_exit_response(self._client_context))
def test_get_exit_response_exit_response_srai_match(self):
bot_config = BotConfiguration()
self.assertIsNotNone(bot_config)
bot_config.exit_response_srai = "YDEFAULTRESPONSE"
bot_config.exit_response = "Default response!"
client = TestClient()
bot = MockBot(bot_config, client)
self.assertIsNotNone(bot)
client_context2 = ClientContext(TestClient(), "testid2")
client_context2._bot = bot
client_context2._brain = MockBrain(bot, bot.configuration.configurations[0])
client_context2._brain._response = "Y DEFAULT RESPONSE"
self.assertEqual("Y DEFAULT RESPONSE", bot.get_exit_response(client_context2))
def test_get_exit_response_no_srai(self):
bot_config = BotConfiguration()
bot_config._exit_response = "Test This"
bot_config._exit_response_srai = None
self.assertIsNotNone(bot_config)
client = TestClient()
bot = Bot(bot_config, client)
self.assertIsNotNone(bot)
self.assertEqual("Test This", bot.get_exit_response(self._client_context))
def test_log_answer(self):
bot_config = BotConfiguration()
client = TestClient()
bot = Bot(bot_config, client)
bot.log_answer(self._client_context, "Hello", "Test", None)
response_logger = unittest.mock.Mock()
bot.log_answer(self._client_context, "Hello", "Test", response_logger)
|
self._client_context.brain.properties.add_property("birthdate", "1st January 2019")
|
read.go
|
package iniscanner_separator
import (
"github.com/reiver/go-ini/scanner/error"
"github.com/reiver/go-ini/token"
"bytes"
"io"
)
func
|
(runeScanner io.RuneScanner) (initoken.Separator, int, error) {
if nil == runeScanner {
return initoken.Separator{}, 0, iniscanner_error.NilRuneScanner
}
var buffer bytes.Buffer
var n int
{
r, n2, err := runeScanner.ReadRune()
n += n2
if nil != err && io.EOF != err {
return initoken.SomeSeparator( buffer.String() ), n, iniscanner_error.InternalError(
buffer.String(),
"trying to read rune",
err,
)
}
if io.EOF == err {
return initoken.Separator{}, n, iniscanner_error.SyntaxError(
"not a separator, separators are a \"=\" or a \":\" charcter",
string(r),
)
}
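// Peek (defined elsewhere in this package) presumably reports whether r is a separator rune, i.e. "=" or ":".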
switch {
case Peek(r):
// Nothing here.
default:
return initoken.Separator{}, n, iniscanner_error.SyntaxError(
"not a separator, separators are a \"=\" or a \":\" charcter",
string(r),
)
}
buffer.WriteRune(r)
}
return initoken.SomeSeparator( buffer.String() ), n, nil
}
|
Read
|
Main.js
|
Ext.define('PiClim.view.Main', {
extend: 'Ext.tab.Panel',
xtype: 'main',
requires: [
'Ext.TitleBar',
'Ext.field.Password',
'Ext.field.Checkbox',
'Ext.Video',
'Ext.dataview.List',
'PiClim.store.Temperatures',
'Ext.chart.CartesianChart',
'Ext.chart.axis.Numeric',
'Ext.chart.axis.Time',
'Ext.chart.series.Line',
'Ext.chart.interactions.PanZoom'
],
config: {
tabBarPosition: 'bottom',
items: [
{
title: I18n.MAIN_WELCOME_TITLE_SHORT,
iconCls: 'home',
name: 'home',
styleHtmlContent: true,
scrollable: null,
layout: {
type: 'vbox'
},
items: [
{
docked: 'top',
xtype: 'titlebar',
title: I18n.MAIN_WELCOME_TITLE_LONG
},
{
xtype: 'component',
html: I18n.MAIN_WELCOME_TEXT
}
]
},
{
title: I18n.MAIN_SERVER_TITLE_SHORT,
iconCls: 'download',
name: 'server',
hidden: true,
styleHtmlContent: true,
scrollable: null,
layout: {
type: 'vbox'
},
items: [
{
docked: 'top',
xtype: 'titlebar',
title: I18n.MAIN_SERVER_TITLE_LONG
},
{
xtype: 'component',
html: I18n.MAIN_SERVER_TEXT
},
{
flex: 1,
xtype: 'container',
layout: {
type: 'vbox',
align: 'center',
pack: 'center'
},
defaults: {
maxWidth: '720px',
width: '100%'
},
items: [
{
xtype: 'titlebar',
title: I18n.MAIN_SERVER_LOGINPANEL_TITLE
},
{
xtype: 'textfield',
label: I18n.MAIN_SERVER_LOGINPANEL_SERVERNAME_LABEL,
placeHolder: I18n.MAIN_SERVER_LOGINPANEL_SERVERNAME_HOLDER,
name: 'url'
},
{
xtype: 'component',
height: 10
},
{
xtype: 'button',
text: I18n.MAIN_SERVER_LOGINPANEL_CONNECT_LABEL,
disabled: true
}
]
}
]
},
{
title: I18n.MAIN_USERADD_TITLE_SHORT,
iconCls: 'user',
name: 'firstuser',
hidden: true,
styleHtmlContent: true,
scrollable: null,
layout: {
type: 'vbox'
},
items: [
{
docked: 'top',
xtype: 'titlebar',
title: I18n.MAIN_USERADD_TITLE_LONG
},
{
xtype: 'component',
html: I18n.MAIN_USERADD_TEXT
},
{
flex: 1,
xtype: 'container',
layout: {
type: 'vbox',
align: 'center',
pack: 'center'
},
defaults: {
maxWidth: '720px',
width: '100%'
},
items: [
{
xtype: 'titlebar',
title: I18n.MAIN_USERADD_CREATIONPANEL_TITLE
},
{
xtype: 'textfield',
label: I18n.MAIN_USERADD_CREATIONPANEL_LOGIN_LABEL,
placeHolder: I18n.MAIN_USERADD_CREATIONPANEL_LOGIN_HOLDER,
name: 'login'
},
{
xtype: 'passwordfield',
label: I18n.MAIN_USERADD_CREATIONPANEL_PASSWORD_LABEL,
placeHolder: I18n.MAIN_USERADD_CREATIONPANEL_PASSWORD_HOLDER,
name: 'password'
},
{
xtype: 'passwordfield',
label: I18n.MAIN_USERADD_CREATIONPANEL_CONFIRMPASSWORD_LABEL,
placeHolder: I18n.MAIN_USERADD_CREATIONPANEL_CONFIRMPASSWORD_HOLDER,
name: 'confirmpassword'
},
{
xtype: 'textfield',
label: I18n.MAIN_USERADD_CREATIONPANEL_FULLNAME_LABEL,
placeHolder: I18n.MAIN_USERADD_CREATIONPANEL_FULLNAME_HOLDER,
name: 'fullname'
},
{
xtype: 'textfield',
label: I18n.MAIN_USERADD_CREATIONPANEL_EMAIL_LABEL,
placeHolder: I18n.MAIN_USERADD_CREATIONPANEL_EMAIL_HOLDER,
name: 'email'
},
{
xtype: 'component',
height: 10
},
{
xtype: 'button',
text: I18n.MAIN_USERADD_CREATIONPANEL_CREATE_LABEL,
disabled: true
}
]
}
]
},
{
title: I18n.MAIN_USER_TITLE_SHORT,
iconCls: 'user',
name: 'user',
hidden: true,
styleHtmlContent: true,
scrollable: null,
layout: {
type: 'vbox'
},
items: [
{
docked: 'top',
xtype: 'titlebar',
title: I18n.MAIN_USER_TITLE_LONG
},
{
xtype: 'component',
html: I18n.MAIN_USER_TEXT
},
{
flex: 1,
xtype: 'container',
layout: {
type: 'vbox',
align: 'center',
pack: 'center'
},
defaults: {
maxWidth: '720px',
width: '100%'
},
items: [
{
xtype: 'titlebar',
title: I18n.MAIN_USER_LOGINPANEL_TITLE
},
{
xtype: 'textfield',
label: I18n.MAIN_USER_LOGINPANEL_LOGIN_LABEL,
placeHolder: I18n.MAIN_USER_LOGINPANEL_LOGIN_HOLDER,
name: 'login'
},
{
xtype: 'passwordfield',
label: I18n.MAIN_USER_LOGINPANEL_PASSWORD_LABEL,
placeHolder: I18n.MAIN_USER_LOGINPANEL_PASSWORD_HOLDER,
name: 'password'
},
{
xtype: 'checkboxfield',
label: I18n.MAIN_USER_LOGINPANEL_REMEMBER_LABEL,
name: 'remember'
},
{
xtype: 'component',
height: 10
},
{
xtype: 'button',
text: I18n.MAIN_USER_LOGINPANEL_LOGIN_LABEL,
disabled: true
}
]
}
]
},
{
title: I18n.MAIN_WELCOME_TITLE_SHORT,
iconCls: 'home',
name: 'home2',
hidden: true,
styleHtmlContent: true,
scrollable: null,
layout: {
type: 'vbox'
},
items: [
{
docked: 'top',
xtype: 'titlebar',
title: "",
name: "title",
items: [
{
iconCls: 'delete',
align: 'right',
name: 'delete'
}
]
},
{
xtype: 'component',
html: I18n.MAIN_WELCOME_TEXT
}
]
},
{
title: I18n.MAIN_TEMPERATURES_TITLE_SHORT,
iconCls: 'time',
name: 'temperatures',
hidden: true,
styleHtmlContent: true,
scrollable: null,
layout: {
type: 'vbox'
},
items: [
{
docked: 'top',
xtype: 'titlebar',
title: I18n.MAIN_TEMPERATURES_TITLE_LONG
},
{
xtype: 'component',
html: I18n.MAIN_TEMPERATURES_TEXT
},
{
xtype: 'chart',
flex: 1,
store: Ext.create("PiClim.store.Temperatures", {
fields: ['time'],
autoLoad: false
}),
legend: {
position: 'bottom'
},
interactions: [{
type: 'panzoom',
axes: {
"left": {
allowZoom: false,
allowPan: true
},
"bottom": {
allowZoom: true,
allowPan: true
}
}
}],
axes: [
{
type: 'numeric',
position: 'left',
fields: [],
title: {
text: I18n.MAIN_TEMPERATURES_AXE_TEMP,
fontSize: 15
},
grid: true,
visibleRange: [0.45, 0.75],
minimum: -60,
maximum: 60
},
{
name: 'timeAxis',
type: 'time',
position: 'bottom',
fields: ['time'],
fromDate: Ext.Date.add(new Date(), Ext.Date.YEAR, -5),
toDate: new Date(),
visibleRange: [0.9994521, 1],
title: {
text: I18n.MAIN_TEMPERATURES_AXE_TIME,
fontSize: 15
},
grid: true,
style: {
axisLine: true
}
}
]
}
]
},
{
title: I18n.MAIN_USERS_TITLE_SHORT,
iconCls: 'user',
name: 'users',
hidden: true,
styleHtmlContent: true,
scrollable: null,
layout: {
type: 'vbox'
},
items: [
{
docked: 'top',
xtype: 'titlebar',
title: I18n.MAIN_USERS_TITLE_LONG
},
{
xtype: 'component',
html: I18n.MAIN_USERS_TEXT
},
{
flex: 1,
xtype: 'container',
layout: {
type: 'vbox',
align: 'center',
pack: 'center'
},
height: '80%',
defaults: {
maxWidth: '720px',
width: '100%'
},
items: [
{
xtype: 'titlebar',
title: I18n.MAIN_USERS_LIST_HEADER
},
{
xtype: 'list',
store: {
autoLoad: false,
model: 'PiClim.model.User',
proxy: {
type: 'ajax',
url: 'service/users_list.php',
reader: {
type: "json",
rootProperty: "users"
}
}
},
flex: 1,
itemTpl: '{fullname} - {email} ({login})'
}
]
}
]
},
{
title: I18n.MAIN_SETTINGS_TITLE_SHORT,
iconCls: 'settings',
name: 'settings',
hidden: true,
styleHtmlContent: true,
scrollable: null,
layout: {
type: 'vbox'
},
items: [
{
docked: 'top',
xtype: 'titlebar',
title: I18n.MAIN_SETTINGS_TITLE_LONG
},
{
xtype: 'component',
html: I18n.MAIN_SETTINGS_TEXT
},
{
flex: 1,
xtype: 'container',
layout: {
type: 'vbox',
align: 'center',
pack: 'center'
},
defaults: {
maxWidth: '720px',
width: '100%'
},
items: [
{
xtype: 'titlebar',
title: I18n.MAIN_SETTINGS_OPTIONS_TITLE
},
{
xtype: 'button',
name: 'update',
|
}
]
}
]
}
});
|
text: I18n.MAIN_SETTINGS_OPTIONS_UPDATE_TITLE
}
]
|
XinjiangSpider.py
|
import scrapy
import pickle
import os
import ast
from urllib import parse
from scrapy.selector import Selector
class XinjiangSpider(scrapy.Spider):
name = "Xinjiang"
if not os.path.exists('../../data/HTML_pk/%s' % name):
os.makedirs('../../data/HTML_pk/%s' % name)
if not os.path.exists('../../data/text/%s' % name):
os.makedirs('../../data/text/%s' % name)
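# A sketch of the crawl flow, drawn from the methods below: the request
# generator walks the 34 listing pages of the regulation index, parse()
# yields per-document metadata and follows each detail link, and
# parse_content() pickles the raw HTML and writes the extracted paragraphs
# to plain-text files.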
def
|
(self):
total_page = 34
# total_page = 3
url_base = 'http://www.xinjiang.gov.cn/xinjiang/gfxwj/zfxxgk_gknrz{0}.shtml'
for i in range(total_page):
page = '_'+ str(i+1) if i > 0 else ''
yield scrapy.Request(url=url_base.format(page), callback=self.parse)
def parse(self,response):
detail_page_links = []
for dd in response.css('div.gknr_list dd'):
url = response.urljoin(dd.css('a::attr(href)').get())
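# the document UID is the link's file name with the trailing '.shtml' stripped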
UID = url.split('/')[-1][:-6]
if '?' not in UID:
detail_page_links.append(url)
yield {
'UID': UID,
'title': dd.css('a::attr(title)').get(),
'date': dd.css('span::text').get(),
'FileNumber':None,
'text length':0,
'url': url,
'crawl state':'half'
}
yield from response.follow_all(detail_page_links, callback = self.parse_content)
def parse_content(self, response):
UID = response.url.split('/')[-1][:-6]
doc_info_dict = {}
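# Collect the metadata table: each <li> either yields separate (label, value)
# text nodes or a single "label:value" string. The key '发文字号' below is the
# page's "document file number" field.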
for li in response.css('ul.clearfix li'):
tmp_l = li.css('*::text').getall()
if len(tmp_l) == 2:
doc_info_dict[tmp_l[0]] = tmp_l[1]
else:
tmp_l = tmp_l[0].split(':')
if len(tmp_l) == 2:
doc_info_dict[tmp_l[0]] = tmp_l[1]
File_num = None
if '发文字号' in doc_info_dict.keys():
File_num = doc_info_dict['发文字号']
paragraph_list = response.css('div.gknbxq_detail p *::text').getall()
attachment_link = response.css('div.ewebeditor_doc img::attr(src)').getall()
if len(paragraph_list) == 0:
paragraph_list = response.css('p *::text').getall()
length = len(''.join(paragraph_list))
if length > 0:
state = 'full'
with open('../../data/HTML_pk/%s/%s.pkl' % (self.name,UID), 'wb') as f:
pickle.dump(response.text,f)
with open('../../data/text/%s/%s.txt' % (self.name,UID), 'w') as f:
f.write('\n'.join(paragraph_list))
else:
state = 'empty'
return {
'UID': UID,
'FileNumber':File_num,
'mainText': paragraph_list,
'attachment_link': attachment_link,
'doc_info_dict':doc_info_dict,
'crawl state':state,
'text length':length,
}
|
start_requests
|
error.rs
|
// Copyright 2021 The BMW Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::threadpool::FuturesHolder;
use failure::{Backtrace, Context, Fail};
#[cfg(unix)]
use nix::errno::Errno;
use std::ffi::OsString;
use std::fmt;
use std::fmt::Display;
use std::num::ParseIntError;
use std::str::Utf8Error;
/// Base Error struct which is used throughout this crate and other crates
#[derive(Debug, Fail)]
pub struct Error {
inner: Context<ErrorKind>,
}
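// Construction sketch (illustrative, not from the original source): any
// `ErrorKind` converts into `Error` through the `From` impl below, e.g.
// `let err: Error = ErrorKind::SetupError("bad config".to_string()).into();`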
/// Kinds of errors that can occur
#[derive(Clone, Eq, PartialEq, Debug, Fail)]
pub enum ErrorKind {
/// IOError Error
#[fail(display = "IOError Error: {}", _0)]
IOError(String),
/// Send Error
#[fail(display = "Send Error: {}", _0)]
|
InternalError(String),
/// Stale Fd
#[fail(display = "Stale Fd Error: {}", _0)]
StaleFdError(String),
/// Array Index out of bounds
#[fail(display = "ArrayIndexOutofBounds: {}", _0)]
ArrayIndexOutofBounds(String),
/// Setup Error
#[fail(display = "Setup Error: {}", _0)]
SetupError(String),
/// Log not configured
#[fail(display = "Log not configured Error: {}", _0)]
LogNotConfigured(String),
/// OsString error
#[fail(display = "OsString Error: {}", _0)]
OsStringError(String),
/// Poison error multiple locks
#[fail(display = "Poison Error: {}", _0)]
PoisonError(String),
/// Connection close
#[fail(display = "Connection Close Error: {}", _0)]
ConnectionCloseError(String),
/// Ordering Error
#[fail(display = "Ordering Error: {}", _0)]
OrderingError(String),
/// Invalid RSP (Rust Server Page)
#[fail(display = "Invalid RSP Error: {}", _0)]
InvalidRSPError(String),
/// UnexpectedData
#[fail(display = "Unexpected Data Error: {}", _0)]
UnexpectedData(String),
/// TooLargeRead
#[fail(display = "TooLargeRead Error: {}", _0)]
TooLargeRead(String),
/// CorruptedData
#[fail(display = "Corrupted Data Error: {}", _0)]
CorruptedData(String),
/// CountError
#[fail(display = "CountError: {}", _0)]
CountError(String),
/// ParseIntError
#[fail(display = "ParseIntError: {}", _0)]
ParseIntError(String),
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let cause = match self.cause() {
Some(c) => format!("{}", c),
None => String::from("Unknown"),
};
let backtrace = match self.backtrace() {
Some(b) => format!("{}", b),
None => String::from("Unknown"),
};
let output = format!(
"{} \n Cause: {} \n Backtrace: {}",
self.inner, cause, backtrace
);
Display::fmt(&output, f)
}
}
impl Error {
/// get kind
pub fn kind(&self) -> ErrorKind {
self.inner.get_context().clone()
}
/// get cause
pub fn cause(&self) -> Option<&dyn Fail> {
self.inner.cause()
}
/// get backtrace
pub fn backtrace(&self) -> Option<&Backtrace> {
self.inner.backtrace()
}
}
impl From<ErrorKind> for Error {
fn from(kind: ErrorKind) -> Error {
Error {
inner: Context::new(kind),
}
}
}
impl From<std::io::Error> for Error {
fn from(e: std::io::Error) -> Error {
Error {
inner: Context::new(ErrorKind::IOError(format!("{}", e))),
}
}
}
#[cfg(unix)]
impl From<Errno> for Error {
fn from(e: Errno) -> Error {
Error {
inner: Context::new(ErrorKind::IOError(format!("{}", e))),
}
}
}
impl From<Utf8Error> for Error {
fn from(e: Utf8Error) -> Error {
Error {
inner: Context::new(ErrorKind::IOError(format!("{}", e))),
}
}
}
impl From<std::sync::mpsc::SendError<(FuturesHolder, bool)>> for Error {
fn from(e: std::sync::mpsc::SendError<(FuturesHolder, bool)>) -> Error {
Error {
inner: Context::new(ErrorKind::IOError(format!("{}", e))),
}
}
}
impl From<OsString> for Error {
fn from(e: OsString) -> Error {
Error {
inner: Context::new(ErrorKind::OsStringError(format!("{:?}", e))),
}
}
}
impl From<ParseIntError> for Error {
fn from(e: ParseIntError) -> Error {
Error {
inner: Context::new(ErrorKind::ParseIntError(format!("{}", e))),
}
}
}
|
SendError(String),
/// Internal Error
#[fail(display = "Internal Error: {}", _0)]
|
lib.rs
|
// Copyright 2015-2016 Brian Smith.
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHORS DISCLAIM ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
// SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
// OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
// CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
//! Safe, fast, small crypto using Rust with BoringSSL's cryptography
//! primitives.
//!
//! # Feature Flags
//!
//! <table>
//! <tr><th>Feature
//! <th>Description
//! <tr><td><code>alloc (default)</code>
//! <td>Enable features that require use of the heap, RSA in particular.
//! <tr><td><code>dev_urandom_fallback (default)</code>
//! <td>This is only applicable to Linux. On Linux, by default,
//! <code>ring::rand::SystemRandom</code> will fall back to reading
//! from <code>/dev/urandom</code> if the <code>getrandom()</code>
//! syscall isn't supported at runtime. When the
//! <code>dev_urandom_fallback</code> feature is disabled, such
//! fallbacks will not occur. See the documentation for
//! <code>rand::SystemRandom</code> for more details.
//! <tr><td><code>std</code>
//! <td>Enable features that use libstd, in particular
|
//! <code>std::error::Error</code> integration. Implies `alloc`.
//! <tr><td><code>wasm32_unknown_unknown_js</code>
//! <td>When this feature is enabled, for the wasm32-unknown-unknown target,
//! Web APIs will be used to implement features like `ring::rand` that
//! require an operating environment of some kind. This has no effect
//! for any other target.
//! </table>
// When running mk/package.sh, don't actually build any code.
#![cfg(not(pregenerate_asm_only))]
#![doc(html_root_url = "https://briansmith.org/rustdoc/")]
#![allow(
missing_copy_implementations,
missing_debug_implementations,
non_camel_case_types,
non_snake_case,
unsafe_code,
// Oak Note: Do not warn when building deprecated code. Ring includes
// code that is, and our CI fails on warnings
deprecated
)]
// `#[derive(...)]` uses `trivial_numeric_casts` and `unused_qualifications`
// internally.
#![deny(missing_docs, unused_qualifications, variant_size_differences)]
#![forbid(unused_results)]
#![no_std]
#[cfg(feature = "alloc")]
extern crate alloc;
#[macro_use]
mod debug;
#[macro_use]
mod prefixed;
#[macro_use]
pub mod test;
#[macro_use]
mod arithmetic;
#[macro_use]
mod bssl;
#[macro_use]
mod polyfill;
pub mod aead;
pub mod agreement;
mod bits;
pub(crate) mod c;
pub mod constant_time;
pub mod io;
mod cpu;
pub mod digest;
mod ec;
mod endian;
pub mod error;
pub mod hkdf;
pub mod hmac;
mod limb;
pub mod pbkdf2;
pub mod pkcs8;
pub mod rand;
#[cfg(feature = "alloc")]
pub mod rsa;
pub mod signature;
mod sealed {
/// Traits that are designed to only be implemented internally in *ring*.
//
// Usage:
// ```
// use crate::sealed;
//
// pub trait MyType: sealed::Sealed {
// // [...]
// }
//
// impl sealed::Sealed for MyType {}
// ```
pub trait Sealed {}
}
| |
solution.go
|
package n0305
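// numIslands2 answers the "Number of Islands II" problem: positions are added
// one at a time, and after each addition the current island count is recorded.
// A union-find over the flattened m*n grid merges each newly added cell with
// any already-visited 4-neighbours.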
func numIslands2(m int, n int, positions [][]int) []int {
finder := Constructor(m * n)
vis := make([]bool, m*n)
ans := []int{}
directs := [][]int{{-1, 0}, {1, 0}, {0, -1}, {0, 1}}
for _, pos := range positions {
currx, curry := pos[0], pos[1]
index := currx*n + curry
if !vis[index]
|
ans = append(ans, finder.Count)
}
return ans
}
type UnionFind struct {
Parents []int
Count int
}
func Constructor(N int) UnionFind {
finder := UnionFind{
Parents: make([]int, N),
Count: 0,
}
for i := range finder.Parents {
finder.Parents[i] = i
}
return finder
}
// find returns the root of x's set, compressing the path as it unwinds.
func (f *UnionFind) find(x int) int {
	if f.Parents[x] != x {
		f.Parents[x] = f.find(f.Parents[x])
	}
	return f.Parents[x]
}
// union merges the sets containing x and y; every successful merge joins
// two islands, so the island count drops by one.
func (f *UnionFind) union(x, y int) {
	rootx := f.find(x)
	rooty := f.find(y)
	if rootx == rooty {
		return
	}
	f.Count--
	f.Parents[rootx] = rooty
}
func (f *UnionFind) isConnected(x, y int) bool {
return f.find(x) == f.find(y)
}
func (f *UnionFind) addCount() {
f.Count++
}
|
{
finder.addCount()
vis[index] = true
for _, d := range directs {
newx, newy := currx+d[0], curry+d[1]
newIndex := newx*n + newy
if newx >= 0 && newy >= 0 && newx < m && newy < n && vis[newIndex] && !finder.isConnected(index, newIndex) {
finder.union(index, newIndex)
}
}
}
|
translate_v3_get_supported_languages_with_target.py
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START translate_v3_get_supported_languages_for_target]
from google.cloud import translate
def get_supported_languages_with_target(project_id="YOUR_PROJECT_ID"):
"""Listing supported languages with target language name."""
client = translate.TranslationServiceClient()
|
response = client.get_supported_languages(
display_language_code="is", # target language code
parent=parent
)
# List language codes of supported languages
for language in response.languages:
print(u"Language Code: {}".format(language.language_code))
print(u"Display Name: {}".format(language.display_name))
# [END translate_v3_get_supported_languages_for_target]
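# Call sketch (illustrative): get_supported_languages_with_target("my-gcp-project")
# prints each supported language code with its display name rendered in
# Icelandic ("is").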
|
parent = client.location_path(project_id, "global")
# Supported language codes: https://cloud.google.com/translate/docs/languages
|
util.ts
|
import {InjectDefineSymbol} from "../decorators/decorators";
import {Define} from "../define/define";
export class Util {
public static getClassName(fn: Function): string {
return fn.name.charAt(0).toLowerCase() + fn.name.slice(1)
}
public static isUndefined(value: any): boolean {
return typeof value === 'undefined'
}
public static isObject(val: any): boolean {
if (val === null) {
return false;
}
return ((typeof val === 'function') || (typeof val === 'object'));
}
public static isFunction(obj: any): boolean {
return !!(obj && obj.constructor && obj.call && obj.apply);
};
public static getClassNameOrId(objectId: string | Function): string {
if (Util.isFunction(objectId)) {
objectId = Util.getClassName(objectId as Function);
}
return objectId as string;
}
public static isClass(v: any): boolean {
return typeof v === 'function' && v.name && /^\s*class\s+/.test(v.toString());
}
public static getClassDefinition(fn: any): Define {
return Util.getReflectData<Define>(InjectDefineSymbol, fn)
}
public static isString(str: any): boolean {
return (typeof str === 'string' || str instanceof String);
}
public static keyBy<T extends object>(arr: T[], key: string | ((item: T, index: number) => string)) {
let output: { [index: string]: T } = {};
for (let i = 0, len = (arr || []).length; i < len; i++) {
let item: any = arr[i];
let outputKey = Util.isFunction(key) ? (key as Function)(item, i) : item[key as string];
output[outputKey] = item;
}
return output;
}
public static keyByMap<T extends object,K extends any=string>(arr: T[], key: string | ((item: T, index: number) => string)):Map<K,T> {
let output = new Map<K,T>()
for (let i = 0, len = (arr || []).length; i < len; i++) {
let item: any = arr[i];
let outputKey = Util.isFunction(key) ? (key as Function)(item, i) : item[key as string];
output.set(outputKey,item)
}
return output;
}
public static removeFromArray<T>(list: T[], item: T): void {
if (!list || !list.length) {
return;
}
for (let i = list.length - 1; i >= 0; i--) {
if (list[i] === item) {
list.splice(i, 1);
}
}
}
public static groupByArray<T>(arr: T[], key: string | number | ((item: T) => string | number)): { [index: string]: T[] } {
let output: { [index: string]: T[] } = {};
for (let i = 0, len = arr.length; i < len; i++) {
let item = arr[i],
value = (typeof key === "function") ? key(item) : item[key],
dto = output[value] || (output[value] = []);
dto.push(item);
}
return output;
}
public static getClassId(fn: any): string {
if (!fn) {
return null;
}
if (Util.isString(fn)) {
return fn
}
let define = Util.getClassDefinition(fn);
if (define) {
return define.definition.id;
}
if (Util.isClass(fn)) {
return Util.getClassName(fn);
}
return null;
}
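    // Behaviour sketch (illustrative): Util.getFunctionArgs((a, b) => a + b)
    // yields ['a', 'b']; comments are stripped first so the ARGUMENT_NAMES
    // regex only ever sees real parameter names.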
public static getFunctionArgs(func: (...args: any[]) => any) {
const STRIP_COMMENTS = /((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg;
const ARGUMENT_NAMES = /([^\s,]+)/g;
let fnStr = func.toString().replace(STRIP_COMMENTS, '');
let args = fnStr.slice(fnStr.indexOf('(') + 1, fnStr.indexOf(')')).match(ARGUMENT_NAMES);
if (args === null) {
args = [];
}
|
args = Util.compactArray(args);
return args;
}
public static compactArray<T>(array: T[]): T[] {
let index = -1,
length = array == null ? 0 : array.length,
resIndex = 0,
result = [];
while (++index < length) {
let value = array[index];
if (value) {
result[resIndex++] = value;
}
}
return result;
}
public static getReflectData<T>(symbol: Symbol | string, klass, defaultValue?: T): T {
let value = Reflect.getOwnMetadata(symbol, klass);
if (!value && Reflect.hasMetadata(symbol, klass)) {
value = Util.cloneDeep(Reflect.getMetadata(symbol, klass));
Reflect.defineMetadata(symbol, value, klass);
}
if (!value && defaultValue != undefined) {
value = defaultValue;
Reflect.defineMetadata(symbol, value, klass);
}
return value
}
public static cloneDeep<T>(obj: T): T {
if (!obj) {
return;
}
let output = Array.isArray(obj) ? [] : {};
let keys = Object.keys(obj);
for (let i = 0, len = keys.length; i < len; i++) {
let key = keys[i], value = obj[key];
output[key] = (value == null || typeof value != "object") ? value : Util.cloneDeep(value)
}
return output as any;
}
public static mapPush(map: { [index: string]: Object[] }, key: string, obj: Object): void {
(!map[key]) && (map[key] = []);
map[key].push(obj);
}
public static createDelegate(fn: Function, obj: any, args: any[]): Function {
return function () {
let callArgs = (args || []).concat(arguments);
return fn.apply(obj, callArgs);
};
}
public static regroupByParallel<T>(arr: T[], fn: (item: T) => boolean): T[][] {
let output: T[][] = [];
for (let i = 0, len = arr ? arr.length : 0; i < len; i++) {
let item = arr[i], lastItemArr = output[output.length - 1];
if (fn(item) && lastItemArr && lastItemArr.length && fn(lastItemArr[0])) {
lastItemArr.push(item)
} else {
output.push([item])
}
}
return output;
}
public static async runRegroupByParallel<T>(arr: T[], fn: (item: T) => boolean, runFn: (item: T) => Promise<any>): Promise<void> {
let itemsArr = Util.regroupByParallel(arr, fn);
for (let i = 0, len = (itemsArr || []).length; i < len; i++) {
let items = itemsArr[i];
let promises = (items || []).map(item => runFn(item));
await Promise.all(promises)
}
}
}
| |
carbon-icons-react-lib-send--filled-32-31d706ffb4f1530961d5.js
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[1554],{"0sHi":function(e,t,r){"use strict";var n=r("VHp3");r("O9pe"),r("17x9");var o,c=(o=r("q1tI"))&&"object"==typeof o&&"default"in o?o.default:o,i=c.createElement("path",{d:"M27.45,15.11l-22-11a1,1,0,0,0-1.08.12,1,1,0,0,0-.33,1L6.69,15H18v2H6.69L4,26.74A1,1,0,0,0,5,28a1,1,0,0,0,.45-.11l22-11a1,1,0,0,0,0-1.78Z"}),a=c.forwardRef((function(e,t){var r=e.children,o=n._objectWithoutProperties(e,["children"]);return c.createElement(n.Icon,n._extends({width:32,height:32,viewBox:"0 0 32 32",xmlns:"http://www.w3.org/2000/svg",fill:"currentColor",ref:t},o),i,r)}));e.exports=a},VHp3:function(e,t,r){"use strict";var n,o=r("O9pe"),c=(n=r("q1tI"))&&"object"==typeof n&&"default"in n?n.default:n;function i(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(){return(a=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var r=arguments[t];for(var n in r)Object.prototype.hasOwnProperty.call(r,n)&&(e[n]=r[n])}return e}).apply(this,arguments)}function l(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function p(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?l(Object(r),!0).forEach((function(t){i(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):l(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function s(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},c=Object.keys(e);for(n=0;n<c.length;n++)r=c[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var c=Object.getOwnPropertySymbols(e);for(n=0;n<c.length;n++)r=c[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var u=c.forwardRef((function(e,t){var r=e.className,n=e.children,i=e.tabIndex,a=s(e,["className","children","tabIndex"]),l=o.getAttributes(p(p({},a),{},{tabindex:i})),u=l.tabindex,f=s(l,["tabindex"]);return r&&(f.className=r),null!=u&&(f.tabIndex=u),t&&(f.ref=t),c.createElement("svg",f,n)}));u.displayName="Icon",u.defaultProps={xmlns:"http://www.w3.org/2000/svg",preserveAspectRatio:"xMidYMid meet"},t.Icon=u,t._extends=a,t._objectWithoutProperties=s}}]);
|
||
group_list_view.py
|
from django_filters.views import FilterView
from django_tables2.views import SingleTableMixin
from django_tables2 import tables, TemplateColumn
from django.contrib.auth.models import Group
from guardian.mixins import LoginRequiredMixin
from profiles.filters.group_filter import GroupFilter
class GroupTable(tables.Table):
actions = TemplateColumn(template_name='custom_columns/group_actions.html', orderable=False)
users = TemplateColumn(template_name='custom_columns/group_users.html', orderable=False)
class Meta:
model = Group
attrs = {"id": "group_table", "class": "table squest-pagination-tables "}
fields = ("name", "users", "actions")
class GroupListView(LoginRequiredMixin, SingleTableMixin, FilterView):
|
table_pagination = {'per_page': 10}
table_class = GroupTable
model = Group
template_name = 'generics/list.html'
filterset_class = GroupFilter
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['title'] = "Groups"
context['html_button_path'] = "generics/buttons/add_group.html"
context['object_name'] = 'group'
return context
|
|
interrupt.rs
|
//! Process-global interrupt handling
//!
//! This module contains facilities to globally request an interrupt, which will cause supporting computations to
//! abort once it is observed.
//! Such checks for interrupts are provided by custom implementations of various traits, which transparently add
//! interrupt support to methods that wouldn't otherwise have it; see [`Read`].
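//!
//! A minimal usage sketch (illustrative only, assuming this module is mounted as `interrupt`):
//!
//! ```ignore
//! let mut input = interrupt::Read { inner: std::io::empty() };
//! interrupt::trigger();                  // some other task requests a stop
//! assert!(interrupt::is_triggered());    // observers can poll the flag ...
//! // ... and the next `io::Read::read` on `input` fails with "interrupted by user"
//! interrupt::reset();                    // clear the request so work may resume
//! ```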
#[cfg(all(feature = "interrupt-handler", not(feature = "disable-interrupts")))]
mod _impl {
use std::{
io,
sync::atomic::{AtomicUsize, Ordering},
};
|
/// Initialize a signal handler to listen to SIGINT and SIGTERM and trigger our [`trigger()`][super::trigger()] that way.
///
/// When `Ctrl+C` is pressed, a message will be sent to `message_channel` to inform the user that the interrupt
/// was registered; after all, actually responding to it is implementation dependent and might thus take some time
/// (or not work at all).
///
/// # Note
///
/// This implementation is available only with the **interrupt-handler** feature toggle with the **disable-interrupts** feature disabled.
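///
/// Call sketch (illustrative; any `io::Write + Send + 'static` sink works):
///
/// ```ignore
/// init_handler(std::io::stderr());
/// ```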
pub fn init_handler(mut message_channel: impl io::Write + Send + 'static) {
ctrlc::set_handler(move || {
const MESSAGES: &[&str] = &[
"interrupt requested",
"please wait…",
"the program will respond soon…",
"if the program doesn't respond quickly enough, please let us know here: https://github.com/Byron/gitoxide/issues"
];
static CURRENT_MESSAGE: AtomicUsize = AtomicUsize::new(0);
if !super::is_triggered() {
CURRENT_MESSAGE.store(0, Ordering::Relaxed);
}
let msg_idx = CURRENT_MESSAGE.fetch_add(1, Ordering::Relaxed);
super::IS_INTERRUPTED.store(true, Ordering::Relaxed);
writeln!(message_channel, "{}", MESSAGES[msg_idx % MESSAGES.len()]).ok();
})
.expect("it is up to the application to ensure only one interrupt handler is installed, and this function is called only once.")
}
}
use std::io;
#[cfg(not(feature = "disable-interrupts"))]
use std::sync::atomic::{AtomicBool, Ordering};
#[cfg(any(feature = "disable-interrupts", not(feature = "interrupt-handler")))]
mod _impl {
use std::io;
/// Does nothing, because either the **disable-interrupts** feature is enabled or the **interrupt-handler** feature is not present.
pub fn init_handler(_message_channel: impl io::Write + Send + 'static) {}
}
pub use _impl::init_handler;
/// A wrapper for implementors of [`std::io::Read`] or [`std::io::BufRead`] with interrupt support.
///
/// It fails a [read][`std::io::Read::read`] while an interrupt was requested.
pub struct Read<R> {
/// The actual implementor of [`std::io::Read`] to which interrupt support will be added.
pub inner: R,
}
impl<R> io::Read for Read<R>
where
R: io::Read,
{
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
if is_triggered() {
return Err(io::Error::new(io::ErrorKind::Other, "interrupted by user"));
}
self.inner.read(buf)
}
}
impl<R> io::BufRead for Read<R>
where
R: io::BufRead,
{
fn fill_buf(&mut self) -> io::Result<&[u8]> {
self.inner.fill_buf()
}
fn consume(&mut self, amt: usize) {
self.inner.consume(amt)
}
}
#[cfg(not(feature = "disable-interrupts"))]
static IS_INTERRUPTED: AtomicBool = AtomicBool::new(false);
/// Returns true if an interrupt is requested.
///
/// Only implemented if the **disable-interrupts** feature toggle is not present.
#[cfg(not(feature = "disable-interrupts"))]
pub fn is_triggered() -> bool {
IS_INTERRUPTED.load(Ordering::Relaxed)
}
/// Always returns false when the **disable-interrupts** feature is present.
#[cfg(feature = "disable-interrupts")]
pub fn is_triggered() -> bool {
false
}
/// Trigger an interrupt, signalling to those checking for [`is_triggered()`] to stop what they are doing.
///
/// # Note
/// Only effective if the **disable-interrupts** feature is **not** present.
pub fn trigger() {
#[cfg(not(feature = "disable-interrupts"))]
IS_INTERRUPTED.store(true, Ordering::Relaxed);
}
/// Sets the interrupt request to false, thus allowing those checking for [`is_triggered()`] to proceed.
///
/// Call this in code that is able to trigger an interrupt.
/// This may also be performed by the [`ResetOnDrop`] helper to assure the trigger state is returned
/// to its original state.
///
/// # Note
/// Only effective if the **disable-interrupts** feature is **not** present.
pub fn reset() {
#[cfg(not(feature = "disable-interrupts"))]
IS_INTERRUPTED.store(false, Ordering::Relaxed);
}
/// Useful if some parts of the program set the interrupt programmatically to cause others to stop, while
/// assuring the interrupt state is reset at the end of the function to avoid other side-effects.
///
/// Note that this is inherently racy and that this will only work deterministically if there is only one
/// top-level function running in a process.
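///
/// A guard sketch (illustrative only):
///
/// ```ignore
/// {
///     let _guard = ResetOnDrop::default(); // remembers the current trigger state
///     trigger();                           // make cooperating readers stop
/// } // guard drops here and restores the previous state via `reset()`
/// assert!(!is_triggered());
/// ```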
pub struct ResetOnDrop {
was_interrupted: bool,
}
impl Default for ResetOnDrop {
fn default() -> Self {
ResetOnDrop {
was_interrupted: is_triggered(),
}
}
}
impl Drop for ResetOnDrop {
fn drop(&mut self) {
if self.was_interrupted {
trigger()
} else {
reset()
}
}
}
| |
process_launchers.py
|
import logging
import os
from collections import OrderedDict
from pathlib import Path
import torch
import itertools
import detectron2.utils.comm as comm
from detectron2.checkpoint import DetectionCheckpointer
from detectron2.config import get_cfg
from detectron2.data import MetadataCatalog
from detectron2.engine import DefaultTrainer, default_argument_parser, default_setup, hooks, launch
from detectron2.evaluation import (
CityscapesInstanceEvaluator,
CityscapesSemSegEvaluator,
COCOEvaluator,
COCOPanopticEvaluator,
DatasetEvaluators,
LVISEvaluator,
PascalVOCDetectionEvaluator,
SemSegEvaluator,
verify_results,
)
from detectron2.modeling import GeneralizedRCNNWithTTA
from tools.train_net import build_evaluator, Trainer, maskr_setup, retina_setup
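# The *_test_main_* variants below differ only in cfg.INPUT.WINDOW, the
# checkpoint they load, and the output directory:
# 100 -> (0, 1), 995 -> (0.005, 0.995), 99 -> (0.01, 0.99).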
def maskr_test_main_100(args):
cfg = maskr_setup(args)
cfg.defrost()
cfg.INPUT.WINDOW = (0, 1)
cfg.MODEL.WEIGHTS = 'maskr_final_100.pth'
cfg.OUTPUT_DIR = './maskr_outputs_100'
cfg.freeze()
model = Trainer.build_model(cfg)
DetectionCheckpointer(model, save_dir=args.output_dir).resume_or_load(
cfg.MODEL.WEIGHTS, resume=args.retrain)
res = Trainer.test(cfg, model)
if cfg.TEST.AUG.ENABLED:
res.update(Trainer.test_with_TTA(cfg, model))
if comm.is_main_process():
verify_results(cfg, res)
return res
def maskr_test_main_995(args):
cfg = maskr_setup(args)
cfg.defrost()
cfg.INPUT.WINDOW = (0.005, 0.995)
cfg.MODEL.WEIGHTS = 'maskr_final_995.pth'
cfg.OUTPUT_DIR = './maskr_outputs_995'
cfg.freeze()
model = Trainer.build_model(cfg)
DetectionCheckpointer(model, save_dir=args.output_dir).resume_or_load(
cfg.MODEL.WEIGHTS, resume=args.retrain)
res = Trainer.test(cfg, model)
if cfg.TEST.AUG.ENABLED:
res.update(Trainer.test_with_TTA(cfg, model))
if comm.is_main_process():
verify_results(cfg, res)
return res
def maskr_test_main_99(args):
cfg = maskr_setup(args)
cfg.defrost()
cfg.INPUT.WINDOW = (0.01, 0.99)
cfg.MODEL.WEIGHTS = 'maskr_final_99.pth'
cfg.OUTPUT_DIR = './maskr_outputs_99'
cfg.freeze()
model = Trainer.build_model(cfg)
DetectionCheckpointer(model, save_dir=args.output_dir).resume_or_load(
cfg.MODEL.WEIGHTS, resume=args.retrain)
res = Trainer.test(cfg, model)
if cfg.TEST.AUG.ENABLED:
res.update(Trainer.test_with_TTA(cfg, model))
if comm.is_main_process():
verify_results(cfg, res)
return res
def retina_test_main_100(args):
cfg = retina_setup(args)
cfg.defrost()
cfg.INPUT.WINDOW = (0, 1)
cfg.MODEL.WEIGHTS = 'retina_final_100.pth'
cfg.OUTPUT_DIR = './retina_outputs_100'
cfg.freeze()
model = Trainer.build_model(cfg)
DetectionCheckpointer(model, save_dir=args.output_dir).resume_or_load(
cfg.MODEL.WEIGHTS, resume=args.retrain)
res = Trainer.test(cfg, model)
if cfg.TEST.AUG.ENABLED:
res.update(Trainer.test_with_TTA(cfg, model))
if comm.is_main_process():
verify_results(cfg, res)
return res
def retina_test_main_995(args):
cfg = retina_setup(args)
cfg.defrost()
cfg.INPUT.WINDOW = (0.005, 0.995)
cfg.MODEL.WEIGHTS = 'retina_final_995.pth'
cfg.OUTPUT_DIR = './retina_outputs_995'
cfg.freeze()
model = Trainer.build_model(cfg)
DetectionCheckpointer(model, save_dir=args.output_dir).resume_or_load(
cfg.MODEL.WEIGHTS, resume=args.retrain)
res = Trainer.test(cfg, model)
if cfg.TEST.AUG.ENABLED:
res.update(Trainer.test_with_TTA(cfg, model))
if comm.is_main_process():
verify_results(cfg, res)
return res
def
|
(args):
cfg = retina_setup(args)
cfg.defrost()
cfg.INPUT.WINDOW = (0.01, 0.99)
cfg.MODEL.WEIGHTS = 'retina_final_99.pth'
cfg.OUTPUT_DIR = './retina_outputs_99'
cfg.freeze()
model = Trainer.build_model(cfg)
DetectionCheckpointer(model, save_dir=args.output_dir).resume_or_load(
cfg.MODEL.WEIGHTS, resume=args.retrain)
res = Trainer.test(cfg, model)
if cfg.TEST.AUG.ENABLED:
res.update(Trainer.test_with_TTA(cfg, model))
if comm.is_main_process():
verify_results(cfg, res)
return res
def train_main_maskr(args):
cfg = maskr_setup(args)
"""
If you'd like to do anything fancier than the standard training logic,
consider writing your own training loop (see plain_train_net.py) or
subclassing the trainer.
"""
trainer = Trainer(cfg)
trainer.resume_or_load(resume=args.retrain)
if cfg.TEST.AUG.ENABLED:
trainer.register_hooks(
[hooks.EvalHook(0, lambda: trainer.test_with_TTA(cfg, trainer.model))]
)
return trainer.train()
def train_main_retina(args):
cfg = retina_setup(args)
"""
If you'd like to do anything fancier than the standard training logic,
consider writing your own training loop (see plain_train_net.py) or
subclassing the trainer.
"""
trainer = Trainer(cfg)
trainer.resume_or_load(resume=args.retrain)
if cfg.TEST.AUG.ENABLED:
trainer.register_hooks(
[hooks.EvalHook(0, lambda: trainer.test_with_TTA(cfg, trainer.model))]
)
return trainer.train()
|
retina_test_main_99
|
EventDefinition.rs
|
#![allow(unused_imports, non_camel_case_types)]
use crate::models::r4::CodeableConcept::CodeableConcept;
use crate::models::r4::ContactDetail::ContactDetail;
use crate::models::r4::Element::Element;
use crate::models::r4::Extension::Extension;
use crate::models::r4::Identifier::Identifier;
use crate::models::r4::Meta::Meta;
use crate::models::r4::Narrative::Narrative;
use crate::models::r4::Period::Period;
use crate::models::r4::Reference::Reference;
use crate::models::r4::RelatedArtifact::RelatedArtifact;
use crate::models::r4::ResourceList::ResourceList;
use crate::models::r4::TriggerDefinition::TriggerDefinition;
use crate::models::r4::UsageContext::UsageContext;
use serde_json::json;
use serde_json::value::Value;
use std::borrow::Cow;
/// The EventDefinition resource provides a reusable description of when a particular
/// event can occur.
#[derive(Debug)]
pub struct EventDefinition<'a> {
pub(crate) value: Cow<'a, Value>,
}
impl EventDefinition<'_> {
pub fn new(value: &Value) -> EventDefinition {
EventDefinition {
value: Cow::Borrowed(value),
}
}
pub fn to_json(&self) -> Value {
(*self.value).clone()
}
/// Extensions for approvalDate
pub fn _approval_date(&self) -> Option<Element> {
if let Some(val) = self.value.get("_approvalDate") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for copyright
pub fn _copyright(&self) -> Option<Element> {
if let Some(val) = self.value.get("_copyright") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for date
pub fn _date(&self) -> Option<Element> {
if let Some(val) = self.value.get("_date") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for description
pub fn _description(&self) -> Option<Element> {
if let Some(val) = self.value.get("_description") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for experimental
pub fn _experimental(&self) -> Option<Element> {
if let Some(val) = self.value.get("_experimental") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for implicitRules
pub fn _implicit_rules(&self) -> Option<Element> {
if let Some(val) = self.value.get("_implicitRules") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for language
pub fn _language(&self) -> Option<Element> {
if let Some(val) = self.value.get("_language") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for lastReviewDate
pub fn _last_review_date(&self) -> Option<Element> {
if let Some(val) = self.value.get("_lastReviewDate") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for name
pub fn _name(&self) -> Option<Element> {
if let Some(val) = self.value.get("_name") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for publisher
pub fn _publisher(&self) -> Option<Element> {
if let Some(val) = self.value.get("_publisher") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for purpose
pub fn _purpose(&self) -> Option<Element> {
if let Some(val) = self.value.get("_purpose") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for status
pub fn _status(&self) -> Option<Element> {
if let Some(val) = self.value.get("_status") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for subtitle
pub fn _subtitle(&self) -> Option<Element> {
if let Some(val) = self.value.get("_subtitle") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for title
pub fn _title(&self) -> Option<Element> {
if let Some(val) = self.value.get("_title") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for url
pub fn _url(&self) -> Option<Element> {
if let Some(val) = self.value.get("_url") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for usage
pub fn _usage(&self) -> Option<Element> {
if let Some(val) = self.value.get("_usage") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for version
pub fn _version(&self) -> Option<Element> {
if let Some(val) = self.value.get("_version") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// The date on which the resource content was approved by the publisher. Approval
/// happens once when the content is officially approved for usage.
pub fn approval_date(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("approvalDate") {
return Some(string);
}
return None;
}
/// An individual or organization primarily involved in the creation and maintenance
/// of the content.
pub fn author(&self) -> Option<Vec<ContactDetail>> {
if let Some(Value::Array(val)) = self.value.get("author") {
return Some(
val.into_iter()
.map(|e| ContactDetail {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Contact details to assist a user in finding and communicating with the publisher.
pub fn contact(&self) -> Option<Vec<ContactDetail>> {
if let Some(Value::Array(val)) = self.value.get("contact") {
return Some(
val.into_iter()
.map(|e| ContactDetail {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// These resources do not have an independent existence apart from the resource that
/// contains them - they cannot be identified independently, and nor can they have
/// their own independent transaction scope.
pub fn contained(&self) -> Option<Vec<ResourceList>> {
if let Some(Value::Array(val)) = self.value.get("contained") {
return Some(
val.into_iter()
.map(|e| ResourceList {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// A copyright statement relating to the event definition and/or its contents.
/// Copyright statements are generally legal restrictions on the use and publishing of
/// the event definition.
pub fn copyright(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("copyright") {
return Some(string);
}
return None;
}
/// The date (and optionally time) when the event definition was published. The date
/// must change when the business version changes and it must change if the status
/// code changes. In addition, it should change when the substantive content of the
/// event definition changes.
pub fn date(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("date") {
return Some(string);
}
return None;
}
/// A free text natural language description of the event definition from a consumer's
/// perspective.
pub fn description(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("description") {
return Some(string);
}
return None;
}
/// An individual or organization primarily responsible for internal coherence of
/// the content.
pub fn editor(&self) -> Option<Vec<ContactDetail>>
|
/// The period during which the event definition content was or is planned to be in
/// active use.
pub fn effective_period(&self) -> Option<Period> {
if let Some(val) = self.value.get("effectivePeriod") {
return Some(Period {
value: Cow::Borrowed(val),
});
}
return None;
}
/// An individual or organization responsible for officially endorsing the content for
/// use in some setting.
pub fn endorser(&self) -> Option<Vec<ContactDetail>> {
if let Some(Value::Array(val)) = self.value.get("endorser") {
return Some(
val.into_iter()
.map(|e| ContactDetail {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// A Boolean value to indicate that this event definition is authored for testing
/// purposes (or education/evaluation/marketing) and is not intended for
/// genuine usage.
pub fn experimental(&self) -> Option<bool> {
if let Some(val) = self.value.get("experimental") {
return Some(val.as_bool().unwrap());
}
return None;
}
/// May be used to represent additional information that is not part of the basic
/// definition of the resource. To make the use of extensions safe and manageable,
/// there is a strict set of governance applied to the definition and use of
/// extensions. Though any implementer can define an extension, there is a set of
/// requirements that SHALL be met as part of the definition of the extension.
pub fn extension(&self) -> Option<Vec<Extension>> {
if let Some(Value::Array(val)) = self.value.get("extension") {
return Some(
val.into_iter()
.map(|e| Extension {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The logical id of the resource, as used in the URL for the resource. Once
/// assigned, this value never changes.
pub fn id(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("id") {
return Some(string);
}
return None;
}
/// A formal identifier that is used to identify this event definition when it is
/// represented in other formats, or referenced in a specification, model, design or
/// an instance.
pub fn identifier(&self) -> Option<Vec<Identifier>> {
if let Some(Value::Array(val)) = self.value.get("identifier") {
return Some(
val.into_iter()
.map(|e| Identifier {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// A reference to a set of rules that were followed when the resource was
/// constructed, and which must be understood when processing the content. Often, this
/// is a reference to an implementation guide that defines the special rules along
/// with other profiles etc.
pub fn implicit_rules(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("implicitRules") {
return Some(string);
}
return None;
}
/// A legal or geographic region in which the event definition is intended to be used.
pub fn jurisdiction(&self) -> Option<Vec<CodeableConcept>> {
if let Some(Value::Array(val)) = self.value.get("jurisdiction") {
return Some(
val.into_iter()
.map(|e| CodeableConcept {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The base language in which the resource is written.
pub fn language(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("language") {
return Some(string);
}
return None;
}
/// The date on which the resource content was last reviewed. Review happens
/// periodically after approval but does not change the original approval date.
pub fn last_review_date(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("lastReviewDate") {
return Some(string);
}
return None;
}
/// The metadata about the resource. This is content that is maintained by the
/// infrastructure. Changes to the content might not always be associated with version
/// changes to the resource.
pub fn meta(&self) -> Option<Meta> {
if let Some(val) = self.value.get("meta") {
return Some(Meta {
value: Cow::Borrowed(val),
});
}
return None;
}
/// May be used to represent additional information that is not part of the basic
/// definition of the resource and that modifies the understanding of the element
/// that contains it and/or the understanding of the containing element's descendants.
/// Usually modifier elements provide negation or qualification. To make the use of
/// extensions safe and manageable, there is a strict set of governance applied to
/// the definition and use of extensions. Though any implementer is allowed to define
/// an extension, there is a set of requirements that SHALL be met as part of the
/// definition of the extension. Applications processing a resource are required to
/// check for modifier extensions. Modifier extensions SHALL NOT change the meaning
/// of any elements on Resource or DomainResource (including cannot change the meaning
/// of modifierExtension itself).
pub fn modifier_extension(&self) -> Option<Vec<Extension>> {
if let Some(Value::Array(val)) = self.value.get("modifierExtension") {
return Some(
val.into_iter()
.map(|e| Extension {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// A natural language name identifying the event definition. This name should be
/// usable as an identifier for the module by machine processing applications such as
/// code generation.
pub fn name(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("name") {
return Some(string);
}
return None;
}
/// The name of the organization or individual that published the event definition.
pub fn publisher(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("publisher") {
return Some(string);
}
return None;
}
/// Explanation of why this event definition is needed and why it has been designed as
/// it has.
pub fn purpose(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("purpose") {
return Some(string);
}
return None;
}
/// Related resources such as additional documentation, justification, or
/// bibliographic references.
pub fn related_artifact(&self) -> Option<Vec<RelatedArtifact>> {
if let Some(Value::Array(val)) = self.value.get("relatedArtifact") {
return Some(
val.into_iter()
.map(|e| RelatedArtifact {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// An individual or organization primarily responsible for review of some aspect of
/// the content.
pub fn reviewer(&self) -> Option<Vec<ContactDetail>> {
if let Some(Value::Array(val)) = self.value.get("reviewer") {
return Some(
val.into_iter()
.map(|e| ContactDetail {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The status of this event definition. Enables tracking the life-cycle of the
/// content.
pub fn status(&self) -> Option<EventDefinitionStatus> {
if let Some(Value::String(val)) = self.value.get("status") {
return Some(EventDefinitionStatus::from_string(&val).unwrap());
}
return None;
}
/// A code or group definition that describes the intended subject of the event
/// definition.
pub fn subject_codeable_concept(&self) -> Option<CodeableConcept> {
if let Some(val) = self.value.get("subjectCodeableConcept") {
return Some(CodeableConcept {
value: Cow::Borrowed(val),
});
}
return None;
}
/// A code or group definition that describes the intended subject of the event
/// definition.
pub fn subject_reference(&self) -> Option<Reference> {
if let Some(val) = self.value.get("subjectReference") {
return Some(Reference {
value: Cow::Borrowed(val),
});
}
return None;
}
/// An explanatory or alternate title for the event definition giving additional
/// information about its content.
pub fn subtitle(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("subtitle") {
return Some(string);
}
return None;
}
/// A human-readable narrative that contains a summary of the resource and can be used
/// to represent the content of the resource to a human. The narrative need not encode
/// all the structured data, but is required to contain sufficient detail to make it
/// "clinically safe" for a human to just read the narrative. Resource definitions
/// may define what content should be represented in the narrative to ensure clinical
/// safety.
pub fn text(&self) -> Option<Narrative> {
if let Some(val) = self.value.get("text") {
return Some(Narrative {
value: Cow::Borrowed(val),
});
}
return None;
}
/// A short, descriptive, user-friendly title for the event definition.
pub fn title(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("title") {
return Some(string);
}
return None;
}
/// Descriptive topics related to the module. Topics provide a high-level
/// categorization of the module that can be useful for filtering and searching.
pub fn topic(&self) -> Option<Vec<CodeableConcept>> {
if let Some(Value::Array(val)) = self.value.get("topic") {
return Some(
val.into_iter()
.map(|e| CodeableConcept {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The trigger element defines when the event occurs. If more than one trigger
/// condition is specified, the event fires whenever any one of the trigger conditions
/// is met.
pub fn trigger(&self) -> Vec<TriggerDefinition> {
self.value
.get("trigger")
.unwrap()
.as_array()
.unwrap()
.into_iter()
.map(|e| TriggerDefinition {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>()
}
/// An absolute URI that is used to identify this event definition when it is
/// referenced in a specification, model, design or an instance; also called its
/// canonical identifier. This SHOULD be globally unique and SHOULD be a literal
/// address at which an authoritative instance of this event definition is
/// (or will be) published. This URL can be the target of a canonical reference. It
/// SHALL remain the same when the event definition is stored on different servers.
pub fn url(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("url") {
return Some(string);
}
return None;
}
/// A detailed description of how the event definition is used from a clinical
/// perspective.
pub fn usage(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("usage") {
return Some(string);
}
return None;
}
/// The content was developed with a focus and intent of supporting the contexts that
/// are listed. These contexts may be general categories (gender, age, ...) or may be
/// references to specific programs (insurance plans, studies, ...) and may be used to
/// assist with indexing and searching for appropriate event definition instances.
pub fn use_context(&self) -> Option<Vec<UsageContext>> {
if let Some(Value::Array(val)) = self.value.get("useContext") {
return Some(
val.into_iter()
.map(|e| UsageContext {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The identifier that is used to identify this version of the event definition
/// when it is referenced in a specification, model, design or instance. This is an
/// arbitrary value managed by the event definition author and is not expected to be
/// globally unique. For example, it might be a timestamp (e.g. yyyymmdd) if a managed
/// version is not available. There is also no expectation that versions can be placed
/// in a lexicographical sequence.
pub fn version(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("version") {
return Some(string);
}
return None;
}
pub fn validate(&self) -> bool {
if let Some(_val) = self._approval_date() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._copyright() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._date() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._description() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._experimental() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._implicit_rules() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._language() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._last_review_date() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._name() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._publisher() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._purpose() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._status() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._subtitle() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._title() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._url() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._usage() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._version() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.approval_date() {}
if let Some(_val) = self.author() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.contact() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.contained() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.copyright() {}
if let Some(_val) = self.date() {}
if let Some(_val) = self.description() {}
if let Some(_val) = self.editor() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.effective_period() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.endorser() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.experimental() {}
if let Some(_val) = self.extension() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.id() {}
if let Some(_val) = self.identifier() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.implicit_rules() {}
if let Some(_val) = self.jurisdiction() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.language() {}
if let Some(_val) = self.last_review_date() {}
if let Some(_val) = self.meta() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.modifier_extension() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.name() {}
if let Some(_val) = self.publisher() {}
if let Some(_val) = self.purpose() {}
if let Some(_val) = self.related_artifact() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.reviewer() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.status() {}
if let Some(_val) = self.subject_codeable_concept() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.subject_reference() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.subtitle() {}
if let Some(_val) = self.text() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.title() {}
if let Some(_val) = self.topic() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if !self
.trigger()
.into_iter()
.map(|e| e.validate())
.all(|x| x == true)
{
return false;
}
if let Some(_val) = self.url() {}
if let Some(_val) = self.usage() {}
if let Some(_val) = self.use_context() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.version() {}
return true;
}
}
#[derive(Debug)]
pub struct EventDefinitionBuilder {
pub(crate) value: Value,
}
impl EventDefinitionBuilder {
pub fn build(&self) -> EventDefinition {
EventDefinition {
value: Cow::Owned(self.value.clone()),
}
}
pub fn with(existing: EventDefinition) -> EventDefinitionBuilder {
EventDefinitionBuilder {
value: (*existing.value).clone(),
}
}
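    /// A construction sketch (illustrative values; assumes `TriggerDefinition`
    /// exposes the same `new(&Value)` constructor as the other models in this
    /// crate):
    ///
    /// ```ignore
    /// let trigger_json = json!({ "type": "named-event" });
    /// let trigger = TriggerDefinition::new(&trigger_json);
    /// let mut builder = EventDefinitionBuilder::new(vec![trigger]);
    /// builder.description("Fires when a patient is admitted.");
    /// let event: EventDefinition = builder.build();
    /// ```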
pub fn new(trigger: Vec<TriggerDefinition>) -> EventDefinitionBuilder {
let mut __value: Value = json!({});
__value["trigger"] = json!(trigger.into_iter().map(|e| e.value).collect::<Vec<_>>());
return EventDefinitionBuilder { value: __value };
}
pub fn _approval_date<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_approvalDate"] = json!(val.value);
return self;
}
pub fn _copyright<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_copyright"] = json!(val.value);
return self;
}
pub fn _date<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_date"] = json!(val.value);
return self;
}
pub fn _description<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_description"] = json!(val.value);
return self;
}
pub fn _experimental<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_experimental"] = json!(val.value);
return self;
}
pub fn _implicit_rules<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_implicitRules"] = json!(val.value);
return self;
}
pub fn _language<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_language"] = json!(val.value);
return self;
}
pub fn _last_review_date<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_lastReviewDate"] = json!(val.value);
return self;
}
pub fn _name<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_name"] = json!(val.value);
return self;
}
pub fn _publisher<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_publisher"] = json!(val.value);
return self;
}
pub fn _purpose<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_purpose"] = json!(val.value);
return self;
}
pub fn _status<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_status"] = json!(val.value);
return self;
}
pub fn _subtitle<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_subtitle"] = json!(val.value);
return self;
}
pub fn _title<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_title"] = json!(val.value);
return self;
}
pub fn _url<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_url"] = json!(val.value);
return self;
}
pub fn _usage<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_usage"] = json!(val.value);
return self;
}
pub fn _version<'a>(&'a mut self, val: Element) -> &'a mut EventDefinitionBuilder {
self.value["_version"] = json!(val.value);
return self;
}
pub fn approval_date<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["approvalDate"] = json!(val);
return self;
}
pub fn author<'a>(&'a mut self, val: Vec<ContactDetail>) -> &'a mut EventDefinitionBuilder {
self.value["author"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn contact<'a>(&'a mut self, val: Vec<ContactDetail>) -> &'a mut EventDefinitionBuilder {
self.value["contact"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn contained<'a>(&'a mut self, val: Vec<ResourceList>) -> &'a mut EventDefinitionBuilder {
self.value["contained"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn copyright<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["copyright"] = json!(val);
return self;
}
pub fn date<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["date"] = json!(val);
return self;
}
pub fn description<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["description"] = json!(val);
return self;
}
pub fn editor<'a>(&'a mut self, val: Vec<ContactDetail>) -> &'a mut EventDefinitionBuilder {
self.value["editor"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn effective_period<'a>(&'a mut self, val: Period) -> &'a mut EventDefinitionBuilder {
self.value["effectivePeriod"] = json!(val.value);
return self;
}
pub fn endorser<'a>(&'a mut self, val: Vec<ContactDetail>) -> &'a mut EventDefinitionBuilder {
self.value["endorser"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn experimental<'a>(&'a mut self, val: bool) -> &'a mut EventDefinitionBuilder {
self.value["experimental"] = json!(val);
return self;
}
pub fn extension<'a>(&'a mut self, val: Vec<Extension>) -> &'a mut EventDefinitionBuilder {
self.value["extension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn id<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["id"] = json!(val);
return self;
}
pub fn identifier<'a>(&'a mut self, val: Vec<Identifier>) -> &'a mut EventDefinitionBuilder {
self.value["identifier"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn implicit_rules<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["implicitRules"] = json!(val);
return self;
}
pub fn jurisdiction<'a>(
&'a mut self,
val: Vec<CodeableConcept>,
) -> &'a mut EventDefinitionBuilder {
self.value["jurisdiction"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn language<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["language"] = json!(val);
return self;
}
pub fn last_review_date<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["lastReviewDate"] = json!(val);
return self;
}
pub fn meta<'a>(&'a mut self, val: Meta) -> &'a mut EventDefinitionBuilder {
self.value["meta"] = json!(val.value);
return self;
}
pub fn modifier_extension<'a>(
&'a mut self,
val: Vec<Extension>,
) -> &'a mut EventDefinitionBuilder {
self.value["modifierExtension"] =
json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn name<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["name"] = json!(val);
return self;
}
pub fn publisher<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["publisher"] = json!(val);
return self;
}
pub fn purpose<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["purpose"] = json!(val);
return self;
}
pub fn related_artifact<'a>(
&'a mut self,
val: Vec<RelatedArtifact>,
) -> &'a mut EventDefinitionBuilder {
self.value["relatedArtifact"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn reviewer<'a>(&'a mut self, val: Vec<ContactDetail>) -> &'a mut EventDefinitionBuilder {
self.value["reviewer"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn status<'a>(&'a mut self, val: EventDefinitionStatus) -> &'a mut EventDefinitionBuilder {
self.value["status"] = json!(val.to_string());
return self;
}
pub fn subject_codeable_concept<'a>(
&'a mut self,
val: CodeableConcept,
) -> &'a mut EventDefinitionBuilder {
self.value["subjectCodeableConcept"] = json!(val.value);
return self;
}
pub fn subject_reference<'a>(&'a mut self, val: Reference) -> &'a mut EventDefinitionBuilder {
self.value["subjectReference"] = json!(val.value);
return self;
}
pub fn subtitle<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["subtitle"] = json!(val);
return self;
}
pub fn text<'a>(&'a mut self, val: Narrative) -> &'a mut EventDefinitionBuilder {
self.value["text"] = json!(val.value);
return self;
}
pub fn title<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["title"] = json!(val);
return self;
}
pub fn topic<'a>(&'a mut self, val: Vec<CodeableConcept>) -> &'a mut EventDefinitionBuilder {
self.value["topic"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn url<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["url"] = json!(val);
return self;
}
pub fn usage<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["usage"] = json!(val);
return self;
}
pub fn use_context<'a>(&'a mut self, val: Vec<UsageContext>) -> &'a mut EventDefinitionBuilder {
self.value["useContext"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn version<'a>(&'a mut self, val: &str) -> &'a mut EventDefinitionBuilder {
self.value["version"] = json!(val);
return self;
}
}
#[derive(Debug)]
pub enum EventDefinitionStatus {
Draft,
Active,
Retired,
Unknown,
}
impl EventDefinitionStatus {
pub fn from_string(string: &str) -> Option<EventDefinitionStatus> {
match string {
"draft" => Some(EventDefinitionStatus::Draft),
"active" => Some(EventDefinitionStatus::Active),
"retired" => Some(EventDefinitionStatus::Retired),
"unknown" => Some(EventDefinitionStatus::Unknown),
_ => None,
}
}
pub fn to_string(&self) -> String {
match self {
EventDefinitionStatus::Draft => "draft".to_string(),
EventDefinitionStatus::Active => "active".to_string(),
EventDefinitionStatus::Retired => "retired".to_string(),
EventDefinitionStatus::Unknown => "unknown".to_string(),
}
}
}
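// A minimal usage sketch (assumption, not in the source): building and
// validating an EventDefinition with the builder and status enum above.
// `trigger` is a pre-built TriggerDefinition, assumed to be in hand.
fn _event_definition_builder_example(trigger: TriggerDefinition) -> bool {
    let event_def = EventDefinitionBuilder::new(vec![trigger])
        .status(EventDefinitionStatus::Draft)
        .name("example-event")
        .url("http://example.org/fhir/EventDefinition/example")
        .build();
    // validate() walks every present field and returns false on the first failure
    event_def.validate()
}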
|
{
if let Some(Value::Array(val)) = self.value.get("editor") {
return Some(
val.into_iter()
.map(|e| ContactDetail {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
|
spawn-types.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(non_camel_case_types)]
// ignore-emscripten no threads support
/*
Make sure we can spawn tasks that take different types of
parameters. This is based on a test case for #520 provided by Rob
Arnold.
*/
use std::thread;
use std::sync::mpsc::{channel, Sender};
type ctx = Sender<isize>;
fn iotask(_tx: &ctx, ip: String) {
assert_eq!(ip, "localhost".to_string());
}
pub fn
|
() {
let (tx, _rx) = channel::<isize>();
let t = thread::spawn(move|| iotask(&tx, "localhost".to_string()) );
t.join().ok().unwrap();
}
|
main
|
PUTCONFIGTOOSS.py
|
#!/usr/bin/env python
import oss2
class PUTCONFIGTOOSS(object):
|
def __init__(self,DEST,STORESIZE):
self.AK = ''
self.SK = ''
self.ENDPOINT = 'http://oss-cn-hangzhou.aliyuncs.com'
self.BUCKET = 'ali-hangzhou'
self.DEST = DEST
self.STORESIZE = STORESIZE
def INITIAL(self):
try:
AUTH = oss2.Auth(self.AK,self.SK)
BUCKETS = oss2.Bucket(AUTH, self.ENDPOINT,self.BUCKET)
SYSOBJS = '{0}/sys.properties'.format(self.DEST)
if self.STORESIZE == "less-than-30":
OBJECTS = '{0}/local_job.cfg'.format(self.DEST)
else:
OBJECTS = '{0}/{1}.cfg'.format(self.DEST,self.DEST)
with open('/tmp/{0}.cfg'.format(self.DEST), 'rb') as FILEOBJ:
OSSRESP = BUCKETS.put_object(OBJECTS, FILEOBJ)
with open('/tmp/{0}.properties'.format(self.DEST), 'rb') as SYSOBJ:
SYSRESP = BUCKETS.put_object(SYSOBJS, SYSOBJ)
CFGDOWN = 'http://{0}.oss-cn-hangzhou.aliyuncs.com/{1}'.format(self.BUCKET,OBJECTS)
SYSDOWN = 'http://{0}.oss-cn-hangzhou.aliyuncs.com/{1}'.format(self.BUCKET,SYSOBJS)
return CFGDOWN,SYSDOWN
        except Exception as e:
            print(e)
            return e
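# A minimal usage sketch (assumption, not in the source): AK/SK must be filled
# in above; on success INITIAL() returns the two download URLs, on failure it
# returns the exception itself.
if __name__ == '__main__':
    UPLOADER = PUTCONFIGTOOSS('mydest', 'less-than-30')
    RESULT = UPLOADER.INITIAL()
    print(RESULT)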
|
|
addCollaboratorRequest.ts
|
/**
* Edge Impulse API
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
|
export class AddCollaboratorRequest {
/**
* Username or e-mail address
*/
'usernameOrEmail': string;
static discriminator: string | undefined = undefined;
static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
{
"name": "usernameOrEmail",
"baseName": "usernameOrEmail",
"type": "string"
} ];
static getAttributeTypeMap() {
return AddCollaboratorRequest.attributeTypeMap;
}
}
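// A minimal usage sketch (assumption, not in the source): populate the model
// and inspect the serialization metadata the generator emits.
const exampleRequest = new AddCollaboratorRequest();
exampleRequest.usernameOrEmail = "jan@example.com";
console.log(AddCollaboratorRequest.getAttributeTypeMap());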
|
* Do not edit the class manually.
*/
|
Data_analyze.py
|
# -----------------------------------------------
# ................. LIBRARIES ...................
# -----------------------------------------------
import glob
import os
import time
import numpy as np
# -----------------------------------------------
# ............. GLOBAL VARIABLES ................
# -----------------------------------------------
name = "100_9x9Aya"  # 9x9Natsukaze || 9x9Aya || x_9x9Aya .. x = number of moves
file_name = name + "_binary.txt"
binary_path = "Data/Binary/" + file_name
original_path = "/home/kristoffer/Documents/Data/Original/9x9_10k_r104_144x20k/*"
encoding = "UTF-8" # ISO-8859-1 / UTF-8
multiple_files = True
unique_list = []
original_list = []
# [check_handicap(), check_duplication(), get_result_ratio(), check_moves(), remove_empty_lines()]
run_programs = [0, 0, 1, 0, 0]
# -----------------------------------------------
# ................. FUNCTIONS ...................
# -----------------------------------------------
def remove_empty_lines():
output_file = open("Data/Binary/" + name + "_binary_1.txt", "w+")
with open(binary_path, "r") as file:
for line in file:
if not line.isspace():
output_file.write(line)
def check_handicap():
    # original_path is a glob pattern, so resolve it to a concrete file first
    file = open(glob.glob(original_path)[0], 'r', encoding=encoding)
file_lines = file.readlines()
_handicap = file_lines[0].split("HA[")
print(_handicap)
handicap = _handicap[1][0]
print(handicap)
file.close()
def
|
():
file = open(binary_path, 'r', encoding=encoding)
global original_list
global unique_list
original_list = [line.strip() for line in file]
print("Original List Length:", len(original_list))
original_length = len(original_list)
unique_list = np.unique(original_list)
unique_length = len(unique_list)
print("Unique List Length:", unique_length)
print("Original - Unique:", original_length - unique_length, "\n")
file.close()
def get_result_ratio():
win = open("Data/Results-Split/" + name + "_win.txt", 'r')
loss = open("Data/Results-Split/" + name + "_loss.txt", 'r')
draw = open("Data/Results-Split/" + name + "_draw.txt", 'r')
win_amount = len(win.readlines())
loss_amount = len(loss.readlines())
draw_amount = len(draw.readlines())
total_amount = win_amount + loss_amount + draw_amount
print("Total Amount:", total_amount)
print("Amount of wins:", win_amount, ",", round(((win_amount * 100) / total_amount), 2), "%")
print("Amount of loss:", loss_amount, ",", round(((loss_amount * 100) / total_amount), 2), "%")
print("Amount of draw:", draw_amount, ",", round(((draw_amount * 100) / total_amount), 2), "%")
win.close()
loss.close()
draw.close()
def check_moves():
total_pos = 19
moves_list = []
def get_moves(_game_lines):
if "HA[" in _game_lines[0]:
handicap = int(_game_lines[0].split("HA[")[1][0])
else:
handicap = 0
_move_list = []
const = 4
for row in _game_lines[1:-1]:
x = translate(row[3])
y = translate(row[4])
if row[1] + row[2] == "AB":
for i in range(handicap):
x = translate(row[4 + (i * const)])
y = translate(row[5 + (i * const)])
_move = ["b", x, y]
if x != total_pos and y != total_pos:
_move_list.append(_move)
else:
if row[1] == "B":
_move = ["b", x, y]
if row[1] == "W":
_move = ["w", x, y]
if x != total_pos and y != total_pos:
_move_list.append(_move)
return _move_list
    def translate(i):
        # Map SGF coordinate letters 'a'..'t' to board indices 0..19;
        # returns None for any other character, like the original if-chain.
        return {c: idx for idx, c in enumerate("abcdefghijklmnopqrst")}.get(i)
counter = 1
total_files = len(glob.glob(os.path.join(original_path, '*.sgf')))
for infile in glob.glob(os.path.join(original_path, '*.sgf')):
start_time = time.time()
file = open(infile, 'r', encoding="ISO-8859-1")
file_lines = file.readlines()
moves_list.append(len(get_moves(file_lines)))
print(infile)
print("Getting moves from file ", counter, "out of", total_files,
"files. ............................................... ",
round((counter / total_files * 100), 2), "% ............................................... ",
round((time.time() - start_time) * 1000, 2), "ms", "\n")
counter = counter + 1
file.close()
unique_moves_list, unique_moves_list_count = np.unique(moves_list, return_counts=True)
print(unique_moves_list, "\n")
print(unique_moves_list_count, "\n")
total_data = sum(unique_moves_list_count)
for x, y in np.nditer([unique_moves_list, unique_moves_list_count]):
print("Moves: %d : Amount: %d, %d %%" % (int(x), int(y), ((int(y)*100)/total_data)))
print("\n")
print("Unique Move lengths:", len(unique_moves_list))
# -----------------------------------------------
# .................. MAIN .......................
# -----------------------------------------------
if run_programs[0]:
check_handicap()
if run_programs[1]:
check_duplication()
if run_programs[2]:
get_result_ratio()
if run_programs[3]:
check_moves()
if run_programs[4]:
remove_empty_lines()
|
check_duplication
|
Map_Entry.rs
|
// WARNING: This file was autogenerated by jni-bindgen. Any changes to this file may be lost!!!
#[cfg(any(feature = "all", feature = "java-util-Map_Entry"))]
__jni_bindgen! {
/// public interface [Map.Entry](https://developer.android.com/reference/java/util/Map.Entry.html)
///
/// Required feature: java-util-Map_Entry
public interface Map_Entry ("java/util/Map$Entry") extends crate::java::lang::Object {
/// [getKey](https://developer.android.com/reference/java/util/Map.Entry.html#getKey())
///
/// Required features: "java-lang-Object"
#[cfg(any(feature = "all", all(feature = "java-lang-Object")))]
pub fn getKey<'env>(&'env self) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::Object>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "java/util/Map$Entry", java.flags == PUBLIC | ABSTRACT, .name == "getKey", .descriptor == "()Ljava/lang/Object;"
unsafe {
let __jni_args = [];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("java/util/Map$Entry\0", "getKey\0", "()Ljava/lang/Object;\0");
__jni_env.call_object_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [getValue](https://developer.android.com/reference/java/util/Map.Entry.html#getValue())
///
/// Required features: "java-lang-Object"
#[cfg(any(feature = "all", all(feature = "java-lang-Object")))]
pub fn getValue<'env>(&'env self) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::Object>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "java/util/Map$Entry", java.flags == PUBLIC | ABSTRACT, .name == "getValue", .descriptor == "()Ljava/lang/Object;"
unsafe {
let __jni_args = [];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("java/util/Map$Entry\0", "getValue\0", "()Ljava/lang/Object;\0");
__jni_env.call_object_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [setValue](https://developer.android.com/reference/java/util/Map.Entry.html#setValue(java.lang.Object))
///
/// Required features: "java-lang-Object"
#[cfg(any(feature = "all", all(feature = "java-lang-Object")))]
pub fn setValue<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::Object>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::Object>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "java/util/Map$Entry", java.flags == PUBLIC | ABSTRACT, .name == "setValue", .descriptor == "(Ljava/lang/Object;)Ljava/lang/Object;"
unsafe {
let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("java/util/Map$Entry\0", "setValue\0", "(Ljava/lang/Object;)Ljava/lang/Object;\0");
__jni_env.call_object_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [equals](https://developer.android.com/reference/java/util/Map.Entry.html#equals(java.lang.Object))
///
/// Required features: "java-lang-Object"
#[cfg(any(feature = "all", all(feature = "java-lang-Object")))]
pub fn equals<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::Object>>) -> __jni_bindgen::std::result::Result<bool, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "java/util/Map$Entry", java.flags == PUBLIC | ABSTRACT, .name == "equals", .descriptor == "(Ljava/lang/Object;)Z"
unsafe {
let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("java/util/Map$Entry\0", "equals\0", "(Ljava/lang/Object;)Z\0");
__jni_env.call_boolean_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
|
/// [hashCode](https://developer.android.com/reference/java/util/Map.Entry.html#hashCode())
pub fn hashCode<'env>(&'env self) -> __jni_bindgen::std::result::Result<i32, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "java/util/Map$Entry", java.flags == PUBLIC | ABSTRACT, .name == "hashCode", .descriptor == "()I"
unsafe {
let __jni_args = [];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("java/util/Map$Entry\0", "hashCode\0", "()I\0");
__jni_env.call_int_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [comparingByKey](https://developer.android.com/reference/java/util/Map.Entry.html#comparingByKey())
///
/// Required features: "java-util-Comparator"
#[cfg(any(feature = "all", all(feature = "java-util-Comparator")))]
pub fn comparingByKey<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::util::Comparator>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "java/util/Map$Entry", java.flags == PUBLIC | STATIC, .name == "comparingByKey", .descriptor == "()Ljava/util/Comparator;"
unsafe {
let __jni_args = [];
let (__jni_class, __jni_method) = __jni_env.require_class_static_method("java/util/Map$Entry\0", "comparingByKey\0", "()Ljava/util/Comparator;\0");
__jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr())
}
}
/// [comparingByValue](https://developer.android.com/reference/java/util/Map.Entry.html#comparingByValue())
///
/// Required features: "java-util-Comparator"
#[cfg(any(feature = "all", all(feature = "java-util-Comparator")))]
pub fn comparingByValue<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::util::Comparator>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "java/util/Map$Entry", java.flags == PUBLIC | STATIC, .name == "comparingByValue", .descriptor == "()Ljava/util/Comparator;"
unsafe {
let __jni_args = [];
let (__jni_class, __jni_method) = __jni_env.require_class_static_method("java/util/Map$Entry\0", "comparingByValue\0", "()Ljava/util/Comparator;\0");
__jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr())
}
}
/// [comparingByKey](https://developer.android.com/reference/java/util/Map.Entry.html#comparingByKey(java.util.Comparator))
///
/// Required features: "java-util-Comparator"
#[cfg(any(feature = "all", all(feature = "java-util-Comparator")))]
pub fn comparingByKey_Comparator<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::util::Comparator>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::util::Comparator>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "java/util/Map$Entry", java.flags == PUBLIC | STATIC, .name == "comparingByKey", .descriptor == "(Ljava/util/Comparator;)Ljava/util/Comparator;"
unsafe {
let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())];
let (__jni_class, __jni_method) = __jni_env.require_class_static_method("java/util/Map$Entry\0", "comparingByKey\0", "(Ljava/util/Comparator;)Ljava/util/Comparator;\0");
__jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr())
}
}
/// [comparingByValue](https://developer.android.com/reference/java/util/Map.Entry.html#comparingByValue(java.util.Comparator))
///
/// Required features: "java-util-Comparator"
#[cfg(any(feature = "all", all(feature = "java-util-Comparator")))]
pub fn comparingByValue_Comparator<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::util::Comparator>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::util::Comparator>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "java/util/Map$Entry", java.flags == PUBLIC | STATIC, .name == "comparingByValue", .descriptor == "(Ljava/util/Comparator;)Ljava/util/Comparator;"
unsafe {
let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())];
let (__jni_class, __jni_method) = __jni_env.require_class_static_method("java/util/Map$Entry\0", "comparingByValue\0", "(Ljava/util/Comparator;)Ljava/util/Comparator;\0");
__jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr())
}
}
}
}
|
}
}
|
lib.rs
|
/*
MIT License
Copyright (c) 2017 Viorel Bota
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
use std::fmt::Display;
use std::fmt::Formatter;
use std::fmt::Result;
pub struct Command
{
pub name: String,
pub arguments: Vec<String>,
}
impl Display for Command
{
fn fmt(&self, f: &mut Formatter) -> Result
|
}
|
{
write!(f, "Command: {:?} Arguments: {:?}", self.name, self.arguments )
}
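// A minimal usage sketch (assumption, not in the source): printing a Command
// through the Display impl defined above.
fn _command_display_example() {
    let cmd = Command {
        name: "ls".to_string(),
        arguments: vec!["-l".to_string(), "/tmp".to_string()],
    };
    // Prints: Command: "ls" Arguments: ["-l", "/tmp"]  ({:?} quotes the strings)
    println!("{}", cmd);
}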
|
add_layers_mixin.py
|
import itertools
import numpy as np
from .. import layers
from ..utils import colormaps
from ..utils.misc import ensure_iterable, is_iterable
from ..utils import io
class AddLayersMixin:
"""A mixin that adds add_* methods for adding layers to the ViewerModel.
Each method corresponds to adding one or more layers to the viewer.
Methods that just add a single layer contain the keyword arguments and
    copies of the documentation from that layer. These are copied and
pasted instead of being autogenerated because IDEs like PyCharm parse the
source code for docs instead of pulling it up dynamically.
These methods are separated into a mixin to keep the ViewerModel class
easier to read and make these methods easier to maintain.
"""
def add_layer(self, layer):
"""Add a layer to the viewer.
Parameters
----------
layer : napari.layers.Layer
Layer to add.
"""
layer.events.select.connect(self._update_active_layer)
layer.events.deselect.connect(self._update_active_layer)
layer.events.status.connect(self._update_status)
layer.events.help.connect(self._update_help)
layer.events.interactive.connect(self._update_interactive)
layer.events.cursor.connect(self._update_cursor)
layer.events.cursor_size.connect(self._update_cursor_size)
layer.events.data.connect(self._on_layers_change)
layer.dims.events.ndisplay.connect(self._on_layers_change)
layer.dims.events.order.connect(self._on_layers_change)
layer.dims.events.range.connect(self._on_layers_change)
self.layers.append(layer)
self._update_layers(layers=[layer])
if len(self.layers) == 1:
self.reset_view()
def add_image(
self,
data=None,
*,
channel_axis=None,
rgb=None,
is_pyramid=None,
colormap=None,
contrast_limits=None,
gamma=1,
interpolation='nearest',
rendering='mip',
iso_threshold=0.5,
attenuation=0.5,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=1,
blending=None,
visible=True,
path=None,
):
"""Add an image layer to the layers list.
Parameters
----------
data : array or list of array
Image data. Can be N dimensional. If the last dimension has length
            3 or 4, it can be interpreted as RGB or RGBA if rgb is `True`. If a
list and arrays are decreasing in shape then the data is treated as
an image pyramid.
channel_axis : int, optional
Axis to expand image along.
rgb : bool
            Whether the image is RGB or RGBA. If not specified by the user and
the last dimension of the data has length 3 or 4 it will be set as
`True`. If `False` the image is interpreted as a luminance image.
is_pyramid : bool
Whether the data is an image pyramid or not. Pyramid data is
represented by a list of array like image data. If not specified by
the user and if the data is a list of arrays that decrease in shape
then it will be taken to be a pyramid. The first image in the list
should be the largest.
colormap : str, vispy.Color.Colormap, tuple, dict, list
Colormaps to use for luminance images. If a string must be the name
of a supported colormap from vispy or matplotlib. If a tuple the
first value must be a string to assign as a name to a colormap and
the second item must be a Colormap. If a dict the key must be a
string to assign as a name to a colormap and the value must be a
Colormap. If a list then must be same length as the axis that is
being expanded as channels, and each colormap is applied to each
new image layer.
contrast_limits : list (2,)
Color limits to be used for determining the colormap bounds for
luminance images. If not passed is calculated as the min and max of
the image. If list of lists then must be same length as the axis
that is being expanded and then each colormap is applied to each
image.
gamma : list, float
Gamma correction for determining colormap linearity. Defaults to 1.
If a list then must be same length as the axis that is being
expanded and then each entry in the list is applied to each image.
interpolation : str
Interpolation mode used by vispy. Must be one of our supported
modes.
rendering : str
Rendering mode used by vispy. Must be one of our supported
modes.
iso_threshold : float
Threshold for isosurface.
attenuation : float
Attenuation rate for attenuated maximum intensity projection.
name : str
Name of the layer.
metadata : dict
Layer metadata.
scale : tuple of float
Scale factors for the layer.
translate : tuple of float
Translation values for the layer.
opacity : float
Opacity of the layer visual, between 0.0 and 1.0.
blending : str
One of a list of preset blending modes that determines how RGB and
alpha values of the layer visual get mixed. Allowed values are
{'opaque', 'translucent', and 'additive'}.
visible : bool
Whether the layer visual is currently being displayed.
path : str or list of str
Path or list of paths to image data. Paths can be passed as strings
or `pathlib.Path` instances.
Returns
-------
layer : :class:`napari.layers.Image` or list
The newly-created image layer or list of image layers.
"""
if data is None and path is None:
|
elif data is not None and path is not None:
raise ValueError("Only one of data or path can be provided")
elif data is None:
data = io.magic_imread(path)
if channel_axis is None:
if colormap is None:
colormap = 'gray'
if blending is None:
blending = 'translucent'
layer = layers.Image(
data,
rgb=rgb,
is_pyramid=is_pyramid,
colormap=colormap,
contrast_limits=contrast_limits,
gamma=gamma,
interpolation=interpolation,
rendering=rendering,
iso_threshold=iso_threshold,
attenuation=attenuation,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
)
self.add_layer(layer)
return layer
else:
if is_pyramid:
n_channels = data[0].shape[channel_axis]
else:
n_channels = data.shape[channel_axis]
name = ensure_iterable(name)
if blending is None:
blending = 'additive'
if colormap is None:
if n_channels < 3:
colormap = colormaps.MAGENTA_GREEN
else:
colormap = itertools.cycle(colormaps.CYMRGB)
else:
colormap = ensure_iterable(colormap)
# If one pair of clim values is passed then need to iterate them to
# all layers.
if contrast_limits is not None and not is_iterable(
contrast_limits[0]
):
contrast_limits = itertools.repeat(contrast_limits)
else:
contrast_limits = ensure_iterable(contrast_limits)
gamma = ensure_iterable(gamma)
layer_list = []
zipped_args = zip(
range(n_channels), colormap, contrast_limits, gamma, name
)
for i, cmap, clims, _gamma, name in zipped_args:
if is_pyramid:
image = [
np.take(data[j], i, axis=channel_axis)
for j in range(len(data))
]
else:
image = np.take(data, i, axis=channel_axis)
layer = layers.Image(
image,
rgb=rgb,
colormap=cmap,
contrast_limits=clims,
gamma=_gamma,
interpolation=interpolation,
rendering=rendering,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
)
self.add_layer(layer)
layer_list.append(layer)
return layer_list
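    # A minimal usage sketch (assumption, not in the source; `viewer` is a
    # ViewerModel carrying this mixin):
    #
    #   viewer.add_image(np.random.random((64, 64)))       # one grayscale layer
    #   viewer.add_image(np.random.random((3, 64, 64)),
    #                    channel_axis=0)                    # three additive channel layers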
def add_points(
self,
data=None,
*,
properties=None,
symbol='o',
size=10,
edge_width=1,
edge_color='black',
edge_color_cycle=None,
edge_colormap='viridis',
edge_contrast_limits=None,
face_color='white',
face_color_cycle=None,
face_colormap='viridis',
face_contrast_limits=None,
n_dimensional=False,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=1,
blending='translucent',
visible=True,
):
"""Add a points layer to the layers list.
Parameters
----------
data : array (N, D)
Coordinates for N points in D dimensions.
properties : dict {str: array (N,)}, DataFrame
Properties for each point. Each property should be an array of length N,
where N is the number of points.
symbol : str
Symbol to be used for the point markers. Must be one of the
following: arrow, clobber, cross, diamond, disc, hbar, ring,
square, star, tailed_arrow, triangle_down, triangle_up, vbar, x.
size : float, array
Size of the point marker. If given as a scalar, all points are made
            the same size. If given as an array, size must be
            broadcastable to the same shape as the data.
edge_width : float
Width of the symbol edge in pixels.
edge_color : str, array-like
Color of the point marker border. Numeric color values should be RGB(A).
edge_color_cycle : np.ndarray, list, cycle
Cycle of colors (provided as RGBA) to map to edge_color if a
categorical attribute is used to set face_color.
edge_colormap : str, vispy.color.colormap.Colormap
Colormap to set edge_color if a continuous attribute is used to set face_color.
See vispy docs for details: http://vispy.org/color.html#vispy.color.Colormap
edge_contrast_limits : None, (float, float)
clims for mapping the property to a color map. These are the min and max value
of the specified property that are mapped to 0 and 1, respectively.
            The default value is None. If set to None, the clims will be set to
(property.min(), property.max())
face_color : str, array-like
Color of the point marker body. Numeric color values should be RGB(A).
face_color_cycle : np.ndarray, list, cycle
Cycle of colors (provided as RGBA) to map to face_color if a
categorical attribute is used to set face_color.
face_colormap : str, vispy.color.colormap.Colormap
Colormap to set face_color if a continuous attribute is used to set face_color.
See vispy docs for details: http://vispy.org/color.html#vispy.color.Colormap
face_contrast_limits : None, (float, float)
clims for mapping the property to a color map. These are the min and max value
of the specified property that are mapped to 0 and 1, respectively.
            The default value is None. If set to None, the clims will be set to
(property.min(), property.max())
n_dimensional : bool
If True, renders points not just in central plane but also in all
n-dimensions according to specified point marker size.
name : str
Name of the layer.
metadata : dict
Layer metadata.
scale : tuple of float
Scale factors for the layer.
translate : tuple of float
Translation values for the layer.
opacity : float
Opacity of the layer visual, between 0.0 and 1.0.
blending : str
One of a list of preset blending modes that determines how RGB and
alpha values of the layer visual get mixed. Allowed values are
{'opaque', 'translucent', and 'additive'}.
visible : bool
Whether the layer visual is currently being displayed.
Returns
-------
layer : :class:`napari.layers.Points`
The newly-created points layer.
Notes
-----
See vispy's marker visual docs for more details:
http://api.vispy.org/en/latest/visuals.html#vispy.visuals.MarkersVisual
"""
if data is None:
ndim = max(self.dims.ndim, 2)
data = np.empty([0, ndim])
layer = layers.Points(
data=data,
properties=properties,
symbol=symbol,
size=size,
edge_width=edge_width,
edge_color=edge_color,
edge_color_cycle=edge_color_cycle,
edge_colormap=edge_colormap,
edge_contrast_limits=edge_contrast_limits,
face_color=face_color,
face_color_cycle=face_color_cycle,
face_colormap=face_colormap,
face_contrast_limits=face_contrast_limits,
n_dimensional=n_dimensional,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
)
self.add_layer(layer)
return layer
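    # A minimal usage sketch (assumption, not in the source): ten random 2D
    # points colored by a categorical property.
    #
    #   points = np.random.random((10, 2)) * 100
    #   props = {'good': np.random.choice([True, False], 10)}
    #   viewer.add_points(points, properties=props, face_color='good', size=5)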
def add_labels(
self,
data=None,
*,
is_pyramid=None,
num_colors=50,
seed=0.5,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=0.7,
blending='translucent',
visible=True,
path=None,
):
"""Add a labels (or segmentation) layer to the layers list.
An image-like layer where every pixel contains an integer ID
corresponding to the region it belongs to.
Parameters
----------
data : array or list of array
Labels data as an array or pyramid.
is_pyramid : bool
Whether the data is an image pyramid or not. Pyramid data is
represented by a list of array like image data. If not specified by
the user and if the data is a list of arrays that decrease in shape
then it will be taken to be a pyramid. The first image in the list
should be the largest.
num_colors : int
Number of unique colors to use in colormap.
seed : float
Seed for colormap random generator.
name : str
Name of the layer.
metadata : dict
Layer metadata.
scale : tuple of float
Scale factors for the layer.
translate : tuple of float
Translation values for the layer.
opacity : float
Opacity of the layer visual, between 0.0 and 1.0.
blending : str
One of a list of preset blending modes that determines how RGB and
alpha values of the layer visual get mixed. Allowed values are
{'opaque', 'translucent', and 'additive'}.
visible : bool
Whether the layer visual is currently being displayed.
path : str or list of str
Path or list of paths to image data. Paths can be passed as strings
or `pathlib.Path` instances.
Returns
-------
layer : :class:`napari.layers.Labels`
The newly-created labels layer.
"""
if data is None and path is None:
raise ValueError("One of either data or path must be provided")
elif data is not None and path is not None:
raise ValueError("Only one of data or path can be provided")
elif data is None:
data = io.magic_imread(path)
layer = layers.Labels(
data,
is_pyramid=is_pyramid,
num_colors=num_colors,
seed=seed,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
)
self.add_layer(layer)
return layer
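    # A minimal usage sketch (assumption, not in the source): a small random
    # segmentation where each integer ID gets its own color.
    #
    #   labels = np.random.randint(0, 5, size=(64, 64))
    #   viewer.add_labels(labels, num_colors=5, opacity=0.5)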
def add_shapes(
self,
data=None,
*,
shape_type='rectangle',
edge_width=1,
edge_color='black',
face_color='white',
z_index=0,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=0.7,
blending='translucent',
visible=True,
):
"""Add a shapes layer to the layers list.
Parameters
----------
data : list or array
List of shape data, where each element is an (N, D) array of the
            N vertices of a shape in D dimensions. Can be a 3-dimensional
array if each shape has the same number of vertices.
shape_type : string or list
            String of the shape type, must be one of "{'line', 'rectangle',
'ellipse', 'path', 'polygon'}". If a list is supplied it must be
the same length as the length of `data` and each element will be
applied to each shape otherwise the same value will be used for all
shapes.
edge_width : float or list
Thickness of lines and edges. If a list is supplied it must be the
same length as the length of `data` and each element will be
applied to each shape otherwise the same value will be used for all
shapes.
edge_color : str or list
If string can be any color name recognized by vispy or hex value if
starting with `#`. If array-like must be 1-dimensional array with 3
or 4 elements. If a list is supplied it must be the same length as
the length of `data` and each element will be applied to each shape
otherwise the same value will be used for all shapes.
face_color : str or list
If string can be any color name recognized by vispy or hex value if
starting with `#`. If array-like must be 1-dimensional array with 3
or 4 elements. If a list is supplied it must be the same length as
the length of `data` and each element will be applied to each shape
otherwise the same value will be used for all shapes.
z_index : int or list
Specifier of z order priority. Shapes with higher z order are
            displayed on top of others. If a list is supplied it must be the
same length as the length of `data` and each element will be
applied to each shape otherwise the same value will be used for all
shapes.
name : str
Name of the layer.
metadata : dict
Layer metadata.
scale : tuple of float
Scale factors for the layer.
translate : tuple of float
Translation values for the layer.
opacity : float or list
Opacity of the layer visual, between 0.0 and 1.0.
blending : str
One of a list of preset blending modes that determines how RGB and
alpha values of the layer visual get mixed. Allowed values are
{'opaque', 'translucent', and 'additive'}.
visible : bool
Whether the layer visual is currently being displayed.
Returns
-------
layer : :class:`napari.layers.Shapes`
The newly-created shapes layer.
"""
if data is None:
ndim = max(self.dims.ndim, 2)
data = np.empty((0, 0, ndim))
layer = layers.Shapes(
data=data,
shape_type=shape_type,
edge_width=edge_width,
edge_color=edge_color,
face_color=face_color,
z_index=z_index,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
)
self.add_layer(layer)
return layer
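    # A minimal usage sketch (assumption, not in the source): one rectangle and
    # one triangle, with per-shape types.
    #
    #   rect = np.array([[0, 0], [0, 10], [10, 10], [10, 0]])
    #   tri = np.array([[20, 20], [20, 30], [30, 25]])
    #   viewer.add_shapes([rect, tri], shape_type=['rectangle', 'polygon'])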
def add_surface(
self,
data,
*,
colormap='gray',
contrast_limits=None,
gamma=1,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=1,
blending='translucent',
visible=True,
):
"""Add a surface layer to the layers list.
Parameters
----------
data : 3-tuple of array
The first element of the tuple is an (N, D) array of vertices of
mesh triangles. The second is an (M, 3) array of int of indices
of the mesh triangles. The third element is the (K0, ..., KL, N)
array of values used to color vertices where the additional L
dimensions are used to color the same mesh with different values.
colormap : str, vispy.Color.Colormap, tuple, dict
Colormap to use for luminance images. If a string must be the name
of a supported colormap from vispy or matplotlib. If a tuple the
first value must be a string to assign as a name to a colormap and
the second item must be a Colormap. If a dict the key must be a
string to assign as a name to a colormap and the value must be a
Colormap.
contrast_limits : list (2,)
Color limits to be used for determining the colormap bounds for
luminance images. If not passed is calculated as the min and max of
the image.
gamma : float
Gamma correction for determining colormap linearity. Defaults to 1.
name : str
Name of the layer.
metadata : dict
Layer metadata.
scale : tuple of float
Scale factors for the layer.
translate : tuple of float
Translation values for the layer.
opacity : float
Opacity of the layer visual, between 0.0 and 1.0.
blending : str
One of a list of preset blending modes that determines how RGB and
alpha values of the layer visual get mixed. Allowed values are
{'opaque', 'translucent', and 'additive'}.
visible : bool
Whether the layer visual is currently being displayed.
Returns
-------
layer : :class:`napari.layers.Surface`
The newly-created surface layer.
"""
layer = layers.Surface(
data,
colormap=colormap,
contrast_limits=contrast_limits,
gamma=gamma,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
)
self.add_layer(layer)
return layer
def add_vectors(
self,
data,
*,
edge_width=1,
edge_color='red',
length=1,
name=None,
metadata=None,
scale=None,
translate=None,
opacity=0.7,
blending='translucent',
visible=True,
):
"""Add a vectors layer to the layers list.
Parameters
----------
data : (N, 2, D) or (N1, N2, ..., ND, D) array
An (N, 2, D) array is interpreted as "coordinate-like" data and a
list of N vectors with start point and projections of the vector in
D dimensions. An (N1, N2, ..., ND, D) array is interpreted as
"image-like" data where there is a length D vector of the
projections at each pixel.
edge_width : float
Width for all vectors in pixels.
length : float
Multiplicative factor on projections for length of all vectors.
edge_color : str
Edge color of all the vectors.
name : str
Name of the layer.
metadata : dict
Layer metadata.
scale : tuple of float
Scale factors for the layer.
translate : tuple of float
Translation values for the layer.
opacity : float
Opacity of the layer visual, between 0.0 and 1.0.
blending : str
One of a list of preset blending modes that determines how RGB and
alpha values of the layer visual get mixed. Allowed values are
{'opaque', 'translucent', and 'additive'}.
visible : bool
Whether the layer visual is currently being displayed.
Returns
-------
layer : :class:`napari.layers.Vectors`
The newly-created vectors layer.
"""
layer = layers.Vectors(
data,
edge_width=edge_width,
edge_color=edge_color,
length=length,
name=name,
metadata=metadata,
scale=scale,
translate=translate,
opacity=opacity,
blending=blending,
visible=visible,
)
self.add_layer(layer)
return layer
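    # A minimal usage sketch (assumption, not in the source): five random 2D
    # vectors given as (start point, projection) pairs.
    #
    #   vecs = np.random.random((5, 2, 2)) * 50
    #   viewer.add_vectors(vecs, edge_width=2, length=3)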
def _add_layer_from_data(
self, data, meta: dict = None, layer_type: str = 'image'
):
"""Add arbitrary layer data to the viewer.
Primarily intended for usage by reader plugin hooks.
Parameters
----------
data : Any
Data in a format that is valid for the corresponding `add_*` method
of the specified ``layer_type``.
meta : dict, optional
Dict of keyword arguments that will be passed to the corresponding
`add_*` method. MUST NOT contain any keyword arguments that are
not valid for the corresponding method.
layer_type : str
            Type of layer to add. MUST have a corresponding add_* method
on the viewer instance.
Raises
------
ValueError
If ``layer_type`` is not one of the recognized layer types.
TypeError
If any keyword arguments in ``meta`` are unexpected for the
corresponding `add_*` method for this layer_type.
Examples
--------
        A typical use case might be to unpack a tuple of layer data with a
specified layer_type.
>>> viewer = napari.Viewer()
>>> data = (
... np.random.random((10, 2)) * 20,
... {'face_color': 'blue'},
... 'points',
... )
>>> viewer._add_layer_from_data(*data)
"""
layer_type = layer_type.lower()
if layer_type not in layers.NAMES:
raise ValueError(
f"Unrecognized layer_type: '{layer_type}'. "
f"Must be one of: {layers.NAMES}."
)
try:
add_method = getattr(self, 'add_' + layer_type)
except AttributeError:
raise NotImplementedError(
f"Sorry! {layer_type} is a valid layer type, but there is no "
f"viewer.add_{layer_type} available yet."
)
try:
add_method(data, **(meta or {}))
except TypeError as exc:
if 'unexpected keyword argument' in str(exc):
bad_key = str(exc).split('keyword argument ')[-1]
raise TypeError(
"_add_layer_from_data received an unexpected keyword "
f"argument ({bad_key}) for layer type {layer_type}"
) from exc
|
raise ValueError("One of either data or path must be provided")
|
_build.rs
|
use std::env;
use std::path::PathBuf;
fn
|
() {
let target = env::var("TARGET").unwrap();
if target.contains("pc-windows") {
let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
let mut lib_dir = manifest_dir.clone();
let mut dll_dir = manifest_dir.clone();
if target.contains("msvc") {
lib_dir.push("msvc");
dll_dir.push("msvc");
} else {
lib_dir.push("gnu-mingw");
dll_dir.push("gnu-mingw");
}
lib_dir.push("lib");
dll_dir.push("dll");
if target.contains("x86_64") {
lib_dir.push("64");
dll_dir.push("64");
} else {
lib_dir.push("32");
dll_dir.push("32");
}
println!("cargo:rustc-link-serach=all={}", lib_dir.display());
for entry in std::fs::read_dir(dll_dir).expect("Can't read DLL dir") {
let entry_path = entry.expect("invalid fs entry").path();
let file_name_result = entry_path.file_name();
let mut new_file_path = manifest_dir.clone();
if let Some(file_name) = file_name_result {
let file_name = file_name.to_str().unwrap();
if file_name.ends_with(".dll") {
new_file_path.push(file_name);
std::fs::copy(&entry_path, new_file_path.as_path()).expect("Can't copy from DLL dir");
}
}
}
}
}
|
main
|
index.ts
|
import { Instance, FlatpickrFn, DayElement } from "./types/instance";
import {
Options,
ParsedOptions,
DateLimit,
DateRangeLimit,
DateOption,
defaults as defaultOptions,
Hook,
HookKey,
HOOKS,
} from "./types/options";
import { Locale, CustomLocale, key as LocaleKey } from "./types/locale";
import English from "./l10n/default";
import { arrayify, debounce, int, pad, IncrementEvent } from "./utils";
import {
clearNode,
createElement,
createNumberInput,
findParent,
toggleClass,
getEventTarget,
} from "./utils/dom";
import {
compareDates,
createDateParser,
createDateFormatter,
duration,
isBetween,
} from "./utils/dates";
import { tokenRegex, monthToStr } from "./utils/formatting";
import "./utils/polyfills";
const DEBOUNCED_CHANGE_MS = 300;
function FlatpickrInstance(
element: HTMLElement,
instanceConfig?: Options
): Instance {
const self = {
config: {
...defaultOptions,
...flatpickr.defaultConfig,
} as ParsedOptions,
l10n: English,
} as Instance;
self.parseDate = createDateParser({ config: self.config, l10n: self.l10n });
self._handlers = [];
self.pluginElements = [];
self.loadedPlugins = [];
self._bind = bind;
self._setHoursFromDate = setHoursFromDate;
self._positionCalendar = positionCalendar;
self.changeMonth = changeMonth;
self.changeYear = changeYear;
self.clear = clear;
self.close = close;
self._createElement = createElement;
self.destroy = destroy;
self.isEnabled = isEnabled;
self.jumpToDate = jumpToDate;
self.open = open;
self.redraw = redraw;
self.set = set;
self.setDate = setDate;
self.toggle = toggle;
function
|
() {
self.utils = {
getDaysInMonth(month = self.currentMonth, yr = self.currentYear) {
if (month === 1 && ((yr % 4 === 0 && yr % 100 !== 0) || yr % 400 === 0))
return 29;
return self.l10n.daysInMonth[month];
},
};
}
function init() {
self.element = self.input = element as HTMLInputElement;
self.isOpen = false;
parseConfig();
setupLocale();
setupInputs();
setupDates();
setupHelperFunctions();
if (!self.isMobile) build();
bindEvents();
if (self.selectedDates.length || self.config.noCalendar) {
if (self.config.enableTime) {
setHoursFromDate(
self.config.noCalendar
? self.latestSelectedDateObj || self.config.minDate
: undefined
);
}
updateValue(false);
}
setCalendarWidth();
self.showTimeInput =
self.selectedDates.length > 0 || self.config.noCalendar;
const isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
/* TODO: investigate this further
Currently, there is weird positioning behavior in safari causing pages
to scroll up. https://github.com/chmln/flatpickr/issues/563
However, most browsers are not Safari and positioning is expensive when used
in scale. https://github.com/chmln/flatpickr/issues/1096
*/
if (!self.isMobile && isSafari) {
positionCalendar();
}
triggerEvent("onReady");
}
function bindToInstance<F extends Function>(fn: F): F {
return fn.bind(self);
}
function setCalendarWidth() {
const config = self.config;
if (config.weekNumbers === false && config.showMonths === 1) return;
else if (config.noCalendar !== true) {
window.requestAnimationFrame(function() {
if (self.calendarContainer !== undefined) {
self.calendarContainer.style.visibility = "hidden";
self.calendarContainer.style.display = "block";
}
if (self.daysContainer !== undefined) {
const daysWidth = (self.days.offsetWidth + 1) * config.showMonths;
self.daysContainer.style.width = daysWidth + "px";
self.calendarContainer.style.width =
daysWidth +
(self.weekWrapper !== undefined
? self.weekWrapper.offsetWidth
: 0) +
"px";
self.calendarContainer.style.removeProperty("visibility");
self.calendarContainer.style.removeProperty("display");
}
});
}
}
/**
* The handler for all events targeting the time inputs
*/
function updateTime(
e?: MouseEvent | IncrementEvent | KeyboardEvent | FocusEvent
) {
if (self.selectedDates.length === 0) {
setDefaultTime();
}
if (e !== undefined && e.type !== "blur") {
timeWrapper(e);
}
const prevValue = self._input.value;
setHoursFromInputs();
updateValue();
if (self._input.value !== prevValue) {
self._debouncedChange();
}
}
function ampm2military(hour: number, amPM: string) {
return (hour % 12) + 12 * int(amPM === self.l10n.amPM[1]);
}
function military2ampm(hour: number) {
switch (hour % 24) {
case 0:
case 12:
return 12;
default:
return hour % 12;
}
}
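  // Worked examples for the two converters above (assumption, not in the
  // source; self.l10n.amPM is ["AM", "PM"] in the default locale):
  //   ampm2military(12, "AM") === 0    (midnight: 12 % 12 wraps to 0)
  //   ampm2military(1, "PM")  === 13
  //   military2ampm(0)  === 12         (midnight displays as 12)
  //   military2ampm(13) === 1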
/**
* Syncs the selected date object time with user's time input
*/
function setHoursFromInputs() {
if (self.hourElement === undefined || self.minuteElement === undefined)
return;
let hours = (parseInt(self.hourElement.value.slice(-2), 10) || 0) % 24,
minutes = (parseInt(self.minuteElement.value, 10) || 0) % 60,
seconds =
self.secondElement !== undefined
? (parseInt(self.secondElement.value, 10) || 0) % 60
: 0;
if (self.amPM !== undefined) {
hours = ampm2military(hours, self.amPM.textContent as string);
}
const limitMinHours =
self.config.minTime !== undefined ||
(self.config.minDate &&
self.minDateHasTime &&
self.latestSelectedDateObj &&
compareDates(self.latestSelectedDateObj, self.config.minDate, true) ===
0);
const limitMaxHours =
self.config.maxTime !== undefined ||
(self.config.maxDate &&
self.maxDateHasTime &&
self.latestSelectedDateObj &&
compareDates(self.latestSelectedDateObj, self.config.maxDate, true) ===
0);
if (limitMaxHours) {
const maxTime =
self.config.maxTime !== undefined
? self.config.maxTime
: (self.config.maxDate as Date);
hours = Math.min(hours, maxTime.getHours());
if (hours === maxTime.getHours())
minutes = Math.min(minutes, maxTime.getMinutes());
if (minutes === maxTime.getMinutes())
seconds = Math.min(seconds, maxTime.getSeconds());
}
if (limitMinHours) {
const minTime =
self.config.minTime !== undefined
? self.config.minTime
: (self.config.minDate as Date);
hours = Math.max(hours, minTime.getHours());
if (hours === minTime.getHours())
minutes = Math.max(minutes, minTime.getMinutes());
if (minutes === minTime.getMinutes())
seconds = Math.max(seconds, minTime.getSeconds());
}
setHours(hours, minutes, seconds);
}
/**
* Syncs time input values with a date
*/
function setHoursFromDate(dateObj?: Date) {
const date = dateObj || self.latestSelectedDateObj;
if (date) setHours(date.getHours(), date.getMinutes(), date.getSeconds());
}
function setDefaultHours() {
let hours = self.config.defaultHour;
let minutes = self.config.defaultMinute;
let seconds = self.config.defaultSeconds;
if (self.config.minDate !== undefined) {
const minHr = self.config.minDate.getHours();
const minMinutes = self.config.minDate.getMinutes();
hours = Math.max(hours, minHr);
if (hours === minHr) minutes = Math.max(minMinutes, minutes);
if (hours === minHr && minutes === minMinutes)
seconds = self.config.minDate.getSeconds();
}
if (self.config.maxDate !== undefined) {
const maxHr = self.config.maxDate.getHours();
const maxMinutes = self.config.maxDate.getMinutes();
hours = Math.min(hours, maxHr);
if (hours === maxHr) minutes = Math.min(maxMinutes, minutes);
if (hours === maxHr && minutes === maxMinutes)
seconds = self.config.maxDate.getSeconds();
}
setHours(hours, minutes, seconds);
}
/**
* Sets the hours, minutes, and optionally seconds
* of the latest selected date object and the
* corresponding time inputs
   * @param {Number} hours the hour. Whether it's military
* or am-pm gets inferred from config
* @param {Number} minutes the minutes
* @param {Number} seconds the seconds (optional)
*/
function setHours(hours: number, minutes: number, seconds: number) {
if (self.latestSelectedDateObj !== undefined) {
self.latestSelectedDateObj.setHours(hours % 24, minutes, seconds || 0, 0);
}
if (!self.hourElement || !self.minuteElement || self.isMobile) return;
self.hourElement.value = pad(
!self.config.time_24hr
? ((12 + hours) % 12) + 12 * int(hours % 12 === 0)
: hours
);
self.minuteElement.value = pad(minutes);
if (self.amPM !== undefined)
self.amPM.textContent = self.l10n.amPM[int(hours >= 12)];
if (self.secondElement !== undefined)
self.secondElement.value = pad(seconds);
}
/**
* Handles the year input and incrementing events
* @param {Event} event the keyup or increment event
*/
function onYearInput(event: KeyboardEvent & IncrementEvent) {
const year =
parseInt((event.target as HTMLInputElement).value) + (event.delta || 0);
if (
year / 1000 > 1 ||
(event.key === "Enter" && !/[^\d]/.test(year.toString()))
) {
changeYear(year);
}
}
/**
* Essentially addEventListener + tracking
* @param {Element} element the element to addEventListener to
* @param {String} event the event name
* @param {Function} handler the event handler
*/
function bind<E extends Element | Window | Document>(
element: E | E[],
event: string | string[],
handler: (e?: any) => void,
options?: object
): void {
if (event instanceof Array)
return event.forEach(ev => bind(element, ev, handler, options));
if (element instanceof Array)
return element.forEach(el => bind(el, event, handler, options));
element.addEventListener(event, handler as EventListener, options);
self._handlers.push({
element: element as Element,
event,
handler,
options,
});
}
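  // A minimal usage sketch (assumption, not in the source): bind() fans out
  // over arrays of elements and event names, and records every listener in
  // self._handlers so they can be removed later (e.g. on teardown; destroy()
  // itself is not shown here).
  //
  //   bind([self.hourElement, self.minuteElement], ["focus", "click"], selText);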
/**
* A mousedown handler which mimics click.
* Minimizes latency, since we don't need to wait for mouseup in most cases.
* Also, avoids handling right clicks.
*
* @param {Function} handler the event handler
*/
function onClick<E extends MouseEvent>(
handler: (e: E) => void
): (e: E) => void {
return evt => {
evt.which === 1 && handler(evt);
};
}
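  // e.g. bind(self._input, "mousedown", onClick(self.open)) runs the handler
  // only for primary-button presses (evt.which === 1).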
function triggerChange() {
triggerEvent("onChange");
}
/**
* Adds all the necessary event listeners
*/
function bindEvents(): void {
if (self.config.wrap) {
["open", "close", "toggle", "clear"].forEach(evt => {
Array.prototype.forEach.call(
self.element.querySelectorAll(`[data-${evt}]`),
(el: HTMLElement) =>
bind(
el,
"click",
self[evt as "open" | "close" | "toggle" | "clear"]
)
);
});
}
if (self.isMobile) {
setupMobile();
return;
}
const debouncedResize = debounce(onResize, 50);
self._debouncedChange = debounce(triggerChange, DEBOUNCED_CHANGE_MS);
if (self.daysContainer && !/iPhone|iPad|iPod/i.test(navigator.userAgent))
bind(self.daysContainer, "mouseover", (e: MouseEvent) => {
if (self.config.mode === "range") onMouseOver(e.target as DayElement);
});
bind(window.document.body, "keydown", onKeyDown);
if (!self.config.inline && !self.config.static)
bind(window, "resize", debouncedResize);
if (window.ontouchstart !== undefined)
bind(window.document, "touchstart", documentClick);
else bind(window.document, "mousedown", onClick(documentClick));
bind(window.document, "focus", documentClick, { capture: true });
if (self.config.clickOpens === true) {
bind(self._input, "focus", self.open);
bind(self._input, "mousedown", onClick(self.open));
}
if (self.daysContainer !== undefined) {
bind(self.monthNav, "mousedown", onClick(onMonthNavClick));
bind(self.monthNav, ["keyup", "increment"], onYearInput);
bind(self.daysContainer, "mousedown", onClick(selectDate));
}
if (
self.timeContainer !== undefined &&
self.minuteElement !== undefined &&
self.hourElement !== undefined
) {
const selText = (e: FocusEvent) =>
(e.target as HTMLInputElement).select();
bind(self.timeContainer, ["increment"], updateTime);
bind(self.timeContainer, "blur", updateTime, { capture: true });
bind(self.timeContainer, "mousedown", onClick(timeIncrement));
bind([self.hourElement, self.minuteElement], ["focus", "click"], selText);
if (self.secondElement !== undefined)
bind(
self.secondElement,
"focus",
() => self.secondElement && self.secondElement.select()
);
if (self.amPM !== undefined) {
bind(
self.amPM,
"mousedown",
onClick(e => {
updateTime(e);
triggerChange();
})
);
}
}
}
/**
* Set the calendar view to a particular date.
* @param {Date} jumpDate the date to set the view to
* @param {boolean} triggerChange if change events should be triggered
*/
function jumpToDate(jumpDate?: DateOption, triggerChange?: boolean) {
const jumpTo =
jumpDate !== undefined
? self.parseDate(jumpDate)
: self.latestSelectedDateObj ||
(self.config.minDate && self.config.minDate > self.now
? (self.config.minDate as Date)
: self.config.maxDate && self.config.maxDate < self.now
? self.config.maxDate
: self.now);
const oldYear = self.currentYear;
const oldMonth = self.currentMonth;
try {
if (jumpTo !== undefined) {
self.currentYear = jumpTo.getFullYear();
self.currentMonth = jumpTo.getMonth();
}
} catch (e) {
/* istanbul ignore next */
e.message = "Invalid date supplied: " + jumpTo;
self.config.errorHandler(e);
}
if (triggerChange && self.currentYear !== oldYear) {
triggerEvent("onYearChange");
buildMonthSwitch();
}
if (
triggerChange &&
(self.currentYear !== oldYear || self.currentMonth !== oldMonth)
) {
triggerEvent("onMonthChange");
}
self.redraw();
}
/**
* The up/down arrow handler for time inputs
* @param {Event} e the click event
*/
function timeIncrement(e: KeyboardEvent | MouseEvent) {
if (~(e.target as Element).className.indexOf("arrow"))
incrementNumInput(
e,
(e.target as Element).classList.contains("arrowUp") ? 1 : -1
);
}
/**
   * Increments/decrements the value of the input
   * associated with the up/down arrow by dispatching
   * an "increment" event on the input.
*
* @param {Event} e the click event
* @param {Number} delta the diff (usually 1 or -1)
* @param {Element} inputElem the input element
*/
function incrementNumInput(
e: KeyboardEvent | MouseEvent | undefined,
delta: number,
inputElem?: HTMLInputElement
) {
const target = e && (e.target as Element);
const input =
inputElem ||
(target && target.parentNode && target.parentNode.firstChild);
const event = createEvent("increment") as IncrementEvent;
event.delta = delta;
input && input.dispatchEvent(event);
}
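  // Illustrative: incrementNumInput(undefined, 1, self.hourElement) bumps the
  // hour input by one step; the synthetic "increment" event it dispatches is
  // routed to updateTime by the timeContainer listener in bindEvents.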
function build() {
const fragment = window.document.createDocumentFragment();
self.calendarContainer = createElement<HTMLDivElement>(
"div",
"flatpickr-calendar"
);
self.calendarContainer.tabIndex = -1;
if (!self.config.noCalendar) {
fragment.appendChild(buildMonthNav());
self.innerContainer = createElement<HTMLDivElement>(
"div",
"flatpickr-innerContainer"
);
if (self.config.weekNumbers) {
const { weekWrapper, weekNumbers } = buildWeeks();
self.innerContainer.appendChild(weekWrapper);
self.weekNumbers = weekNumbers;
self.weekWrapper = weekWrapper;
}
self.rContainer = createElement<HTMLDivElement>(
"div",
"flatpickr-rContainer"
);
self.rContainer.appendChild(buildWeekdays());
if (!self.daysContainer) {
self.daysContainer = createElement<HTMLDivElement>(
"div",
"flatpickr-days"
);
self.daysContainer.tabIndex = -1;
}
buildDays();
self.rContainer.appendChild(self.daysContainer);
self.innerContainer.appendChild(self.rContainer);
fragment.appendChild(self.innerContainer);
}
if (self.config.enableTime) {
fragment.appendChild(buildTime());
}
toggleClass(
self.calendarContainer,
"rangeMode",
self.config.mode === "range"
);
toggleClass(
self.calendarContainer,
"animate",
self.config.animate === true
);
toggleClass(
self.calendarContainer,
"multiMonth",
self.config.showMonths > 1
);
self.calendarContainer.appendChild(fragment);
const customAppend =
self.config.appendTo !== undefined &&
self.config.appendTo.nodeType !== undefined;
if (self.config.inline || self.config.static) {
self.calendarContainer.classList.add(
self.config.inline ? "inline" : "static"
);
if (self.config.inline) {
if (!customAppend && self.element.parentNode)
self.element.parentNode.insertBefore(
self.calendarContainer,
self._input.nextSibling
);
else if (self.config.appendTo !== undefined)
self.config.appendTo.appendChild(self.calendarContainer);
}
if (self.config.static) {
const wrapper = createElement("div", "flatpickr-wrapper");
if (self.element.parentNode)
self.element.parentNode.insertBefore(wrapper, self.element);
wrapper.appendChild(self.element);
if (self.altInput) wrapper.appendChild(self.altInput);
wrapper.appendChild(self.calendarContainer);
}
}
if (!self.config.static && !self.config.inline)
(self.config.appendTo !== undefined
? self.config.appendTo
: window.document.body
).appendChild(self.calendarContainer);
}
function createDay(
className: string,
date: Date,
dayNumber: number,
i: number
) {
const dateIsEnabled = isEnabled(date, true),
dayElement = createElement<DayElement>(
"span",
"flatpickr-day " + className,
date.getDate().toString()
);
dayElement.dateObj = date;
dayElement.$i = i;
dayElement.setAttribute(
"aria-label",
self.formatDate(date, self.config.ariaDateFormat)
);
if (
className.indexOf("hidden") === -1 &&
compareDates(date, self.now) === 0
) {
self.todayDateElem = dayElement;
dayElement.classList.add("today");
dayElement.setAttribute("aria-current", "date");
}
if (dateIsEnabled) {
dayElement.tabIndex = -1;
if (isDateSelected(date)) {
dayElement.classList.add("selected");
self.selectedDateElem = dayElement;
if (self.config.mode === "range") {
toggleClass(
dayElement,
"startRange",
self.selectedDates[0] &&
compareDates(date, self.selectedDates[0], true) === 0
);
toggleClass(
dayElement,
"endRange",
self.selectedDates[1] &&
compareDates(date, self.selectedDates[1], true) === 0
);
if (className === "nextMonthDay") dayElement.classList.add("inRange");
}
}
} else {
dayElement.classList.add("flatpickr-disabled");
}
if (self.config.mode === "range") {
if (isDateInRange(date) && !isDateSelected(date))
dayElement.classList.add("inRange");
}
if (
self.weekNumbers &&
self.config.showMonths === 1 &&
className !== "prevMonthDay" &&
dayNumber % 7 === 1
) {
self.weekNumbers.insertAdjacentHTML(
"beforeend",
"<span class='flatpickr-day'>" + self.config.getWeek(date) + "</span>"
);
}
triggerEvent("onDayCreate", dayElement);
return dayElement;
}
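  // Each DayElement built above carries .dateObj (the date it represents) and
  // .$i (its index in the month grid); the keyboard-navigation helpers below
  // depend on both.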
function focusOnDayElem(targetNode: DayElement) {
targetNode.focus();
if (self.config.mode === "range") onMouseOver(targetNode);
}
function getFirstAvailableDay(delta: number) {
const startMonth = delta > 0 ? 0 : self.config.showMonths - 1;
const endMonth = delta > 0 ? self.config.showMonths : -1;
for (let m = startMonth; m != endMonth; m += delta) {
const month = (self.daysContainer as HTMLDivElement).children[m];
const startIndex = delta > 0 ? 0 : month.children.length - 1;
const endIndex = delta > 0 ? month.children.length : -1;
for (let i = startIndex; i != endIndex; i += delta) {
const c = month.children[i] as DayElement;
if (c.className.indexOf("hidden") === -1 && isEnabled(c.dateObj))
return c;
}
}
return undefined;
}
function getNextAvailableDay(current: DayElement, delta: number) {
const givenMonth =
current.className.indexOf("Month") === -1
? current.dateObj.getMonth()
: self.currentMonth;
const endMonth = delta > 0 ? self.config.showMonths : -1;
const loopDelta = delta > 0 ? 1 : -1;
for (
let m = givenMonth - self.currentMonth;
m != endMonth;
m += loopDelta
) {
const month = (self.daysContainer as HTMLDivElement).children[m];
const startIndex =
givenMonth - self.currentMonth === m
? current.$i + delta
: delta < 0
? month.children.length - 1
: 0;
const numMonthDays = month.children.length;
for (
let i = startIndex;
i >= 0 && i < numMonthDays && i != (delta > 0 ? numMonthDays : -1);
i += loopDelta
) {
const c = month.children[i] as DayElement;
if (
c.className.indexOf("hidden") === -1 &&
isEnabled(c.dateObj) &&
Math.abs(current.$i - i) >= Math.abs(delta)
)
return focusOnDayElem(c);
}
}
self.changeMonth(loopDelta);
focusOnDay(getFirstAvailableDay(loopDelta), 0);
return undefined;
}
function focusOnDay(current: DayElement | undefined, offset: number) {
const dayFocused = isInView(document.activeElement || document.body);
const startElem =
current !== undefined
? current
: dayFocused
? (document.activeElement as DayElement)
: self.selectedDateElem !== undefined && isInView(self.selectedDateElem)
? self.selectedDateElem
: self.todayDateElem !== undefined && isInView(self.todayDateElem)
? self.todayDateElem
: getFirstAvailableDay(offset > 0 ? 1 : -1);
if (startElem === undefined) return self._input.focus();
if (!dayFocused) return focusOnDayElem(startElem);
getNextAvailableDay(startElem, offset);
}
function buildMonthDays(year: number, month: number) {
const firstOfMonth =
(new Date(year, month, 1).getDay() - self.l10n.firstDayOfWeek + 7) % 7;
const prevMonthDays = self.utils.getDaysInMonth((month - 1 + 12) % 12);
const daysInMonth = self.utils.getDaysInMonth(month),
days = window.document.createDocumentFragment(),
isMultiMonth = self.config.showMonths > 1,
prevMonthDayClass = isMultiMonth ? "prevMonthDay hidden" : "prevMonthDay",
nextMonthDayClass = isMultiMonth ? "nextMonthDay hidden" : "nextMonthDay";
let dayNumber = prevMonthDays + 1 - firstOfMonth,
dayIndex = 0;
    // prepend days from the end of the previous month
for (; dayNumber <= prevMonthDays; dayNumber++, dayIndex++) {
days.appendChild(
createDay(
prevMonthDayClass,
new Date(year, month - 1, dayNumber),
dayNumber,
dayIndex
)
);
}
// Start at 1 since there is no 0th day
for (dayNumber = 1; dayNumber <= daysInMonth; dayNumber++, dayIndex++) {
days.appendChild(
createDay("", new Date(year, month, dayNumber), dayNumber, dayIndex)
);
}
// append days from the next month
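    // (the grid is capped at 42 cells, i.e. 6 rows of 7 days, hence the
    // `42 - firstOfMonth` bound below)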
for (
let dayNum = daysInMonth + 1;
dayNum <= 42 - firstOfMonth &&
(self.config.showMonths === 1 || dayIndex % 7 !== 0);
dayNum++, dayIndex++
) {
days.appendChild(
createDay(
nextMonthDayClass,
new Date(year, month + 1, dayNum % daysInMonth),
dayNum,
dayIndex
)
);
}
const dayContainer = createElement<HTMLDivElement>("div", "dayContainer");
dayContainer.appendChild(days);
return dayContainer;
}
function buildDays() {
if (self.daysContainer === undefined) {
return;
}
clearNode(self.daysContainer);
// TODO: week numbers for each month
if (self.weekNumbers) clearNode(self.weekNumbers);
const frag = document.createDocumentFragment();
for (let i = 0; i < self.config.showMonths; i++) {
const d = new Date(self.currentYear, self.currentMonth, 1);
d.setMonth(self.currentMonth + i);
frag.appendChild(buildMonthDays(d.getFullYear(), d.getMonth()));
}
self.daysContainer.appendChild(frag);
self.days = self.daysContainer.firstChild as HTMLDivElement;
if (self.config.mode === "range" && self.selectedDates.length === 1) {
onMouseOver();
}
}
function buildMonthSwitch() {
if (
self.config.showMonths > 1 ||
self.config.monthSelectorType !== "dropdown"
)
return;
const shouldBuildMonth = function(month: number): boolean {
if (
self.config.minDate !== undefined &&
self.currentYear === self.config.minDate.getFullYear() &&
month < self.config.minDate.getMonth()
) {
return false;
}
return !(
self.config.maxDate !== undefined &&
self.currentYear === self.config.maxDate.getFullYear() &&
month > self.config.maxDate.getMonth()
);
};
self.monthsDropdownContainer.tabIndex = -1;
self.monthsDropdownContainer.innerHTML = "";
for (let i = 0; i < 12; i++) {
if (!shouldBuildMonth(i)) continue;
const month = createElement<HTMLOptionElement>(
"option",
"flatpickr-monthDropdown-month"
);
month.value = new Date(self.currentYear, i).getMonth().toString();
month.textContent = monthToStr(
i,
self.config.shorthandCurrentMonth,
self.l10n
);
month.tabIndex = -1;
if (self.currentMonth === i) {
month.selected = true;
}
self.monthsDropdownContainer.appendChild(month);
}
}
function buildMonth() {
const container = createElement("div", "flatpickr-month");
const monthNavFragment = window.document.createDocumentFragment();
let monthElement;
if (
self.config.showMonths > 1 ||
self.config.monthSelectorType === "static"
) {
monthElement = createElement<HTMLSpanElement>("span", "cur-month");
} else {
self.monthsDropdownContainer = createElement<HTMLSelectElement>(
"select",
"flatpickr-monthDropdown-months"
);
bind(self.monthsDropdownContainer, "change", (e: Event) => {
const target = e.target as HTMLSelectElement;
const selectedMonth = parseInt(target.value, 10);
self.changeMonth(selectedMonth - self.currentMonth);
triggerEvent("onMonthChange");
});
buildMonthSwitch();
monthElement = self.monthsDropdownContainer;
}
const yearInput = createNumberInput("cur-year", { tabindex: "-1" });
const yearElement = yearInput.getElementsByTagName(
"input"
)[0] as HTMLInputElement;
yearElement.setAttribute("aria-label", self.l10n.yearAriaLabel);
if (self.config.minDate) {
yearElement.setAttribute(
"min",
self.config.minDate.getFullYear().toString()
);
}
if (self.config.maxDate) {
yearElement.setAttribute(
"max",
self.config.maxDate.getFullYear().toString()
);
yearElement.disabled =
!!self.config.minDate &&
self.config.minDate.getFullYear() === self.config.maxDate.getFullYear();
}
const currentMonth = createElement<HTMLDivElement>(
"div",
"flatpickr-current-month"
);
currentMonth.appendChild(monthElement);
currentMonth.appendChild(yearInput);
monthNavFragment.appendChild(currentMonth);
container.appendChild(monthNavFragment);
return {
container,
yearElement,
monthElement,
};
}
function buildMonths() {
clearNode(self.monthNav);
self.monthNav.appendChild(self.prevMonthNav);
if (self.config.showMonths) {
self.yearElements = [];
self.monthElements = [];
}
for (let m = self.config.showMonths; m--; ) {
const month = buildMonth();
self.yearElements.push(month.yearElement);
self.monthElements.push(month.monthElement);
self.monthNav.appendChild(month.container);
}
self.monthNav.appendChild(self.nextMonthNav);
}
function buildMonthNav() {
self.monthNav = createElement<HTMLDivElement>("div", "flatpickr-months");
self.yearElements = [];
self.monthElements = [];
self.prevMonthNav = createElement<HTMLSpanElement>(
"span",
"flatpickr-prev-month"
);
self.prevMonthNav.innerHTML = self.config.prevArrow;
self.nextMonthNav = createElement("span", "flatpickr-next-month");
self.nextMonthNav.innerHTML = self.config.nextArrow;
buildMonths();
Object.defineProperty(self, "_hidePrevMonthArrow", {
get: () => self.__hidePrevMonthArrow,
set(bool: boolean) {
if (self.__hidePrevMonthArrow !== bool) {
toggleClass(self.prevMonthNav, "flatpickr-disabled", bool);
self.__hidePrevMonthArrow = bool;
}
},
});
Object.defineProperty(self, "_hideNextMonthArrow", {
get: () => self.__hideNextMonthArrow,
set(bool: boolean) {
if (self.__hideNextMonthArrow !== bool) {
toggleClass(self.nextMonthNav, "flatpickr-disabled", bool);
self.__hideNextMonthArrow = bool;
}
},
});
self.currentYearElement = self.yearElements[0];
updateNavigationCurrentMonth();
return self.monthNav;
}
function buildTime() {
self.calendarContainer.classList.add("hasTime");
if (self.config.noCalendar)
self.calendarContainer.classList.add("noCalendar");
self.timeContainer = createElement<HTMLDivElement>("div", "flatpickr-time");
self.timeContainer.tabIndex = -1;
const separator = createElement("span", "flatpickr-time-separator", ":");
const hourInput = createNumberInput("flatpickr-hour", {
"aria-label": self.l10n.hourAriaLabel,
});
self.hourElement = hourInput.getElementsByTagName(
"input"
)[0] as HTMLInputElement;
const minuteInput = createNumberInput("flatpickr-minute", {
"aria-label": self.l10n.minuteAriaLabel,
});
self.minuteElement = minuteInput.getElementsByTagName(
"input"
)[0] as HTMLInputElement;
self.hourElement.tabIndex = self.minuteElement.tabIndex = -1;
self.hourElement.value = pad(
self.latestSelectedDateObj
? self.latestSelectedDateObj.getHours()
: self.config.time_24hr
? self.config.defaultHour
: military2ampm(self.config.defaultHour)
);
self.minuteElement.value = pad(
self.latestSelectedDateObj
? self.latestSelectedDateObj.getMinutes()
: self.config.defaultMinute
);
self.hourElement.setAttribute("step", self.config.hourIncrement.toString());
self.minuteElement.setAttribute(
"step",
self.config.minuteIncrement.toString()
);
self.hourElement.setAttribute("min", self.config.time_24hr ? "0" : "1");
self.hourElement.setAttribute("max", self.config.time_24hr ? "23" : "12");
self.minuteElement.setAttribute("min", "0");
self.minuteElement.setAttribute("max", "59");
self.timeContainer.appendChild(hourInput);
self.timeContainer.appendChild(separator);
self.timeContainer.appendChild(minuteInput);
if (self.config.time_24hr) self.timeContainer.classList.add("time24hr");
if (self.config.enableSeconds) {
self.timeContainer.classList.add("hasSeconds");
const secondInput = createNumberInput("flatpickr-second");
self.secondElement = secondInput.getElementsByTagName(
"input"
)[0] as HTMLInputElement;
self.secondElement.value = pad(
self.latestSelectedDateObj
? self.latestSelectedDateObj.getSeconds()
: self.config.defaultSeconds
);
self.secondElement.setAttribute("step", self.minuteElement.getAttribute(
"step"
) as string);
self.secondElement.setAttribute("min", "0");
self.secondElement.setAttribute("max", "59");
self.timeContainer.appendChild(
createElement("span", "flatpickr-time-separator", ":")
);
self.timeContainer.appendChild(secondInput);
}
if (!self.config.time_24hr) {
// add self.amPM if appropriate
self.amPM = createElement(
"span",
"flatpickr-am-pm",
self.l10n.amPM[
        int(
          (self.latestSelectedDateObj
            ? parseInt(self.hourElement.value, 10)
            : self.config.defaultHour) > 11
        )
]
);
self.amPM.title = self.l10n.toggleTitle;
self.amPM.tabIndex = -1;
self.timeContainer.appendChild(self.amPM);
}
return self.timeContainer;
}
function buildWeekdays() {
if (!self.weekdayContainer)
self.weekdayContainer = createElement<HTMLDivElement>(
"div",
"flatpickr-weekdays"
);
else clearNode(self.weekdayContainer);
for (let i = self.config.showMonths; i--; ) {
const container = createElement<HTMLDivElement>(
"div",
"flatpickr-weekdaycontainer"
);
self.weekdayContainer.appendChild(container);
}
updateWeekdays();
return self.weekdayContainer;
}
function updateWeekdays() {
const firstDayOfWeek = self.l10n.firstDayOfWeek;
let weekdays = [...self.l10n.weekdays.shorthand];
if (firstDayOfWeek > 0 && firstDayOfWeek < weekdays.length) {
weekdays = [
...weekdays.splice(firstDayOfWeek, weekdays.length),
...weekdays.splice(0, firstDayOfWeek),
];
}
for (let i = self.config.showMonths; i--; ) {
self.weekdayContainer.children[i].innerHTML = `
<span class='flatpickr-weekday'>
${weekdays.join("</span><span class='flatpickr-weekday'>")}
</span>
`;
}
}
/* istanbul ignore next */
function buildWeeks() {
self.calendarContainer.classList.add("hasWeeks");
const weekWrapper = createElement<HTMLDivElement>(
"div",
"flatpickr-weekwrapper"
);
weekWrapper.appendChild(
createElement("span", "flatpickr-weekday", self.l10n.weekAbbreviation)
);
const weekNumbers = createElement<HTMLDivElement>("div", "flatpickr-weeks");
weekWrapper.appendChild(weekNumbers);
return {
weekWrapper,
weekNumbers,
};
}
function changeMonth(value: number, isOffset = true) {
const delta = isOffset ? value : value - self.currentMonth;
if (
(delta < 0 && self._hidePrevMonthArrow === true) ||
(delta > 0 && self._hideNextMonthArrow === true)
)
return;
self.currentMonth += delta;
if (self.currentMonth < 0 || self.currentMonth > 11) {
self.currentYear += self.currentMonth > 11 ? 1 : -1;
self.currentMonth = (self.currentMonth + 12) % 12;
triggerEvent("onYearChange");
buildMonthSwitch();
}
buildDays();
triggerEvent("onMonthChange");
updateNavigationCurrentMonth();
}
function clear(triggerChangeEvent = true, toInitial = true) {
self.input.value = "";
if (self.altInput !== undefined) self.altInput.value = "";
if (self.mobileInput !== undefined) self.mobileInput.value = "";
self.selectedDates = [];
self.latestSelectedDateObj = undefined;
if (toInitial === true) {
self.currentYear = self._initialDate.getFullYear();
self.currentMonth = self._initialDate.getMonth();
}
self.showTimeInput = false;
if (self.config.enableTime === true) {
setDefaultHours();
}
self.redraw();
if (triggerChangeEvent)
      // triggerChangeEvent defaults to true; a (truthy) Event also works,
      // since clear() doubles as a click handler for [data-clear] elements
triggerEvent("onChange");
}
function close() {
self.isOpen = false;
if (!self.isMobile) {
if (self.calendarContainer !== undefined) {
self.calendarContainer.classList.remove("open");
}
if (self._input !== undefined) {
self._input.classList.remove("active");
}
}
triggerEvent("onClose");
}
function destroy() {
if (self.config !== undefined) triggerEvent("onDestroy");
for (let i = self._handlers.length; i--; ) {
const h = self._handlers[i];
h.element.removeEventListener(
h.event,
h.handler as EventListener,
h.options
);
}
self._handlers = [];
if (self.mobileInput) {
if (self.mobileInput.parentNode)
self.mobileInput.parentNode.removeChild(self.mobileInput);
self.mobileInput = undefined;
} else if (self.calendarContainer && self.calendarContainer.parentNode) {
if (self.config.static && self.calendarContainer.parentNode) {
const wrapper = self.calendarContainer.parentNode;
wrapper.lastChild && wrapper.removeChild(wrapper.lastChild);
if (wrapper.parentNode) {
while (wrapper.firstChild)
wrapper.parentNode.insertBefore(wrapper.firstChild, wrapper);
wrapper.parentNode.removeChild(wrapper);
}
} else
self.calendarContainer.parentNode.removeChild(self.calendarContainer);
}
if (self.altInput) {
self.input.type = "text";
if (self.altInput.parentNode)
self.altInput.parentNode.removeChild(self.altInput);
delete self.altInput;
}
if (self.input) {
self.input.type = (self.input as any)._type;
self.input.classList.remove("flatpickr-input");
self.input.removeAttribute("readonly");
self.input.value = "";
}
([
"_showTimeInput",
"latestSelectedDateObj",
"_hideNextMonthArrow",
"_hidePrevMonthArrow",
"__hideNextMonthArrow",
"__hidePrevMonthArrow",
"isMobile",
"isOpen",
"selectedDateElem",
"minDateHasTime",
"maxDateHasTime",
"days",
"daysContainer",
"_input",
"_positionElement",
"innerContainer",
"rContainer",
"monthNav",
"todayDateElem",
"calendarContainer",
"weekdayContainer",
"prevMonthNav",
"nextMonthNav",
"monthsDropdownContainer",
"currentMonthElement",
"currentYearElement",
"navigationCurrentMonth",
"selectedDateElem",
"config",
] as (keyof Instance)[]).forEach(k => {
try {
delete self[k as keyof Instance];
} catch (_) {}
});
}
function isCalendarElem(elem: HTMLElement) {
if (self.config.appendTo && self.config.appendTo.contains(elem))
return true;
return self.calendarContainer.contains(elem);
}
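  /**
   * Closes the calendar when a click or focus lands outside of it,
   * committing any pending time-input edits and clearing a half-finished
   * range selection.
   */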
function documentClick(e: MouseEvent) {
if (self.isOpen && !self.config.inline) {
const eventTarget = getEventTarget(e);
const isCalendarElement = isCalendarElem(eventTarget as HTMLElement);
const isInput =
eventTarget === self.input ||
eventTarget === self.altInput ||
self.element.contains(eventTarget as HTMLElement) ||
// web components
        // e.path is not present in all browsers, so typechecks are circumvented
((e as any).path &&
(e as any).path.indexOf &&
(~(e as any).path.indexOf(self.input) ||
~(e as any).path.indexOf(self.altInput)));
const lostFocus =
e.type === "blur"
? isInput &&
e.relatedTarget &&
!isCalendarElem(e.relatedTarget as HTMLElement)
: !isInput &&
!isCalendarElement &&
!isCalendarElem(e.relatedTarget as HTMLElement);
const isIgnored = !self.config.ignoredFocusElements.some(elem =>
elem.contains(eventTarget as Node)
);
if (lostFocus && isIgnored) {
if (
self.timeContainer !== undefined &&
self.minuteElement !== undefined &&
self.hourElement !== undefined
) {
updateTime();
}
self.close();
if (self.config.mode === "range" && self.selectedDates.length === 1) {
self.clear(false);
self.redraw();
}
}
}
}
function changeYear(newYear: number) {
if (
!newYear ||
(self.config.minDate && newYear < self.config.minDate.getFullYear()) ||
(self.config.maxDate && newYear > self.config.maxDate.getFullYear())
)
return;
const newYearNum = newYear,
isNewYear = self.currentYear !== newYearNum;
self.currentYear = newYearNum || self.currentYear;
if (
self.config.maxDate &&
self.currentYear === self.config.maxDate.getFullYear()
) {
self.currentMonth = Math.min(
self.config.maxDate.getMonth(),
self.currentMonth
);
} else if (
self.config.minDate &&
self.currentYear === self.config.minDate.getFullYear()
) {
self.currentMonth = Math.max(
self.config.minDate.getMonth(),
self.currentMonth
);
}
if (isNewYear) {
self.redraw();
triggerEvent("onYearChange");
buildMonthSwitch();
}
}
function isEnabled(date: DateOption, timeless: boolean = true): boolean {
const dateToCheck = self.parseDate(date, undefined, timeless); // timeless
if (
(self.config.minDate &&
dateToCheck &&
compareDates(
dateToCheck,
self.config.minDate,
timeless !== undefined ? timeless : !self.minDateHasTime
) < 0) ||
(self.config.maxDate &&
dateToCheck &&
compareDates(
dateToCheck,
self.config.maxDate,
timeless !== undefined ? timeless : !self.maxDateHasTime
) > 0)
)
return false;
if (self.config.enable.length === 0 && self.config.disable.length === 0)
return true;
if (dateToCheck === undefined) return false;
const bool = self.config.enable.length > 0,
array = bool ? self.config.enable : self.config.disable;
for (let i = 0, d; i < array.length; i++) {
d = array[i];
if (
typeof d === "function" &&
d(dateToCheck) // disabled by function
)
return bool;
else if (
d instanceof Date &&
dateToCheck !== undefined &&
d.getTime() === dateToCheck.getTime()
)
// disabled by date
return bool;
else if (typeof d === "string" && dateToCheck !== undefined) {
// disabled by date string
const parsed = self.parseDate(d, undefined, true);
return parsed && parsed.getTime() === dateToCheck.getTime()
? bool
: !bool;
} else if (
// disabled by range
typeof d === "object" &&
dateToCheck !== undefined &&
(d as DateRangeLimit).from &&
(d as DateRangeLimit).to &&
dateToCheck.getTime() >= (d as DateRangeLimit<Date>).from.getTime() &&
dateToCheck.getTime() <= (d as DateRangeLimit<Date>).to.getTime()
)
return bool;
}
return !bool;
}
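  // Illustrative rule shapes accepted by the enable/disable arrays above
  // (all values hypothetical):
  //   disable: ["2025-01-01"]                             // date string
  //   disable: [new Date(2025, 0, 1)]                     // Date object
  //   disable: [(d: Date) => d.getDay() === 0]            // predicate fn
  //   disable: [{ from: "2025-01-01", to: "2025-01-07" }] // date range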
function isInView(elem: Element) {
if (self.daysContainer !== undefined)
return (
elem.className.indexOf("hidden") === -1 &&
self.daysContainer.contains(elem)
);
return false;
}
function onKeyDown(e: KeyboardEvent) {
// e.key e.keyCode
// "Backspace" 8
// "Tab" 9
// "Enter" 13
// "Escape" (IE "Esc") 27
// "ArrowLeft" (IE "Left") 37
// "ArrowUp" (IE "Up") 38
// "ArrowRight" (IE "Right") 39
// "ArrowDown" (IE "Down") 40
// "Delete" (IE "Del") 46
const isInput = e.target === self._input;
const allowInput = self.config.allowInput;
const allowKeydown = self.isOpen && (!allowInput || !isInput);
const allowInlineKeydown = self.config.inline && isInput && !allowInput;
if (e.keyCode === 13 && isInput) {
if (allowInput) {
self.setDate(
self._input.value,
true,
e.target === self.altInput
? self.config.altFormat
: self.config.dateFormat
);
return (e.target as HTMLElement).blur();
} else {
self.open();
}
} else if (
isCalendarElem(e.target as HTMLElement) ||
allowKeydown ||
allowInlineKeydown
) {
const isTimeObj =
!!self.timeContainer &&
self.timeContainer.contains(e.target as HTMLElement);
switch (e.keyCode) {
case 13:
if (isTimeObj) {
e.preventDefault();
updateTime();
focusAndClose();
} else selectDate(e);
break;
case 27: // escape
e.preventDefault();
focusAndClose();
break;
case 8:
case 46:
if (isInput && !self.config.allowInput) {
e.preventDefault();
self.clear();
}
break;
case 37:
case 39:
if (!isTimeObj && !isInput) {
e.preventDefault();
if (
self.daysContainer !== undefined &&
(allowInput === false ||
(document.activeElement && isInView(document.activeElement)))
) {
const delta = e.keyCode === 39 ? 1 : -1;
if (!e.ctrlKey) focusOnDay(undefined, delta);
else {
e.stopPropagation();
changeMonth(delta);
focusOnDay(getFirstAvailableDay(1), 0);
}
}
} else if (self.hourElement) self.hourElement.focus();
break;
        case 38:
        case 40: {
          e.preventDefault();
          const delta = e.keyCode === 40 ? 1 : -1;
          if (
            (self.daysContainer && (e.target as DayElement).$i !== undefined) ||
            e.target === self.input
          ) {
            if (e.ctrlKey) {
              e.stopPropagation();
              changeYear(self.currentYear - delta);
              focusOnDay(getFirstAvailableDay(1), 0);
            } else if (!isTimeObj) focusOnDay(undefined, delta * 7);
          } else if (e.target === self.currentYearElement) {
            changeYear(self.currentYear - delta);
          } else if (self.config.enableTime) {
            if (!isTimeObj && self.hourElement) self.hourElement.focus();
            updateTime(e);
            self._debouncedChange();
          }
          break;
        }
        case 9: {
          if (isTimeObj) {
            const elems = ([
              self.hourElement,
              self.minuteElement,
              self.secondElement,
              self.amPM,
            ] as Node[])
              .concat(self.pluginElements)
              .filter(x => x) as HTMLInputElement[];
            const i = elems.indexOf(e.target as HTMLInputElement);
            if (i !== -1) {
              const target = elems[i + (e.shiftKey ? -1 : 1)];
              e.preventDefault();
              (target || self._input).focus();
            }
          } else if (
            !self.config.noCalendar &&
            self.daysContainer &&
            self.daysContainer.contains(e.target as Node) &&
            e.shiftKey
          ) {
            e.preventDefault();
            self._input.focus();
          }
          break;
        }
default:
break;
}
}
if (self.amPM !== undefined && e.target === self.amPM) {
switch (e.key) {
case self.l10n.amPM[0].charAt(0):
case self.l10n.amPM[0].charAt(0).toLowerCase():
self.amPM.textContent = self.l10n.amPM[0];
setHoursFromInputs();
updateValue();
break;
case self.l10n.amPM[1].charAt(0):
case self.l10n.amPM[1].charAt(0).toLowerCase():
self.amPM.textContent = self.l10n.amPM[1];
setHoursFromInputs();
updateValue();
break;
}
}
if (isInput || isCalendarElem(e.target as HTMLElement)) {
triggerEvent("onKeyDown", e);
}
}
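  /**
   * Paints the range-mode hover preview: days between the single selected
   * date and the hovered day get the inRange/startRange/endRange classes,
   * while days past the nearest disabled date are marked "notAllowed".
   */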
function onMouseOver(elem?: DayElement) {
if (
self.selectedDates.length !== 1 ||
(elem &&
(!elem.classList.contains("flatpickr-day") ||
elem.classList.contains("flatpickr-disabled")))
)
return;
const hoverDate = elem
? elem.dateObj.getTime()
: (self.days.firstElementChild as DayElement).dateObj.getTime(),
initialDate = (self.parseDate(
self.selectedDates[0],
undefined,
true
) as Date).getTime(),
rangeStartDate = Math.min(hoverDate, self.selectedDates[0].getTime()),
rangeEndDate = Math.max(hoverDate, self.selectedDates[0].getTime());
let containsDisabled = false;
let minRange = 0,
maxRange = 0;
for (let t = rangeStartDate; t < rangeEndDate; t += duration.DAY) {
if (!isEnabled(new Date(t), true)) {
containsDisabled =
containsDisabled || (t > rangeStartDate && t < rangeEndDate);
if (t < initialDate && (!minRange || t > minRange)) minRange = t;
else if (t > initialDate && (!maxRange || t < maxRange)) maxRange = t;
}
}
for (let m = 0; m < self.config.showMonths; m++) {
const month = (self.daysContainer as HTMLElement).children[m];
for (let i = 0, l = month.children.length; i < l; i++) {
const dayElem = month.children[i] as DayElement,
date = dayElem.dateObj;
const timestamp = date.getTime();
const outOfRange =
(minRange > 0 && timestamp < minRange) ||
(maxRange > 0 && timestamp > maxRange);
if (outOfRange) {
dayElem.classList.add("notAllowed");
["inRange", "startRange", "endRange"].forEach(c => {
dayElem.classList.remove(c);
});
continue;
} else if (containsDisabled && !outOfRange) continue;
["startRange", "inRange", "endRange", "notAllowed"].forEach(c => {
dayElem.classList.remove(c);
});
if (elem !== undefined) {
elem.classList.add(
hoverDate <= self.selectedDates[0].getTime()
? "startRange"
: "endRange"
);
if (initialDate < hoverDate && timestamp === initialDate)
dayElem.classList.add("startRange");
else if (initialDate > hoverDate && timestamp === initialDate)
dayElem.classList.add("endRange");
if (
timestamp >= minRange &&
(maxRange === 0 || timestamp <= maxRange) &&
isBetween(timestamp, initialDate, hoverDate)
)
dayElem.classList.add("inRange");
}
}
}
}
function onResize() {
if (self.isOpen && !self.config.static && !self.config.inline)
positionCalendar();
}
function setDefaultTime() {
self.setDate(
self.config.minDate !== undefined
? new Date(self.config.minDate.getTime())
: new Date(),
true
);
setDefaultHours();
updateValue();
}
function open(
e?: FocusEvent | MouseEvent,
positionElement = self._positionElement
) {
if (self.isMobile === true) {
if (e) {
e.preventDefault();
e.target && (e.target as HTMLInputElement).blur();
}
if (self.mobileInput !== undefined) {
self.mobileInput.focus();
self.mobileInput.click();
}
triggerEvent("onOpen");
return;
}
if (self._input.disabled || self.config.inline) return;
const wasOpen = self.isOpen;
self.isOpen = true;
if (!wasOpen) {
self.calendarContainer.classList.add("open");
self._input.classList.add("active");
triggerEvent("onOpen");
positionCalendar(positionElement);
}
if (self.config.enableTime === true && self.config.noCalendar === true) {
if (self.selectedDates.length === 0) {
setDefaultTime();
}
if (
self.config.allowInput === false &&
(e === undefined ||
!(self.timeContainer as HTMLDivElement).contains(
e.relatedTarget as Node
))
) {
setTimeout(() => (self.hourElement as HTMLInputElement).select(), 50);
}
}
}
function minMaxDateSetter(type: "min" | "max") {
return (date: DateOption) => {
const dateObj = (self.config[
`_${type}Date` as "_minDate" | "_maxDate"
] = self.parseDate(date, self.config.dateFormat));
const inverseDateObj =
self.config[
`_${type === "min" ? "max" : "min"}Date` as "_minDate" | "_maxDate"
];
if (dateObj !== undefined) {
self[type === "min" ? "minDateHasTime" : "maxDateHasTime"] =
(dateObj as Date).getHours() > 0 ||
(dateObj as Date).getMinutes() > 0 ||
(dateObj as Date).getSeconds() > 0;
}
if (self.selectedDates) {
self.selectedDates = self.selectedDates.filter(d => isEnabled(d));
if (!self.selectedDates.length && type === "min")
setHoursFromDate(dateObj);
updateValue();
}
if (self.daysContainer) {
redraw();
if (dateObj !== undefined)
self.currentYearElement[type] = dateObj.getFullYear().toString();
else self.currentYearElement.removeAttribute(type);
self.currentYearElement.disabled =
!!inverseDateObj &&
dateObj !== undefined &&
inverseDateObj.getFullYear() === dateObj.getFullYear();
}
};
}
function parseConfig() {
const boolOpts: (keyof Options)[] = [
"wrap",
"weekNumbers",
"allowInput",
"clickOpens",
"time_24hr",
"enableTime",
"noCalendar",
"altInput",
"shorthandCurrentMonth",
"inline",
"static",
"enableSeconds",
"disableMobile",
];
const userConfig = {
...instanceConfig,
...JSON.parse(JSON.stringify(element.dataset || {})),
} as Options;
const formats = {} as Record<"dateFormat" | "altFormat", string>;
self.config.parseDate = userConfig.parseDate;
self.config.formatDate = userConfig.formatDate;
Object.defineProperty(self.config, "enable", {
get: () => self.config._enable,
set: dates => {
self.config._enable = parseDateRules(dates);
},
});
Object.defineProperty(self.config, "disable", {
get: () => self.config._disable,
set: dates => {
self.config._disable = parseDateRules(dates);
},
});
const timeMode = userConfig.mode === "time";
if (!userConfig.dateFormat && (userConfig.enableTime || timeMode)) {
const defaultDateFormat =
flatpickr.defaultConfig.dateFormat || defaultOptions.dateFormat;
formats.dateFormat =
userConfig.noCalendar || timeMode
? "H:i" + (userConfig.enableSeconds ? ":S" : "")
: defaultDateFormat + " H:i" + (userConfig.enableSeconds ? ":S" : "");
}
if (
userConfig.altInput &&
(userConfig.enableTime || timeMode) &&
!userConfig.altFormat
) {
const defaultAltFormat =
flatpickr.defaultConfig.altFormat || defaultOptions.altFormat;
formats.altFormat =
userConfig.noCalendar || timeMode
? "h:i" + (userConfig.enableSeconds ? ":S K" : " K")
: defaultAltFormat + ` h:i${userConfig.enableSeconds ? ":S" : ""} K`;
}
if (!userConfig.altInputClass) {
self.config.altInputClass =
self.input.className + " " + self.config.altInputClass;
}
Object.defineProperty(self.config, "minDate", {
get: () => self.config._minDate,
set: minMaxDateSetter("min"),
});
Object.defineProperty(self.config, "maxDate", {
get: () => self.config._maxDate,
set: minMaxDateSetter("max"),
});
const minMaxTimeSetter = (type: string) => (val: any) => {
self.config[type === "min" ? "_minTime" : "_maxTime"] = self.parseDate(
val,
"H:i:S"
);
};
Object.defineProperty(self.config, "minTime", {
get: () => self.config._minTime,
set: minMaxTimeSetter("min"),
});
Object.defineProperty(self.config, "maxTime", {
get: () => self.config._maxTime,
set: minMaxTimeSetter("max"),
});
if (userConfig.mode === "time") {
self.config.noCalendar = true;
self.config.enableTime = true;
}
Object.assign(self.config, formats, userConfig);
for (let i = 0; i < boolOpts.length; i++)
self.config[boolOpts[i]] =
self.config[boolOpts[i]] === true ||
self.config[boolOpts[i]] === "true";
HOOKS.filter(hook => self.config[hook] !== undefined).forEach(hook => {
self.config[hook] = arrayify(self.config[hook] || []).map(bindToInstance);
});
self.isMobile =
!self.config.disableMobile &&
!self.config.inline &&
self.config.mode === "single" &&
!self.config.disable.length &&
!self.config.enable.length &&
!self.config.weekNumbers &&
/Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(
navigator.userAgent
);
for (let i = 0; i < self.config.plugins.length; i++) {
const pluginConf = self.config.plugins[i](self) || ({} as Options);
for (const key in pluginConf) {
if (HOOKS.indexOf(key as HookKey) > -1) {
self.config[key as keyof Options] = arrayify(pluginConf[
key as HookKey
] as Hook)
.map(bindToInstance)
.concat(self.config[key as HookKey]);
} else if (typeof userConfig[key as keyof Options] === "undefined")
self.config[key as keyof ParsedOptions] = pluginConf[
key as keyof Options
] as any;
}
}
triggerEvent("onParseConfig");
}
function setupLocale() {
if (
typeof self.config.locale !== "object" &&
typeof flatpickr.l10ns[self.config.locale as LocaleKey] === "undefined"
)
self.config.errorHandler(
new Error(`flatpickr: invalid locale ${self.config.locale}`)
);
self.l10n = {
...(flatpickr.l10ns.default as Locale),
...(typeof self.config.locale === "object"
? self.config.locale
: self.config.locale !== "default"
? flatpickr.l10ns[self.config.locale as LocaleKey]
: undefined),
};
tokenRegex.K = `(${self.l10n.amPM[0]}|${
self.l10n.amPM[1]
}|${self.l10n.amPM[0].toLowerCase()}|${self.l10n.amPM[1].toLowerCase()})`;
const userConfig = {
...instanceConfig,
...JSON.parse(JSON.stringify(element.dataset || {})),
} as Options;
if (
userConfig.time_24hr === undefined &&
flatpickr.defaultConfig.time_24hr === undefined
) {
self.config.time_24hr = self.l10n.time_24hr;
}
self.formatDate = createDateFormatter(self);
self.parseDate = createDateParser({ config: self.config, l10n: self.l10n });
}
function positionCalendar(customPositionElement?: HTMLElement) {
if (self.calendarContainer === undefined) return;
triggerEvent("onPreCalendarPosition");
const positionElement = customPositionElement || self._positionElement;
const calendarHeight = Array.prototype.reduce.call(
self.calendarContainer.children,
((acc: number, child: HTMLElement) => acc + child.offsetHeight) as any,
0
),
calendarWidth = self.calendarContainer.offsetWidth,
configPos = self.config.position.split(" "),
configPosVertical = configPos[0],
configPosHorizontal = configPos.length > 1 ? configPos[1] : null,
inputBounds = positionElement.getBoundingClientRect(),
distanceFromBottom = window.innerHeight - inputBounds.bottom,
showOnTop =
configPosVertical === "above" ||
(configPosVertical !== "below" &&
distanceFromBottom < calendarHeight &&
inputBounds.top > calendarHeight);
let top =
window.pageYOffset +
inputBounds.top +
(!showOnTop ? positionElement.offsetHeight + 2 : -calendarHeight - 2);
toggleClass(self.calendarContainer, "arrowTop", !showOnTop);
toggleClass(self.calendarContainer, "arrowBottom", showOnTop);
if (self.config.inline) return;
const left =
window.pageXOffset +
inputBounds.left -
(configPosHorizontal != null && configPosHorizontal === "center"
? (calendarWidth - inputBounds.width) / 2
: 0);
const right = window.document.body.offsetWidth - (window.pageXOffset + inputBounds.right);
const rightMost = left + calendarWidth > window.document.body.offsetWidth;
const centerMost = right + calendarWidth > window.document.body.offsetWidth;
toggleClass(self.calendarContainer, "rightMost", rightMost);
if (self.config.static) return;
self.calendarContainer.style.top = `${top}px`;
if (!rightMost) {
self.calendarContainer.style.left = `${left}px`;
self.calendarContainer.style.right = "auto";
} else if (!centerMost) {
self.calendarContainer.style.left = "auto";
self.calendarContainer.style.right = `${right}px`;
} else {
const doc = document.styleSheets[0] as CSSStyleSheet;
// some testing environments don't have css support
if (doc === undefined) return;
const bodyWidth = window.document.body.offsetWidth;
const centerLeft = Math.max(0, bodyWidth / 2 - calendarWidth / 2);
const centerBefore = ".flatpickr-calendar.centerMost:before";
const centerAfter = ".flatpickr-calendar.centerMost:after";
const centerIndex = doc.cssRules.length;
const centerStyle = `{left:${inputBounds.left}px;right:auto;}`;
toggleClass(self.calendarContainer, "rightMost", false);
toggleClass(self.calendarContainer, "centerMost", true);
doc.insertRule(
`${centerBefore},${centerAfter}${centerStyle}`,
centerIndex
);
self.calendarContainer.style.left = `${centerLeft}px`;
self.calendarContainer.style.right = "auto";
}
}
function redraw() {
if (self.config.noCalendar || self.isMobile) return;
updateNavigationCurrentMonth();
buildDays();
}
function focusAndClose() {
self._input.focus();
if (
window.navigator.userAgent.indexOf("MSIE") !== -1 ||
navigator.msMaxTouchPoints !== undefined
) {
      // hack - bugs in the way IE handles focus keep the calendar open
setTimeout(self.close, 0);
} else {
self.close();
}
}
function selectDate(e: MouseEvent | KeyboardEvent) {
e.preventDefault();
e.stopPropagation();
const isSelectable = (day: Element) =>
day.classList &&
day.classList.contains("flatpickr-day") &&
!day.classList.contains("flatpickr-disabled") &&
!day.classList.contains("notAllowed");
const t = findParent(e.target as Element, isSelectable);
if (t === undefined) return;
const target = t as DayElement;
const selectedDate = (self.latestSelectedDateObj = new Date(
target.dateObj.getTime()
));
const shouldChangeMonth =
(selectedDate.getMonth() < self.currentMonth ||
selectedDate.getMonth() >
self.currentMonth + self.config.showMonths - 1) &&
self.config.mode !== "range";
self.selectedDateElem = target;
if (self.config.mode === "single") self.selectedDates = [selectedDate];
else if (self.config.mode === "multiple") {
const selectedIndex = isDateSelected(selectedDate);
if (selectedIndex) self.selectedDates.splice(parseInt(selectedIndex), 1);
else self.selectedDates.push(selectedDate);
} else if (self.config.mode === "range") {
if (self.selectedDates.length === 2) {
self.clear(false, false);
}
self.latestSelectedDateObj = selectedDate;
self.selectedDates.push(selectedDate);
      // unless the same date is selected twice, sort in ascending order
if (compareDates(selectedDate, self.selectedDates[0], true) !== 0)
self.selectedDates.sort((a, b) => a.getTime() - b.getTime());
}
setHoursFromInputs();
if (shouldChangeMonth) {
const isNewYear = self.currentYear !== selectedDate.getFullYear();
self.currentYear = selectedDate.getFullYear();
self.currentMonth = selectedDate.getMonth();
if (isNewYear) {
triggerEvent("onYearChange");
buildMonthSwitch();
}
triggerEvent("onMonthChange");
}
updateNavigationCurrentMonth();
buildDays();
updateValue();
if (self.config.enableTime)
setTimeout(() => (self.showTimeInput = true), 50);
// maintain focus
if (
!shouldChangeMonth &&
self.config.mode !== "range" &&
self.config.showMonths === 1
)
focusOnDayElem(target);
    else if (
      self.selectedDateElem !== undefined &&
      self.hourElement === undefined
    ) {
      self.selectedDateElem.focus();
    }
    if (self.hourElement !== undefined) self.hourElement.focus();
if (self.config.closeOnSelect) {
const single = self.config.mode === "single" && !self.config.enableTime;
const range =
self.config.mode === "range" &&
self.selectedDates.length === 2 &&
!self.config.enableTime;
if (single || range) {
focusAndClose();
}
}
triggerChange();
}
const CALLBACKS: { [k in keyof Options]: Function[] } = {
locale: [setupLocale, updateWeekdays],
showMonths: [buildMonths, setCalendarWidth, buildWeekdays],
minDate: [jumpToDate],
maxDate: [jumpToDate],
};
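  // Illustrative (option values hypothetical): set("minDate", "2025-06-01")
  // assigns the option and then fires its CALLBACKS entry (jumpToDate);
  // the object form set({ minDate: a, maxDate: b }) batches both updates.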
function set<K extends keyof Options>(
option: K | { [k in K]?: Options[k] },
value?: any
) {
if (option !== null && typeof option === "object") {
Object.assign(self.config, option);
for (const key in option) {
if (CALLBACKS[key] !== undefined)
(CALLBACKS[key] as Function[]).forEach(x => x());
}
} else {
self.config[option] = value;
if (CALLBACKS[option] !== undefined)
(CALLBACKS[option] as Function[]).forEach(x => x());
else if (HOOKS.indexOf(option as HookKey) > -1)
self.config[option] = arrayify(value);
}
self.redraw();
updateValue(false);
}
function setSelectedDate(
inputDate: DateOption | DateOption[],
format?: string
) {
let dates: (Date | undefined)[] = [];
if (inputDate instanceof Array)
dates = inputDate.map(d => self.parseDate(d, format));
else if (inputDate instanceof Date || typeof inputDate === "number")
dates = [self.parseDate(inputDate, format)];
else if (typeof inputDate === "string") {
switch (self.config.mode) {
case "single":
case "time":
dates = [self.parseDate(inputDate, format)];
break;
case "multiple":
dates = inputDate
.split(self.config.conjunction)
.map(date => self.parseDate(date, format));
break;
case "range":
dates = inputDate
.split(self.l10n.rangeSeparator)
.map(date => self.parseDate(date, format));
break;
default:
break;
}
} else
self.config.errorHandler(
new Error(`Invalid date supplied: ${JSON.stringify(inputDate)}`)
);
self.selectedDates = dates.filter(
d => d instanceof Date && isEnabled(d, false)
) as Date[];
if (self.config.mode === "range")
self.selectedDates.sort((a, b) => a.getTime() - b.getTime());
}
function setDate(
date: DateOption | DateOption[],
triggerChange = false,
format = self.config.dateFormat
) {
if ((date !== 0 && !date) || (date instanceof Array && date.length === 0))
return self.clear(triggerChange);
setSelectedDate(date, format);
self.showTimeInput = self.selectedDates.length > 0;
self.latestSelectedDateObj =
self.selectedDates[self.selectedDates.length - 1];
self.redraw();
jumpToDate();
setHoursFromDate();
if (self.selectedDates.length === 0) {
self.clear(false);
}
updateValue(triggerChange);
if (triggerChange) triggerEvent("onChange");
}
function parseDateRules(arr: DateLimit[]): DateLimit<Date>[] {
return arr
.slice()
.map(rule => {
if (
typeof rule === "string" ||
typeof rule === "number" ||
rule instanceof Date
) {
return self.parseDate(
rule as Date | string | number,
undefined,
true
) as Date;
} else if (
rule &&
typeof rule === "object" &&
(rule as DateRangeLimit).from &&
(rule as DateRangeLimit).to
)
return {
from: self.parseDate(
(rule as DateRangeLimit).from,
undefined
) as Date,
to: self.parseDate((rule as DateRangeLimit).to, undefined) as Date,
};
return rule;
})
.filter(x => x) as DateLimit<Date>[]; // remove falsy values
}
function setupDates() {
self.selectedDates = [];
self.now = self.parseDate(self.config.now) || new Date();
// Workaround IE11 setting placeholder as the input's value
const preloadedDate =
self.config.defaultDate ||
((self.input.nodeName === "INPUT" ||
self.input.nodeName === "TEXTAREA") &&
self.input.placeholder &&
self.input.value === self.input.placeholder
? null
: self.input.value);
if (preloadedDate) setSelectedDate(preloadedDate, self.config.dateFormat);
self._initialDate =
self.selectedDates.length > 0
? self.selectedDates[0]
: self.config.minDate &&
self.config.minDate.getTime() > self.now.getTime()
? self.config.minDate
: self.config.maxDate &&
self.config.maxDate.getTime() < self.now.getTime()
? self.config.maxDate
: self.now;
self.currentYear = self._initialDate.getFullYear();
self.currentMonth = self._initialDate.getMonth();
if (self.selectedDates.length > 0)
self.latestSelectedDateObj = self.selectedDates[0];
if (self.config.minTime !== undefined)
self.config.minTime = self.parseDate(self.config.minTime, "H:i");
if (self.config.maxTime !== undefined)
self.config.maxTime = self.parseDate(self.config.maxTime, "H:i");
self.minDateHasTime =
!!self.config.minDate &&
(self.config.minDate.getHours() > 0 ||
self.config.minDate.getMinutes() > 0 ||
self.config.minDate.getSeconds() > 0);
self.maxDateHasTime =
!!self.config.maxDate &&
(self.config.maxDate.getHours() > 0 ||
self.config.maxDate.getMinutes() > 0 ||
self.config.maxDate.getSeconds() > 0);
Object.defineProperty(self, "showTimeInput", {
get: () => self._showTimeInput,
set(bool: boolean) {
self._showTimeInput = bool;
if (self.calendarContainer)
toggleClass(self.calendarContainer, "showTimeInput", bool);
self.isOpen && positionCalendar();
},
});
}
function setupInputs() {
self.input = self.config.wrap
? (element.querySelector("[data-input]") as HTMLInputElement)
: (element as HTMLInputElement);
/* istanbul ignore next */
if (!self.input) {
self.config.errorHandler(new Error("Invalid input element specified"));
return;
}
// hack: store previous type to restore it after destroy()
(self.input as any)._type = (self.input as any).type;
(self.input as any).type = "text";
self.input.classList.add("flatpickr-input");
self._input = self.input;
if (self.config.altInput) {
// replicate self.element
self.altInput = createElement<HTMLInputElement>(
self.input.nodeName as "input",
self.config.altInputClass
);
self._input = self.altInput;
self.altInput.placeholder = self.input.placeholder;
self.altInput.disabled = self.input.disabled;
self.altInput.required = self.input.required;
self.altInput.tabIndex = self.input.tabIndex;
self.altInput.type = "text";
self.input.setAttribute("type", "hidden");
if (!self.config.static && self.input.parentNode)
self.input.parentNode.insertBefore(
self.altInput,
self.input.nextSibling
);
}
if (!self.config.allowInput)
self._input.setAttribute("readonly", "readonly");
self._positionElement = self.config.positionElement || self._input;
}
function setupMobile() {
const inputType = self.config.enableTime
? self.config.noCalendar
? "time"
: "datetime-local"
: "date";
self.mobileInput = createElement<HTMLInputElement>(
"input",
self.input.className + " flatpickr-mobile"
);
self.mobileInput.step = self.input.getAttribute("step") || "any";
self.mobileInput.tabIndex = 1;
self.mobileInput.type = inputType;
self.mobileInput.disabled = self.input.disabled;
self.mobileInput.required = self.input.required;
self.mobileInput.placeholder = self.input.placeholder;
self.mobileFormatStr =
inputType === "datetime-local"
? "Y-m-d\\TH:i:S"
: inputType === "date"
? "Y-m-d"
: "H:i:S";
if (self.selectedDates.length > 0) {
self.mobileInput.defaultValue = self.mobileInput.value = self.formatDate(
self.selectedDates[0],
self.mobileFormatStr
);
}
if (self.config.minDate)
self.mobileInput.min = self.formatDate(self.config.minDate, "Y-m-d");
if (self.config.maxDate)
self.mobileInput.max = self.formatDate(self.config.maxDate, "Y-m-d");
self.input.type = "hidden";
if (self.altInput !== undefined) self.altInput.type = "hidden";
try {
if (self.input.parentNode)
self.input.parentNode.insertBefore(
self.mobileInput,
self.input.nextSibling
);
} catch {}
bind(self.mobileInput, "change", (e: KeyboardEvent) => {
self.setDate(
(e.target as HTMLInputElement).value,
false,
self.mobileFormatStr
);
triggerEvent("onChange");
triggerEvent("onClose");
});
}
function toggle(e?: FocusEvent | MouseEvent) {
if (self.isOpen === true) return self.close();
self.open(e);
}
function triggerEvent(event: HookKey, data?: any) {
// If the instance has been destroyed already, all hooks have been removed
if (self.config === undefined) return;
const hooks = self.config[event];
if (hooks !== undefined && hooks.length > 0) {
for (let i = 0; hooks[i] && i < hooks.length; i++)
hooks[i](self.selectedDates, self.input.value, self, data);
}
if (event === "onChange") {
self.input.dispatchEvent(createEvent("change"));
// many front-end frameworks bind to the input event
self.input.dispatchEvent(createEvent("input"));
}
}
function createEvent(name: string): Event {
const e = document.createEvent("Event");
e.initEvent(name, true, true);
return e;
}
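  // document.createEvent/initEvent is deprecated but retained for IE11;
  // `new Event(name, { bubbles: true, cancelable: true })` is the modern
  // equivalent where IE support is not required.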
function isDateSelected(date: Date) {
for (let i = 0; i < self.selectedDates.length; i++) {
if (compareDates(self.selectedDates[i], date) === 0) return "" + i;
}
return false;
}
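  // Note: isDateSelected returns the stringified index ("0", "1", ...) rather
  // than a number so that index 0 stays truthy; selectDate parses it back
  // with parseInt.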
function isDateInRange(date: Date) {
if (self.config.mode !== "range" || self.selectedDates.length < 2)
return false;
return (
compareDates(date, self.selectedDates[0]) >= 0 &&
compareDates(date, self.selectedDates[1]) <= 0
);
}
function updateNavigationCurrentMonth() {
if (self.config.noCalendar || self.isMobile || !self.monthNav) return;
self.yearElements.forEach((yearElement, i) => {
const d = new Date(self.currentYear, self.currentMonth, 1);
d.setMonth(self.currentMonth + i);
if (
self.config.showMonths > 1 ||
self.config.monthSelectorType === "static"
) {
self.monthElements[i].textContent =
monthToStr(
d.getMonth(),
self.config.shorthandCurrentMonth,
self.l10n
) + " ";
} else {
self.monthsDropdownContainer.value = d.getMonth().toString();
}
yearElement.value = d.getFullYear().toString();
});
self._hidePrevMonthArrow =
self.config.minDate !== undefined &&
(self.currentYear === self.config.minDate.getFullYear()
? self.currentMonth <= self.config.minDate.getMonth()
: self.currentYear < self.config.minDate.getFullYear());
self._hideNextMonthArrow =
self.config.maxDate !== undefined &&
(self.currentYear === self.config.maxDate.getFullYear()
? self.currentMonth + 1 > self.config.maxDate.getMonth()
: self.currentYear > self.config.maxDate.getFullYear());
}
function getDateStr(format: string) {
return self.selectedDates
.map(dObj => self.formatDate(dObj, format))
.filter(
(d, i, arr) =>
self.config.mode !== "range" ||
self.config.enableTime ||
arr.indexOf(d) === i
)
.join(
self.config.mode !== "range"
? self.config.conjunction
: self.l10n.rangeSeparator
);
}
/**
* Updates the values of inputs associated with the calendar
*/
function updateValue(triggerChange = true) {
if (self.mobileInput !== undefined && self.mobileFormatStr) {
self.mobileInput.value =
self.latestSelectedDateObj !== undefined
? self.formatDate(self.latestSelectedDateObj, self.mobileFormatStr)
: "";
}
self.input.value = getDateStr(self.config.dateFormat);
if (self.altInput !== undefined) {
self.altInput.value = getDateStr(self.config.altFormat);
}
if (triggerChange !== false) triggerEvent("onValueUpdate");
}
function onMonthNavClick(e: MouseEvent) {
const isPrevMonth = self.prevMonthNav.contains(e.target as Node);
const isNextMonth = self.nextMonthNav.contains(e.target as Node);
if (isPrevMonth || isNextMonth) {
changeMonth(isPrevMonth ? -1 : 1);
} else if (self.yearElements.indexOf(e.target as HTMLInputElement) >= 0) {
(e.target as HTMLInputElement).select();
} else if ((e.target as Element).classList.contains("arrowUp")) {
self.changeYear(self.currentYear + 1);
} else if ((e.target as Element).classList.contains("arrowDown")) {
self.changeYear(self.currentYear - 1);
}
}
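  /**
   * Clamps and wraps a two-digit time input's value after an increment or
   * keydown, carrying minute overflow into the hour input and toggling
   * AM/PM when the hour rolls past its bounds.
   */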
function timeWrapper(
e: MouseEvent | KeyboardEvent | FocusEvent | IncrementEvent
): void {
e.preventDefault();
const isKeyDown = e.type === "keydown",
input = e.target as HTMLInputElement;
if (self.amPM !== undefined && e.target === self.amPM) {
self.amPM.textContent =
self.l10n.amPM[int(self.amPM.textContent === self.l10n.amPM[0])];
}
const min = parseFloat(input.getAttribute("min") as string),
max = parseFloat(input.getAttribute("max") as string),
step = parseFloat(input.getAttribute("step") as string),
curValue = parseInt(input.value, 10),
delta =
(e as IncrementEvent).delta ||
(isKeyDown ? ((e as KeyboardEvent).which === 38 ? 1 : -1) : 0);
let newValue = curValue + step * delta;
if (typeof input.value !== "undefined" && input.value.length === 2) {
const isHourElem = input === self.hourElement,
isMinuteElem = input === self.minuteElement;
if (newValue < min) {
newValue =
max +
newValue +
int(!isHourElem) +
(int(isHourElem) && int(!self.amPM));
if (isMinuteElem) incrementNumInput(undefined, -1, self.hourElement);
} else if (newValue > max) {
newValue =
input === self.hourElement ? newValue - max - int(!self.amPM) : min;
if (isMinuteElem) incrementNumInput(undefined, 1, self.hourElement);
}
if (
self.amPM &&
isHourElem &&
(step === 1
? newValue + curValue === 23
: Math.abs(newValue - curValue) > step)
) {
self.amPM.textContent =
self.l10n.amPM[int(self.amPM.textContent === self.l10n.amPM[0])];
}
input.value = pad(newValue);
}
}
init();
return self;
}
/* istanbul ignore next */
function _flatpickr(
nodeList: ArrayLike<Node>,
config?: Options
): Instance | Instance[] {
// static list
const nodes = Array.prototype.slice
.call(nodeList)
.filter(x => x instanceof HTMLElement) as HTMLElement[];
let instances: Instance[] = [];
for (let i = 0; i < nodes.length; i++) {
const node = nodes[i];
try {
if (node.getAttribute("data-fp-omit") !== null) continue;
if (node._flatpickr !== undefined) {
node._flatpickr.destroy();
node._flatpickr = undefined;
}
node._flatpickr = FlatpickrInstance(node, config || {});
instances.push(node._flatpickr);
} catch (e) {
console.error(e);
}
}
return instances.length === 1 ? instances[0] : instances;
}
/* istanbul ignore next */
if (
typeof HTMLElement !== "undefined" &&
typeof HTMLCollection !== "undefined" &&
typeof NodeList !== "undefined"
) {
// browser env
HTMLCollection.prototype.flatpickr = NodeList.prototype.flatpickr = function(
config?: Options
) {
return _flatpickr(this, config);
};
HTMLElement.prototype.flatpickr = function(config?: Options) {
return _flatpickr([this], config) as Instance;
};
}
/* istanbul ignore next */
var flatpickr = function(
selector: ArrayLike<Node> | Node | string,
config?: Options
) {
if (typeof selector === "string") {
return _flatpickr(window.document.querySelectorAll(selector), config);
} else if (selector instanceof Node) {
return _flatpickr([selector], config);
} else {
return _flatpickr(selector, config);
}
} as FlatpickrFn;
/* istanbul ignore next */
flatpickr.defaultConfig = {};
flatpickr.l10ns = {
en: { ...English },
default: { ...English },
};
flatpickr.localize = (l10n: CustomLocale) => {
flatpickr.l10ns.default = {
...flatpickr.l10ns.default,
...l10n,
};
};
flatpickr.setDefaults = (config: Options) => {
flatpickr.defaultConfig = {
...flatpickr.defaultConfig,
...(config as ParsedOptions),
};
};
flatpickr.parseDate = createDateParser({});
flatpickr.formatDate = createDateFormatter({});
flatpickr.compareDates = compareDates;
/* istanbul ignore next */
if (typeof jQuery !== "undefined" && typeof jQuery.fn !== "undefined") {
(jQuery.fn as any).flatpickr = function(config: Options) {
return _flatpickr(this, config);
};
}
// eslint-disable-next-line @typescript-eslint/camelcase
Date.prototype.fp_incr = function(days: number | string) {
return new Date(
this.getFullYear(),
this.getMonth(),
this.getDate() + (typeof days === "string" ? parseInt(days, 10) : days)
);
};
if (typeof window !== "undefined") {
window.flatpickr = flatpickr;
}
export default flatpickr;
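// Hedged usage sketch (assumptions: a browser environment and an element
// matching "#myDatePicker" exist; the options shown are standard flatpickr options):
//
//   const fp = flatpickr("#myDatePicker", {
//     mode: "range",
//     dateFormat: "Y-m-d",
//     onChange: (selectedDates, dateStr) => console.log(selectedDates, dateStr),
//   }) as Instance;
//   fp.setDate(["2021-01-01", "2021-01-31"]);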
|
setupHelperFunctions
|
__init__.py
|
"""Tests for the switchbot integration."""
from unittest.mock import patch
from homeassistant.const import CONF_MAC, CONF_NAME, CONF_PASSWORD, CONF_SENSOR_TYPE
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
DOMAIN = "switchbot"
ENTRY_CONFIG = {
CONF_NAME: "test-name",
CONF_PASSWORD: "test-password",
CONF_MAC: "e7:89:43:99:99:99",
}
USER_INPUT = {
CONF_NAME: "test-name",
CONF_PASSWORD: "test-password",
CONF_MAC: "e7:89:43:99:99:99",
}
USER_INPUT_UNSUPPORTED_DEVICE = {
CONF_NAME: "test-name",
CONF_PASSWORD: "test-password",
CONF_MAC: "test",
}
USER_INPUT_INVALID = {
CONF_NAME: "test-name",
CONF_PASSWORD: "test-password",
CONF_MAC: "invalid-mac",
}
YAML_CONFIG = {
CONF_NAME: "test-name",
CONF_PASSWORD: "test-password",
CONF_MAC: "e7:89:43:99:99:99",
CONF_SENSOR_TYPE: "bot",
}
def _patch_async_setup_entry(return_value=True):
return patch(
"homeassistant.components.switchbot.async_setup_entry",
return_value=return_value,
)
async def
|
(
hass: HomeAssistant,
*,
data: dict = ENTRY_CONFIG,
skip_entry_setup: bool = False,
) -> MockConfigEntry:
"""Set up the Switchbot integration in Home Assistant."""
entry = MockConfigEntry(domain=DOMAIN, data=data)
entry.add_to_hass(hass)
if not skip_entry_setup:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
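# Hedged usage sketch inside an async test (assumes Home Assistant's standard
# pytest fixtures, from which `hass` comes):
#
#   entry = await init_integration(hass)
#   assert entry.data[CONF_MAC] == "e7:89:43:99:99:99"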
|
init_integration
|
constants.ts
|
import { join } from 'path'
export const CDN_PATH = join(__dirname, '..', 'cdn')
export const DUMP_PATH = join(CDN_PATH, 'dumps')
export const LEGAL_PATH = join(__dirname, '..', 'legal')
export const BEATSAVER_EPOCH = 1525132800
export const FILE_EXT_WHITELIST = [
'.dat',
'.json',
'.egg',
'.ogg',
'.png',
'.jpg',
'.jpeg',
'.srt',
]
export const FILE_TYPE_BLACKLIST = [
'application/gzip',
'application/vnd.ms-cab-compressed',
'application/wasm',
|
'application/x-apple-diskimage',
'application/x-bzip2',
'application/x-compress',
'application/x-deb',
'application/x-google-chrome-extension',
'application/x-lzip',
'application/x-msdownload',
'application/x-msi',
'application/x-rar-compressed',
'application/x-rpm',
'application/x-shockwave-flash',
'application/x-sqlite3',
'application/x-tar',
'application/x-unix-archive',
'application/x-xz',
'application/x.ms.shortcut',
'application/zip',
'text/calendar',
]
const SCHEMA_BASE_URI =
'https://raw.githubusercontent.com/lolPants/beatmap-schemas/master/schemas'
export const SCHEMA_INFO = `${SCHEMA_BASE_URI}/info.schema.json`
export const SCHEMA_DIFFICULTY = `${SCHEMA_BASE_URI}/difficulty.schema.json`
|
'application/x-7z-compressed',
|
user_shell.rs
|
#![no_std]
#![no_main]
extern crate alloc;
#[macro_use]
extern crate user_lib;
const LF: u8 = 0x0au8;
const CR: u8 = 0x0du8;
const DL: u8 = 0x7fu8;
const BS: u8 = 0x08u8;
use alloc::string::String;
use user_lib::{fork, exec, waitpid};
use user_lib::console::getchar;
#[no_mangle]
pub fn main() -> i32 {
println!("Rust user shell");
let mut line: String = String::new();
print!(">> ");
loop {
let c = getchar();
match c {
LF | CR => {
println!("");
if !line.is_empty() {
line.push('\0');
let pid = fork();
if pid == 0 {
// child process
if exec(line.as_str()) == -1
|
unreachable!();
} else {
let mut exit_code: i32 = 0;
let exit_pid = waitpid(pid as usize, &mut exit_code);
assert_eq!(pid, exit_pid);
println!("Shell: Process {} exited with code {}", pid, exit_code);
}
line.clear();
}
print!(">> ");
}
BS | DL => {
if !line.is_empty() {
print!("{}", BS as char);
print!(" ");
print!("{}", BS as char);
line.pop();
}
}
_ => {
print!("{}", c as char);
line.push(c as char);
}
}
}
}
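// Hedged example session (text after ">> " is user input; the binary name,
// its output, and the pid below are illustrative):
//
//   >> hello_world
//   Hello, world!
//   Shell: Process 2 exited with code 0
//   >>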
|
{
println!("Error when executing!");
return -4;
}
|
relations_new.rs
|
use crate::common::*;
#[test]
fn relation_happy_path() {
let dml = r#"
model User {
id Int @id
firstName String
posts Post[]
}
model Post {
id Int @id
text String
userId Int
user User @relation(fields: [userId], references: [id])
}
"#;
let schema = parse(dml);
let user_model = schema.assert_has_model("User");
user_model
.assert_has_relation_field("posts")
.assert_arity(&dml::FieldArity::List)
.assert_relation_to("Post")
.assert_relation_base_fields(&[])
.assert_relation_referenced_fields(&[]);
let post_model = schema.assert_has_model("Post");
post_model
.assert_has_relation_field("user")
.assert_arity(&dml::FieldArity::Required)
.assert_relation_to("User")
.assert_relation_base_fields(&["userId"])
.assert_relation_referenced_fields(&["id"]);
}
#[test]
fn relation_must_error_when_base_field_does_not_exist() {
let dml = r#"
model User {
id Int @id
firstName String
posts Post[]
}
model Post {
id Int @id
text String
user User @relation(fields: [userId], references: [id])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError validating: The argument fields must refer only to existing fields. The following fields do not exist in this model: userId[0m
[1;94m-->[0m [4mschema.prisma:11[0m
[1;94m | [0m
[1;94m10 | [0m text String
[1;94m11 | [0m user User @relation(fields: [1;91m[userId][0m, references: [id])
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn relation_must_error_when_base_field_is_not_scalar() {
let dml = r#"
model User {
id Int @id
firstName String
posts Post[]
}
model Post {
id Int @id
text String
userId Int
otherId Int
user User @relation(fields: [other], references: [id])
other OtherModel @relation(fields: [otherId], references: [id])
}
model OtherModel {
id Int @id
posts Post[]
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError validating: The argument fields must refer only to scalar fields. But it is referencing the following relation fields: other[0m
[1;94m-->[0m [4mschema.prisma:14[0m
[1;94m | [0m
[1;94m13 | [0m
[1;94m14 | [0m user User @relation(fields: [1;91m[other][0m, references: [id])
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn optional_relation_field_must_succeed_when_all_underlying_fields_are_optional() {
let dml = r#"
model User {
id Int @id
firstName String?
lastName String?
posts Post[]
@@unique([firstName, lastName])
}
model Post {
id Int @id
text String
userFirstName String?
userLastName String?
user User? @relation(fields: [userFirstName, userLastName], references: [firstName, lastName])
}
"#;
// must not crash
let _ = parse(dml);
}
#[test]
fn required_relation_field_must_error_when_one_underlying_field_is_optional() {
let dml = r#"
model User {
id Int @id
firstName String
lastName String?
posts Post[]
@@unique([firstName, lastName])
}
model Post {
id Int @id
text String
userFirstName String
userLastName String?
user User @relation(fields: [userFirstName, userLastName], references: [firstName, lastName])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError validating: The relation field `user` uses the scalar fields userFirstName, userLastName. At least one of those fields is optional. Hence the relation field must be optional as well.[0m
[1;94m-->[0m [4mschema.prisma:17[0m
[1;94m | [0m
[1;94m16 | [0m
[1;94m17 | [0m [1;91muser User @relation(fields: [userFirstName, userLastName], references: [firstName, lastName])[0m
[1;94m18 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn optional_relation_field_must_succeed_when_at_least_one_underlying_fields_is_optional() {
let dml = r#"
model User {
id Int @id
firstName String
lastName String?
posts Post[]
@@unique([firstName, lastName])
}
model Post {
id Int @id
text String
userFirstName String
userLastName String?
user User? @relation(fields: [userFirstName, userLastName], references: [firstName, lastName])
}
"#;
// must not crash
let _ = parse(dml);
}
#[test]
fn required_relation_field_must_error_when_all_underlying_fields_are_optional() {
let dml = r#"
model User {
id Int @id
firstName String?
lastName String?
posts Post[]
@@unique([firstName, lastName])
}
model Post {
id Int @id
text String
userFirstName String?
userLastName String?
user User @relation(fields: [userFirstName, userLastName], references: [firstName, lastName])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError validating: The relation field `user` uses the scalar fields userFirstName, userLastName. At least one of those fields is optional. Hence the relation field must be optional as well.[0m
[1;94m-->[0m [4mschema.prisma:17[0m
[1;94m | [0m
[1;94m16 | [0m
[1;94m17 | [0m [1;91muser User @relation(fields: [userFirstName, userLastName], references: [firstName, lastName])[0m
[1;94m18 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn required_relation_field_must_error_if_it_is_virtual() {
let dml = r#"
model User {
id Int @id
address Address
}
model Address {
id Int @id
userId Int @unique
user User @relation(fields: [userId], references: [id])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError parsing attribute "@relation": The relation field `address` on Model `User` is required. This is no longer valid because it's not possible to enforce this constraint on the database level. Please change the field type from `Address` to `Address?` to fix this.[0m
[1;94m-->[0m [4mschema.prisma:4[0m
[1;94m | [0m
[1;94m 3 | [0m id Int @id
[1;94m 4 | [0m [1;91maddress Address[0m
[1;94m 5 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn relation_must_error_when_referenced_field_does_not_exist() {
let dml = r#"
model User {
id Int @id
firstName String
posts Post[]
}
model Post {
id Int @id
text String
userId Int
user User @relation(fields: [userId], references: [fooBar])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError validating: The argument `references` must refer only to existing fields in the related model `User`. The following fields do not exist in the related model: fooBar[0m
[1;94m-->[0m [4mschema.prisma:12[0m
[1;94m | [0m
[1;94m11 | [0m userId Int
[1;94m12 | [0m user User @[1;91mrelation(fields: [userId], references: [fooBar])[0m
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn relation_must_error_when_referenced_field_is_not_scalar() {
let dml = r#"
model User {
id Int @id
firstName String
posts Post[]
}
model Post {
id Int @id
text String
userId Int
user User @relation(fields: [userId], references: [posts])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError validating: The argument `references` must refer only to scalar fields in the related model `User`. But it is referencing the following relation fields: posts[0m
[1;94m-->[0m [4mschema.prisma:12[0m
[1;94m | [0m
[1;94m11 | [0m userId Int
[1;94m12 | [0m user User @[1;91mrelation(fields: [userId], references: [posts])[0m
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn relation_must_error_when_referenced_fields_are_not_a_unique_criteria() {
let dml = r#"
model User {
id Int @id
firstName String
posts Post[]
}
model Post {
id Int @id
text String
userName String
user User @relation(fields: [userName], references: [firstName])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError validating: The argument `references` must refer to a unique criteria in the related model `User`. But it is referencing the following fields that are not a unique criteria: firstName[0m
[1;94m-->[0m [4mschema.prisma:12[0m
[1;94m | [0m
[1;94m11 | [0m userName String
[1;94m12 | [0m [1;91muser User @relation(fields: [userName], references: [firstName])[0m
[1;94m13 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn relation_must_succeed_when_referenced_fields_are_a_unique_criteria() {
let dml = r#"
model User {
id Int @id
firstName String
posts Post[]
@@unique([firstName])
}
model Post {
id Int @id
text String
userName String
user User @relation(fields: [userName], references: [firstName])
}
"#;
assert!(datamodel::parse_datamodel(dml).is_ok());
}
#[test]
fn relation_must_not_error_when_referenced_fields_are_not_a_unique_criteria_on_mysql() {
// MySQL allows a foreign key to reference a non-unique criteria
// https://stackoverflow.com/questions/588741/can-a-foreign-key-reference-a-non-unique-index
let dml = r#"
datasource db {
provider = "mysql"
url = "mysql://localhost:3306"
}
model User {
id Int @id
firstName String
posts Post[]
}
model Post {
id Int @id
text String
userName String
user User @relation(fields: [userName], references: [firstName])
}
"#;
let _ = parse(dml);
}
#[test]
fn relation_must_error_when_referenced_fields_are_multiple_uniques() {
let dml = r#"
model User {
id Int @id
firstName String @unique
posts Post[]
}
model Post {
id Int @id
text String
userId Int
userName String
// the relation is referencing two uniques. That is too much.
user User @relation(fields: [userId, userName], references: [id, firstName])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError validating: The argument `references` must refer to a unique criteria in the related model `User`. But it is referencing the following fields that are not a unique criteria: id, firstName[0m
[1;94m-->[0m [4mschema.prisma:14[0m
[1;94m | [0m
[1;94m13 | [0m // the relation is referencing two uniques. That is too much.
[1;94m14 | [0m [1;91muser User @relation(fields: [userId, userName], references: [id, firstName])[0m
[1;94m15 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn relation_must_error_when_types_of_base_field_and_referenced_field_do_not_match() {
let dml = r#"
model User {
id Int @id
firstName String
posts Post[]
}
model Post {
id Int @id
userId String // this type does not match
user User @relation(fields: [userId], references: [id])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError parsing attribute "@relation": The type of the field `userId` in the model `Post` is not matching the type of the referenced field `id` in model `User`.[0m
[1;94m-->[0m [4mschema.prisma:11[0m
[1;94m | [0m
[1;94m10 | [0m userId String // this type does not match
[1;94m11 | [0m [1;91muser User @relation(fields: [userId], references: [id])[0m
[1;94m12 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn relation_must_error_when_number_of_fields_and_references_is_not_equal() {
let dml = r#"
model User {
id Int @id
firstName String
posts Post[]
}
model Post {
id Int @id
userId Int
userName String
user User @relation(fields: [userId, userName], references: [id])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError validating: You must specify the same number of fields in `fields` and `references`.[0m
[1;94m-->[0m [4mschema.prisma:12[0m
[1;94m | [0m
[1;94m11 | [0m userName String
[1;94m12 | [0m user User @[1;91mrelation(fields: [userId, userName], references: [id])[0m
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn must_error_when_references_argument_is_missing_for_one_to_many() {
let dml = r#"
model User {
id Int @id
firstName String
posts Post[]
}
model Post {
id Int @id
userId Int
user User @relation(fields: [userId])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError parsing attribute "@relation": The relation field `user` on Model `Post` must specify the `references` argument in the @relation attribute.[0m
[1;94m-->[0m [4mschema.prisma:11[0m
[1;94m | [0m
[1;94m10 | [0m userId Int
[1;94m11 | [0m [1;91muser User @relation(fields: [userId])[0m
[1;94m12 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn must_error_fields_or_references_argument_is_placed_on_wrong_side_for_one_to_many() {
let dml = r#"
datasource pg {
provider = "postgres"
url = "postgresql://localhost:5432"
}
model User {
id Int @id
postId Int[]
posts Post[] @relation(fields: [postId], references: [id])
}
model Post {
id Int @id
userId Int?
user User? @relation(fields: [userId], references: [id])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError parsing attribute "@relation": The relation field `posts` on Model `User` must not specify the `fields` or `references` argument in the @relation attribute. You must only specify it on the opposite field `user` on model `Post`.[0m
[1;94m-->[0m [4mschema.prisma:10[0m
[1;94m | [0m
[1;94m 9 | [0m postId Int[]
[1;94m10 | [0m [1;91mposts Post[] @relation(fields: [postId], references: [id])[0m
[1;94m11 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn must_error_when_both_arguments_are_missing_for_one_to_many() {
let dml = r#"
model User {
id Int @id
firstName String
posts Post[]
}
model Post {
id Int @id
userId Int
user User
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError parsing attribute "@relation": The relation field `user` on Model `Post` must specify the `fields` argument in the @relation attribute. You can run `prisma format` to fix this automatically.[0m
[1;94m-->[0m [4mschema.prisma:11[0m
[1;94m | [0m
[1;94m10 | [0m userId Int
[1;94m11 | [0m [1;91muser User[0m
[1;94m12 | [0m }
[1;94m | [0m
[1;91merror[0m: [1mError parsing attribute "@relation": The relation field `user` on Model `Post` must specify the `references` argument in the @relation attribute.[0m
[1;94m-->[0m [4mschema.prisma:11[0m
[1;94m | [0m
[1;94m10 | [0m userId Int
[1;94m11 | [0m [1;91muser User[0m
[1;94m12 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn must_error_when_fields_argument_is_missing_for_one_to_one() {
let dml = r#"
model User {
id Int @id
firstName String
post Post?
}
model Post {
id Int @id
userId Int @unique
user User @relation(references: [id])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError parsing attribute "@relation": The relation fields `user` on Model `Post` and `post` on Model `User` do not provide the `fields` argument in the @relation attribute. You have to provide it on one of the two fields.[0m
[1;94m-->[0m [4mschema.prisma:11[0m
[1;94m | [0m
[1;94m10 | [0m userId Int @unique
[1;94m11 | [0m [1;91muser User @relation(references: [id])[0m
[1;94m12 | [0m }
[1;94m | [0m
[1;91merror[0m: [1mError parsing attribute "@relation": The relation fields `post` on Model `User` and `user` on Model `Post` do not provide the `fields` argument in the @relation attribute. You have to provide it on one of the two fields.[0m
[1;94m-->[0m [4mschema.prisma:5[0m
[1;94m | [0m
[1;94m 4 | [0m firstName String
[1;94m 5 | [0m [1;91mpost Post?[0m
[1;94m 6 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn must_error_when_references_argument_is_missing_for_one_to_one() {
let dml = r#"
model User {
id Int @id
firstName String
post Post
}
model Post {
id Int @id
userId Int @unique
user User @relation(fields: [userId])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError parsing attribute "@relation": The relation fields `user` on Model `Post` and `post` on Model `User` do not provide the `references` argument in the @relation attribute. You have to provide it on one of the two fields.[0m
[1;94m-->[0m [4mschema.prisma:11[0m
[1;94m | [0m
[1;94m10 | [0m userId Int @unique
[1;94m11 | [0m [1;91muser User @relation(fields: [userId])[0m
[1;94m12 | [0m }
[1;94m | [0m
[1;91merror[0m: [1mError parsing attribute "@relation": The relation fields `post` on Model `User` and `user` on Model `Post` do not provide the `references` argument in the @relation attribute. You have to provide it on one of the two fields.[0m
[1;94m-->[0m [4mschema.prisma:5[0m
[1;94m | [0m
[1;94m 4 | [0m firstName String
[1;94m 5 | [0m [1;91mpost Post[0m
[1;94m 6 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn must_error_when_fields_and_references_argument_are_placed_on_different_sides_for_one_to_one() {
let dml = r#"
model User {
id Int @id
firstName String
postId Int
post Post @relation(references: [id])
}
model Post {
id Int @id
userId Int @unique
user User @relation(fields: [userId])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError parsing attribute "@relation": The relation field `user` on Model `Post` provides the `fields` argument in the @relation attribute. And the related field `post` on Model `User` provides the `references` argument. You must provide both arguments on the same side.[0m
[1;94m-->[0m [4mschema.prisma:12[0m
[1;94m | [0m
[1;94m11 | [0m userId Int @unique
[1;94m12 | [0m [1;91muser User @relation(fields: [userId])[0m
[1;94m13 | [0m }
[1;94m | [0m
[1;91merror[0m: [1mError parsing attribute "@relation": The relation field `user` on Model `Post` provides the `fields` argument in the @relation attribute. And the related field `post` on Model `User` provides the `references` argument. You must provide both arguments on the same side.[0m
[1;94m-->[0m [4mschema.prisma:6[0m
[1;94m | [0m
[1;94m 5 | [0m postId Int
[1;94m 6 | [0m [1;91mpost Post @relation(references: [id])[0m
[1;94m 7 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn must_error_when_fields_or_references_argument_is_placed_on_both_sides_for_one_to_one() {
let dml = r#"
model User {
id Int @id
firstName String
postId Int
post Post @relation(fields: [postId], references: [id])
}
model Post {
id Int @id
userId Int @unique
user User @relation(fields: [userId], references: [id])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError parsing attribute "@relation": The relation fields `user` on Model `Post` and `post` on Model `User` both provide the `references` argument in the @relation attribute. You have to provide it only on one of the two fields.[0m
[1;94m-->[0m [4mschema.prisma:12[0m
[1;94m | [0m
[1;94m11 | [0m userId Int @unique
[1;94m12 | [0m [1;91muser User @relation(fields: [userId], references: [id])[0m
[1;94m13 | [0m }
[1;94m | [0m
[1;91merror[0m: [1mError parsing attribute "@relation": The relation fields `user` on Model `Post` and `post` on Model `User` both provide the `references` argument in the @relation attribute. You have to provide it only on one of the two fields.[0m
[1;94m-->[0m [4mschema.prisma:6[0m
[1;94m | [0m
[1;94m 5 | [0m postId Int
[1;94m 6 | [0m [1;91mpost Post @relation(fields: [postId], references: [id])[0m
[1;94m 7 | [0m }
[1;94m | [0m
[1;91merror[0m: [1mError parsing attribute "@relation": The relation fields `user` on Model `Post` and `post` on Model `User` both provide the `fields` argument in the @relation attribute. You have to provide it only on one of the two fields.[0m
[1;94m-->[0m [4mschema.prisma:12[0m
[1;94m | [0m
[1;94m11 | [0m userId Int @unique
[1;94m12 | [0m [1;91muser User @relation(fields: [userId], references: [id])[0m
[1;94m13 | [0m }
[1;94m | [0m
[1;91merror[0m: [1mError parsing attribute "@relation": The relation fields `user` on Model `Post` and `post` on Model `User` both provide the `fields` argument in the @relation attribute. You have to provide it only on one of the two fields.[0m
[1;94m-->[0m [4mschema.prisma:6[0m
[1;94m | [0m
[1;94m 5 | [0m postId Int
[1;94m 6 | [0m [1;91mpost Post @relation(fields: [postId], references: [id])[0m
[1;94m 7 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn must_error_for_required_one_to_one_self_relations() {
let dml = r#"
model User {
id Int @id
friendId Int @unique
friend User @relation("Friends", fields: friendId, references: id)
friendOf User @relation("Friends")
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError parsing attribute "@relation": The relation field `friendOf` on Model `User` is required. This is no longer valid because it's not possible to enforce this constraint on the database level. Please change the field type from `User` to `User?` to fix this.[0m
[1;94m-->[0m [4mschema.prisma:6[0m
[1;94m | [0m
[1;94m 5 | [0m friend User @relation("Friends", fields: friendId, references: id)
[1;94m 6 | [0m [1;91mfriendOf User @relation("Friends")[0m
[1;94m 7 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn must_error_nicely_when_a_many_to_many_is_not_possible() {
// a many-to-many relation is not possible because Post does not have a singular id field
let dml = r#"
model Post {
id Int
slug Int @unique
categories Category[] @relation("foo")
@@id([id, slug])
}
model Category {
id Int @id @default(autoincrement())
posts Post[] @relation("foo")
}"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError validating field `posts` in model `Category`: The relation field `posts` on Model `Category` references `Post` which does not have an `@id` field. Models without `@id` cannot be part of a many to many relation. Use an explicit intermediate Model to represent this relationship.[0m
[1;94m-->[0m [4mschema.prisma:12[0m
[1;94m | [0m
[1;94m11 | [0m id Int @id @default(autoincrement())
[1;94m12 | [0m [1;91mposts Post[] @relation("foo")[0m
[1;94m13 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn must_error_when_many_to_many_is_not_possible_due_to_missing_id() {
let dml = r#"
// Post does not have @id
model Post {
slug Int @unique
categories Category[]
}
model Category {
id Int @id @default(autoincrement())
posts Post[]
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError validating field `posts` in model `Category`: The relation field `posts` on Model `Category` references `Post` which does not have an `@id` field. Models without `@id` cannot be part of a many to many relation. Use an explicit intermediate Model to represent this relationship.[0m
[1;94m-->[0m [4mschema.prisma:10[0m
[1;94m | [0m
[1;94m 9 | [0m id Int @id @default(autoincrement())
[1;94m10 | [0m [1;91mposts Post[][0m
[1;94m11 | [0m }
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(dml).map(drop).unwrap_err());
}
#[test]
fn must_allow_relations_with_default_native_types_with_annotation_on_one_side() {
let dm1 = indoc! {
r#"
datasource db {
provider = "mysql"
url = "mysql://"
}
model Blog {
id Int @id
authorId Int @db.Int
author User @relation(fields: [authorId], references: [id])
}
model User {
id Int @id
blogs Blog[]
}
"#
};
let dm2 = indoc! {
r#"
datasource db {
provider = "mysql"
url = "mysql://"
}
model Blog {
id Int @id
authorId Int
author User @relation(fields: [authorId], references: [id])
}
model User {
id Int @id @db.Int
blogs Blog[]
}
"#
|
datasource db {
provider = "mysql"
url = "mysql://"
}
model Blog {
id Int @id
authorId Int? @db.Int
author User? @relation(fields: [authorId], references: [id])
}
model User {
id Int @id @db.Int
blogs Blog[]
}
"#
};
for dm in &[dm1, dm2, dm3] {
assert!(
datamodel::parse_datamodel(dm).is_ok(),
"{:?}",
datamodel::parse_datamodel(dm).unwrap_err()
);
}
}
#[test]
fn a_one_on_one_relation_with_fields_on_the_wrong_side_should_not_pass() {
let schema = r#"
datasource db {
provider = "postgresql"
url = env("TEST_DATABASE_URL")
}
model Boom {
id Int @id
occurrence DateTime
bam Bam
}
model Bam {
id Int @id
boomId Int?
boom Boom? @relation(fields: [boomId], references: [id])
}
"#;
let expect = expect![[r#"
[1;91merror[0m: [1mError parsing attribute "@relation": The relation field `Bam.boom` defines the `fields` and/or `references` argument. You must set them on the required side of the relation (`Boom.bam`) in order for the constraints to be enforced. Alternatively, you can change this field to be required and the opposite optional, or make both sides of the relation optional.[0m
[1;94m-->[0m [4mschema.prisma:16[0m
[1;94m | [0m
[1;94m15 | [0m boomId Int?
[1;94m16 | [0m [1;91mboom Boom? @relation(fields: [boomId], references: [id])[0m
[1;94m17 | [0m}
[1;94m | [0m
"#]];
expect.assert_eq(&datamodel::parse_schema(schema).map(drop).unwrap_err());
}
|
};
let dm3 = indoc! {
r#"
|
a.rs
|
use std::io;
fn main() {
let (a, b) = {
let i = read::<usize>();
(i[0], i[1])
};
if a <= 8 && b <= 8 {
println!("Yay!");
} else {
println!(":(");
}
}
#[allow(dead_code)]
fn read<T>() -> Vec<T>
where T:
std::str::FromStr,
T::Err: std::fmt::Debug {
let mut buf = String::new();
io::stdin().read_line(&mut buf).unwrap();
buf.split_whitespace()
.map(|s| s.trim().parse().unwrap())
.collect()
}
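// Hedged usage note: with stdin "3 5\n", read::<usize>() returns vec![3, 5],
// so the destructuring in main() above yields a = 3, b = 5.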
#[allow(dead_code)]
fn read_one<T>() -> T
|
std::str::FromStr,
T::Err: std::fmt::Debug {
let mut buf = String::new();
io::stdin().read_line(&mut buf).unwrap();
buf.trim().parse().unwrap()
}
|
where T:
|
interfaces.ts
|
import {IMemoryStats} from "dyna-memory-stats";
export const enum EInstanceType {
APP = "APP",
STANDALONE_SERVICE = "STANDALONE_SERVICE",
MASTER_SERVICE = "MASTER_SERVICE",
WORKER_SERVICE = "WORKER_SERVICE",
}
|
serviceType: EInstanceType;
memoryStats: IMemoryStats;
availabilityIndex: number;
completedWorkload: number;
}
/*
* Add health stats.
* This command is used to update the service with an instance's stats.
* Send the COMMAND_addHealthStats with ICOMMAND_addHealthStats_data and that's all.
* */
export const COMMAND_addHealthStats: string = "COMMAND_addHealthStats";
export interface ICOMMAND_addHealthStats_data extends IInstanceStats {}
/*
* Monitor the stats
* To register yourself, simply send a message with command COMMAND_registerNotificationHealthStats
* without args or data. You will start receiving multiple replies with command COMMAND_healthStatsUpdate
* and ICOMMAND_healthStatsUpdate_data as data.
*
* On each message you receive, you have to reply with a blank message to let the service know that you are alive.
* Simply call `dynaNodeClient.reply(message)` where message is the incoming message with the stats.
* If you don't reply within 5secs the service will unregister you. You have to resend the
* COMMAND_registerNotificationHealthStats command.
*
* When sending this command, apply a timeout with replyTimeoutInMs, for instance 10000.
* If you don't get a response from the service within 10secs then the connection is lost.
* You have to resend the command and start from the beginning.
* Resuming the data flow from the last received point is not currently supported.
* */
export const COMMAND_registerNotificationHealthStats: string = "COMMAND_registerNotificationHealthStats";
export const COMMAND_healthStatsUpdate: string = "COMMAND_healthStatsUpdate"; // note: you should reply with a blank message as an acknowledgement
export interface ICOMMAND_healthStatsUpdate_data {
stats: IInstanceStats,
}
export const COMMAND_unregisterNotificationHealthStats: string = "COMMAND_unregisterNotificationHealthStats";
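// Hedged usage sketch of the monitoring flow described above (only
// `dynaNodeClient.reply` is documented here; the `send`/`onMessage` calls
// below are illustrative assumptions about the client API):
//
//   dynaNodeClient.send({command: COMMAND_registerNotificationHealthStats, replyTimeoutInMs: 10000});
//   dynaNodeClient.onMessage((message: {command: string; data: ICOMMAND_healthStatsUpdate_data}) => {
//     if (message.command !== COMMAND_healthStatsUpdate) return;
//     dynaNodeClient.reply(message); // blank ack so the service keeps us registered
//     console.log(message.data.stats);
//   });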
|
export interface IInstanceStats {
id: string;
time: number; // Date
|
webpack.config.base.js
|
import path from 'path';
export default {
module: {
loaders: [{
test: /\.jsx?$/,
loaders: ['babel-loader'],
exclude: /node_modules/
}, {
test: /\.json$/,
loader: 'json-loader'
}]
},
output: {
path: path.join(__dirname, 'app'),
filename: 'bundle.js',
libraryTarget: 'commonjs2'
},
|
},
plugins: [
],
externals: [
// put your node 3rd party libraries which can't be built with webpack here
// (mysql, mongodb, and so on..)
]
};
|
resolve: {
extensions: ['', '.js', '.jsx', '.json'],
packageMains: ['webpack', 'browser', 'web', 'browserify', ['jam', 'main'], 'main']
|
lr_scheduler.py
|
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import dataclasses
import math
import warnings
from functools import partial
from typing import Any, Dict, Optional, Union
import hydra
import torch.optim as optim
import torch.optim.lr_scheduler as pt_scheduler
import torch.utils.data.dataloader as dataloader
from omegaconf import DictConfig, OmegaConf
from torch.optim.lr_scheduler import _LRScheduler
from nemo.core.config import SchedulerParams, get_scheduler_config, register_scheduler_params
from nemo.utils import logging
class WarmupPolicy(_LRScheduler):
"""Adds warmup kwargs and warmup logic to lr policy.
All arguments should be passed as kwargs for clarity,
Args:
warmup_steps: Number of training steps in warmup stage
warmup_ratio: Ratio of warmup steps to total steps
max_steps: Total number of steps while training or `None` for
infinite training
"""
def __init__(self, optimizer, *, warmup_steps=None, warmup_ratio=None, max_steps=None, min_lr=0.0, last_epoch=-1):
assert not (
warmup_steps is not None and warmup_ratio is not None
), "Either use a particular number of steps or a ratio"
assert warmup_ratio is None or max_steps is not None, "If there is a ratio, there should be a total number of steps"
# It is necessary to assign all attributes *before* __init__,
# as the class is wrapped by an inner class.
self.max_steps = max_steps
if warmup_steps is not None:
self.warmup_steps = warmup_steps
elif warmup_ratio is not None:
self.warmup_steps = int(warmup_ratio * max_steps)
else:
self.warmup_steps = 0
self.min_lr = min_lr
super().__init__(optimizer, last_epoch)
def get_lr(self):
if not self._get_lr_called_within_step:
warnings.warn(
"To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.", UserWarning
)
step = self.last_epoch
if step <= self.warmup_steps:
lr_val = (step + 1) / (self.warmup_steps + 1)
return [initial_lr * lr_val for initial_lr in self.base_lrs]
if step > self.max_steps:
return [self.min_lr for _ in self.base_lrs]
return self._get_lr(step)
def _get_lr(self, step):
"""Simple const lr policy"""
return self.base_lrs
class WarmupHoldPolicy(WarmupPolicy):
"""Variant of WarmupPolicy which maintains high learning rate for a defined number of steps.
All arguments should be passed as kwargs for clarity,
Args:
warmup_steps: Number of training steps in warmup stage
warmup_ratio: Ratio of warmup steps to total steps
hold_steps: Number of training steps to hold the learning rate after warm up
hold_ratio: Ratio of hold steps to total steps
max_steps: Total number of steps while training or `None` for
infinite training
"""
def __init__(
self,
optimizer,
*,
warmup_steps=None,
warmup_ratio=None,
hold_steps=None,
hold_ratio=None,
max_steps=None,
min_lr=0.0,
last_epoch=-1,
):
assert not (hold_steps is not None and hold_ratio is not None), "Either use a particular number of steps or a ratio"
assert hold_ratio is None or max_steps is not None, "If there is a ratio, there should be a total number of steps"
self.min_lr = min_lr
self._last_warmup_lr = 0.0
# Necessary to duplicate as class attributes are hidden in inner class
self.max_steps = max_steps
if warmup_steps is not None:
self.warmup_steps = warmup_steps
elif warmup_ratio is not None:
self.warmup_steps = int(warmup_ratio * max_steps)
else:
self.warmup_steps = 0
if hold_steps is not None:
self.hold_steps = hold_steps + self.warmup_steps
elif hold_ratio is not None:
self.hold_steps = int(hold_ratio * max_steps) + self.warmup_steps
else:
self.hold_steps = 0
super().__init__(
optimizer,
warmup_steps=warmup_steps,
warmup_ratio=warmup_ratio,
max_steps=max_steps,
last_epoch=last_epoch,
min_lr=min_lr,
)
def get_lr(self):
if not self._get_lr_called_within_step:
warnings.warn(
"To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.", UserWarning
)
step = self.last_epoch
# Warmup phase
if step <= self.warmup_steps:
lr_val = (step + 1) / (self.warmup_steps + 1)
return [initial_lr * lr_val for initial_lr in self.base_lrs]
# Hold phase
if (step >= self.warmup_steps) and (step < self.hold_steps):
return self.base_lrs
if step > self.max_steps:
return [self.min_lr for _ in self.base_lrs]
return self._get_lr(step)
def _squareroot_annealing(initial_lr, step, max_steps, min_lr):
mult = ((max_steps - step) / max_steps) ** 0.5
out_lr = initial_lr * mult
out_lr = max(out_lr, min_lr)
return out_lr
def _square_annealing(initial_lr, step, max_steps, min_lr):
mult = ((max_steps - step) / max_steps) ** 2
out_lr = initial_lr * mult
out_lr = max(out_lr, min_lr)
return out_lr
def _cosine_annealing(initial_lr, step, max_steps, min_lr):
mult = 0.5 * (1 + math.cos(math.pi * step / max_steps))
out_lr = (initial_lr - min_lr) * mult + min_lr
return out_lr
def _poly_decay(initial_lr, step, decay_steps, power, min_lr, cycle):
if cycle:
multiplier = 1.0 if step == 0 else math.ceil(step / decay_steps)
decay_steps *= multiplier
else:
step = min(step, decay_steps)
p = step / decay_steps
lr = (initial_lr - min_lr) * math.pow(1.0 - p, power)
lr += min_lr
return lr
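# Worked example for the annealing helpers above (exact values):
#   _cosine_annealing(initial_lr=1e-3, step=500, max_steps=1000, min_lr=0.0)
#     -> (1e-3 - 0.0) * 0.5 * (1 + cos(pi * 0.5)) + 0.0 = 5e-4
#   _square_annealing(initial_lr=1e-3, step=500, max_steps=1000, min_lr=0.0)
#     -> 1e-3 * ((1000 - 500) / 1000) ** 2 = 2.5e-4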
class SquareAnnealing(WarmupPolicy):
|
class SquareRootAnnealing(WarmupPolicy):
def __init__(self, optimizer, *, max_steps, min_lr=0, last_epoch=-1, **kwargs):
super().__init__(optimizer=optimizer, max_steps=max_steps, last_epoch=last_epoch, min_lr=min_lr, **kwargs)
def _get_lr(self, step):
new_lrs = [
_squareroot_annealing(initial_lr=initial_lr, step=step, max_steps=self.max_steps, min_lr=self.min_lr,)
for initial_lr in self.base_lrs
]
return new_lrs
class CosineAnnealing(WarmupPolicy):
def __init__(self, optimizer, *, max_steps, min_lr=0, last_epoch=-1, **kwargs):
super().__init__(optimizer=optimizer, max_steps=max_steps, last_epoch=last_epoch, min_lr=min_lr, **kwargs)
def _get_lr(self, step):
for initial_lr in self.base_lrs:
if initial_lr < self.min_lr:
raise ValueError(
f"{self} received an initial learning rate that " f"was lower than the minimum learning rate."
)
new_lrs = [
_cosine_annealing(
initial_lr=initial_lr,
step=step - self.warmup_steps,
max_steps=self.max_steps - self.warmup_steps,
min_lr=self.min_lr,
)
for initial_lr in self.base_lrs
]
return new_lrs
class WarmupAnnealing(WarmupPolicy):
def __init__(self, optimizer, *, max_steps, last_epoch=-1, min_lr=0.0, **kwargs):
super().__init__(optimizer=optimizer, max_steps=max_steps, last_epoch=last_epoch, min_lr=min_lr, **kwargs)
def _get_lr(self, step):
progress = float(step / self.max_steps)
warmup_ratio = float(self.warmup_steps / self.max_steps)
mult = max((progress - 1.0) / (warmup_ratio - 1.0), 0.0)
out_lr = [initial_lr * mult for initial_lr in self.base_lrs]
return out_lr
class InverseSquareRootAnnealing(WarmupPolicy):
def __init__(self, optimizer, *, max_steps, last_epoch=-1, min_lr=0.0, **kwargs):
super().__init__(optimizer=optimizer, max_steps=max_steps, **kwargs, last_epoch=last_epoch, min_lr=min_lr)
def _get_lr(self, step):
denom = ((step + 1) / (self.warmup_steps + 1)) ** 0.5
out_lr = [initial_lr / denom for initial_lr in self.base_lrs]
return out_lr
class PolynomialDecayAnnealing(WarmupPolicy):
def __init__(self, optimizer, *, max_steps, min_lr=0.0, power=1.0, cycle=False, last_epoch=-1, **kwargs):
self.power = power
self.cycle = cycle
super().__init__(optimizer=optimizer, max_steps=max_steps, last_epoch=last_epoch, min_lr=min_lr, **kwargs)
def _get_lr(self, step):
new_lrs = [
_poly_decay(
initial_lr,
step=step - self.warmup_steps,
decay_steps=self.max_steps - self.warmup_steps,
power=self.power,
min_lr=self.min_lr,
cycle=self.cycle,
)
for initial_lr in self.base_lrs
]
return new_lrs
class PolynomialHoldDecayAnnealing(WarmupHoldPolicy):
def __init__(self, optimizer, *, max_steps, min_lr=0.0, power=1.0, cycle=False, last_epoch=-1, **kwargs):
self.power = power
self.cycle = cycle
super().__init__(optimizer=optimizer, max_steps=max_steps, last_epoch=last_epoch, min_lr=min_lr, **kwargs)
def _get_lr(self, step):
new_lrs = [
_poly_decay(
initial_lr,
step=step - self.hold_steps,
decay_steps=self.max_steps - max(self.warmup_steps, self.hold_steps),
power=self.power,
min_lr=self.min_lr,
cycle=self.cycle,
)
for initial_lr in self.base_lrs
]
return new_lrs
def register_scheduler(name: str, scheduler: _LRScheduler, scheduler_params: SchedulerParams):
"""
Checks if the scheduler name exists in the registry, and if it doesn't, adds it.
This allows custom schedulers to be added and called by name during instantiation.
Args:
name: Name of the scheduler. Will be used as the key to retrieve the scheduler.
scheduler: Scheduler class (inherits from _LRScheduler)
scheduler_params: The parameters as a dataclass of the scheduler
"""
if name in AVAILABLE_SCHEDULERS:
raise ValueError(f"Cannot override pre-existing schedulers. Conflicting scheduler name = {name}")
AVAILABLE_SCHEDULERS[name] = scheduler
sched_name = "{}_params".format(scheduler.__name__)
register_scheduler_params(name=sched_name, scheduler_params=scheduler_params)
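# Hedged registration sketch (`MyAnnealing` and `MyAnnealingParams` are
# illustrative: a _LRScheduler subclass and a SchedulerParams dataclass):
#
#   register_scheduler('MyAnnealing', MyAnnealing, MyAnnealingParams)
#   sched_fn = get_scheduler('MyAnnealing', max_steps=1000)  # partial class
#   scheduler = sched_fn(optimizer)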
def get_scheduler(name: str, **kwargs: Optional[Dict[str, Any]]) -> _LRScheduler:
"""
Convenience method to obtain an _LRScheduler class and partially instantiate it with optimizer kwargs.
Args:
name: Name of the scheduler in the registry.
kwargs: Optional kwargs of the scheduler used during instantiation.
Returns:
a partially instantiated _LRScheduler
"""
if name not in AVAILABLE_SCHEDULERS:
raise ValueError(
f"Cannot resolve scheduler '{name}'. Available schedulers are: " f"{AVAILABLE_SCHEDULERS.keys()}"
)
scheduler_cls = AVAILABLE_SCHEDULERS[name]
scheduler = partial(scheduler_cls, **kwargs)
return scheduler
def prepare_lr_scheduler(
optimizer: optim.Optimizer,
scheduler_config: Union[Dict[str, Any], DictConfig],
train_dataloader: Optional[dataloader.DataLoader] = None,
) -> Optional[Dict[str, Any]]:
"""
Constructs an LR Scheduler (optionally) for a given optimizer, based on a config with the following schema
optim:
name: <name of optimizer>
lr: <maximal learning rate>
# <additional optimizer arguments>
args:
name: auto # special keyword, resolves to correct optimizer config for given optimizer name
# cls: nemo.core.config.optimizers.NovogradParams # explicit instantiation by class path
params: # optional override parameters for the optimizer config
betas: [0.8, 0.5]
weight_decay: 0.001
# scheduler setup
sched:
name: <name of scheduler>
iters_per_batch: null # computed at runtime; mandatory to have
max_steps: null # computed at runtime or explicitly set here; mandatory to have
# pytorch lightning args <mandatory>
monitor: val_loss
reduce_on_plateau: false
# <scheduler config override>
args:
name: auto # special keyword, resolves to correct optimizer config for given optimizer name
# cls: nemo.core.config.schedulers.CosineAnnealingParams # explicit instantiation by class path
params: # optional override parameters for the optimizer config
warmup_steps: null
warmup_ratio: null
min_lr: 0.0
last_epoch: -1
Args:
optimizer: An instantiated Optimizer.
scheduler_config: A dictionary / config dict which follows the above schema.
train_dataloader: Optional requirement, must be passed if "iters_per_batch" is defined
instead of "max_steps". Used to compute effective "max_steps".
Returns:
A dictionary containing the LR Scheduler implementation if the config was successfully parsed
along with other parameters required by Pytorch Lightning, otherwise None.
"""
# Build nested dictionary for convenience out of structured objects
if isinstance(scheduler_config, DictConfig):
scheduler_config = OmegaConf.to_container(scheduler_config, resolve=True)
elif dataclasses.is_dataclass(scheduler_config):
# Recursively transform data classes to basic dictionaries
scheduler_config = OmegaConf.create(scheduler_config)
scheduler_config = OmegaConf.to_container(scheduler_config, resolve=True)
# Test to see if config follows above schema
if scheduler_config is not None:
if 'args' in scheduler_config:
scheduler_args = scheduler_config.pop('args')
else:
scheduler_args = copy.deepcopy(scheduler_config)
# Remove extra parameters from scheduler_args nest
# Assume all other parameters are to be passed into scheduler constructor
scheduler_args.pop('name', None)
scheduler_args.pop('iters_per_batch', None)
scheduler_args.pop('monitor', None)
scheduler_args.pop('reduce_on_plateau', None)
else:
# Return gracefully in case `sched` was not supplied; inform user
logging.info('Scheduler not initialized as no `sched` config supplied to setup_optimizer()')
return None
# Try instantiation of scheduler params from config class path
try:
scheduler_args_cfg = OmegaConf.create(scheduler_args)
scheduler_conf = hydra.utils.instantiate(scheduler_args_cfg)
scheduler_args = vars(scheduler_conf)
# Get name of the scheduler
scheduler_name = scheduler_conf.__class__.__name__
if 'Params' in scheduler_name:
scheduler_name = scheduler_name.replace('Params', '')
except Exception:
# Class path instantiation failed; try resolving "name" component
# Get name of the scheduler
if 'name' in scheduler_config:
scheduler_name = scheduler_config['name']
else:
logging.warning(
"Could not resolve classpath for Scheduler Config, and `name` "
"was not provided either. \n"
"Scheduler cannot be instantiated !"
)
return None
# If class path was not provided, perhaps `name` is provided for resolution
if 'name' in scheduler_args:
# If `auto` is passed as name for resolution of optimizer name,
# then lookup optimizer name and resolve its parameter config
if scheduler_args['name'] == 'auto':
scheduler_params_name = "{}Params".format(scheduler_name)
else:
scheduler_params_name = scheduler_args['name']
# Get override arguments provided in the config yaml file / Dict Config
scheduler_params_override = scheduler_args.get('params', {})
# If params is itself a dict config object provided explicitly in Dict Config
# Resolve to dictionary for convenience
if isinstance(scheduler_params_override, DictConfig):
scheduler_params_override = OmegaConf.to_container(scheduler_params_override, resolve=True)
# Get and instantiate the Config dataclass for this scheduler
scheduler_params_cls = get_scheduler_config(scheduler_params_name, **scheduler_params_override)
scheduler_params = scheduler_params_cls() # instantiate the parameters object
scheduler_args = vars(scheduler_params) # extract just the dictionary from the Config object
else:
# assume the input dictionary is scheduler args (from dataclasses / omegaconf)
pass
# Extract value to monitor in losses, if provided.
if 'monitor' in scheduler_config:
monitor = scheduler_config.get('monitor')
else:
# Default to train loss
monitor = 'loss'
# Store exact max_steps if it is provided
if 'max_steps' in scheduler_config and scheduler_config['max_steps'] is not None:
max_steps = scheduler_config['max_steps']
elif 'iters_per_batch' in scheduler_config:
# Compute effective max_steps if iters_per_batch is provided
if train_dataloader is None:
logging.warning(
'As `iters_per_batch` is provided/computed, it is required to pass the train dataloader in order\n'
'to compute the effective maximum number of steps.\n'
'Scheduler will not be instantiated !'
)
return None
# Raise exception if neither `max_steps` nor `iters_per_batch` is provided
if scheduler_config.get('iters_per_batch', None) is None:
logging.warning(
"`iters_per_batch` cannot be None when `max_steps` is not provided.\n"
"This can occur when `train dataloader` is not available to correctly "
"prepare the scheduler.\n"
"Scheduler will not be instantiated !"
)
return None
# Get iters_per_batch
iters_per_batch = scheduler_config.get('iters_per_batch')
# Compute effective num max_steps
num_samples = len(train_dataloader.dataset)
batch_size = train_dataloader.batch_size
max_steps = round(num_samples * iters_per_batch / float(batch_size))
else:
logging.warning(
"Neither `max_steps` nor `iters_per_batch` were provided to `optim.sched`, "
"cannot compute effective `max_steps` !\n"
"Scheduler will not be instantiated !"
)
return None
# Inject max_steps (effective or provided) into the scheduler config
scheduler_args['max_steps'] = max_steps
# Get the scheduler class from the config
scheduler_cls = get_scheduler(scheduler_name, **scheduler_args)
# Instantiate the LR schedule
schedule = scheduler_cls(optimizer, **scheduler_args)
logging.info(
'Scheduler "%s" \nwill be used during training (effective maximum steps = %d) - \nParameters : \n(%s)',
str(schedule),
max_steps,
OmegaConf.to_yaml(OmegaConf.create(scheduler_args)),
)
# Wrap the schedule in PTL arguments to perform stepwise computation
# Rather than epoch level computation
if isinstance(schedule, optim.lr_scheduler.ReduceLROnPlateau):
reduce_lr_on_plateau = True
else:
reduce_lr_on_plateau = False
schedule_dict = {
'scheduler': schedule,
'interval': 'step',
'frequency': 1,
'monitor': monitor,
'reduce_on_plateau': reduce_lr_on_plateau,
}
return schedule_dict
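# Hedged usage sketch (assumes `optimizer` is an instantiated torch optimizer;
# the config follows the schema documented in prepare_lr_scheduler above):
#
#   sched_cfg = {'name': 'CosineAnnealing', 'max_steps': 1000,
#                'warmup_steps': 100, 'min_lr': 1e-5, 'monitor': 'val_loss'}
#   schedule_dict = prepare_lr_scheduler(optimizer, sched_cfg)
#   # schedule_dict['scheduler'] is the _LRScheduler; 'interval' == 'step'
#   # tells PyTorch Lightning to step it once per training batch.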
AVAILABLE_SCHEDULERS = {
'WarmupPolicy': WarmupPolicy,
'WarmupHoldPolicy': WarmupHoldPolicy,
'SquareAnnealing': SquareAnnealing,
'CosineAnnealing': CosineAnnealing,
'WarmupAnnealing': WarmupAnnealing,
'InverseSquareRootAnnealing': InverseSquareRootAnnealing,
'SquareRootAnnealing': SquareRootAnnealing,
'PolynomialDecayAnnealing': PolynomialDecayAnnealing,
'PolynomialHoldDecayAnnealing': PolynomialHoldDecayAnnealing,
'StepLR': pt_scheduler.StepLR,
'ExponentialLR': pt_scheduler.ExponentialLR,
'ReduceLROnPlateau': pt_scheduler.ReduceLROnPlateau,
'CyclicLR': pt_scheduler.CyclicLR,
}
|
def __init__(self, optimizer, *, max_steps, min_lr=1e-5, last_epoch=-1, **kwargs):
super().__init__(optimizer=optimizer, max_steps=max_steps, last_epoch=last_epoch, min_lr=min_lr, **kwargs)
def _get_lr(self, step):
new_lrs = [
_square_annealing(
initial_lr=initial_lr,
step=step - self.warmup_steps,
max_steps=self.max_steps - self.warmup_steps,
min_lr=self.min_lr,
)
for initial_lr in self.base_lrs
]
return new_lrs
|
collection.rs
|
use crate::plan::MutatorContext;
use crate::scheduler::gc_works::ProcessEdgesWork;
use crate::scheduler::*;
use crate::util::OpaquePointer;
use crate::vm::VMBinding;
use crate::MMTK;
/// VM-specific methods for garbage collection.
pub trait Collection<VM: VMBinding> {
/// Stop all the mutator threads. MMTk calls this method when it requires all the mutators to yield for a GC.
/// This method is called by a single thread in MMTk (the GC controller).
|
/// The actual thread synchronization mechanism is up to the VM, and MMTk does not make assumptions about it.
///
/// Arguments:
/// * `tls`: The thread pointer for the GC controller/coordinator.
fn stop_all_mutators<E: ProcessEdgesWork<VM = VM>>(tls: OpaquePointer);
/// Resume all the mutator threads, the opposite of the above. When a GC is finished, MMTk calls this method.
///
/// Arguments:
/// * `tls`: The thread pointer for the GC controller/coordinator.
fn resume_mutators(tls: OpaquePointer);
/// Block the current thread for GC. This is called when an allocation request cannot be fulfilled and a GC
/// is needed. MMTk calls this method to inform the VM that the current thread needs to be blocked as a GC
/// is going to happen. Then MMTk starts a GC. For a stop-the-world GC, MMTk will then call `stop_all_mutators()`
/// before the GC, and call `resume_mutators()` after the GC.
///
/// Arguments:
/// * `tls`: The current thread pointer that should be blocked. The VM can optionally check if the current thread matches `tls`.
fn block_for_gc(tls: OpaquePointer);
/// Ask the VM to spawn a GC thread for MMTk. A GC thread may later call into the VM through these VM traits. Some VMs
/// assume that such calls need to be made from within VM-internal threads.
/// As a result, MMTk does not spawn GC threads itself, to avoid breaking this kind of assumption.
/// MMTk calls this method to spawn GC threads during [`enable_collection()`](../memory_manager/fn.enable_collection.html).
///
/// Arguments:
/// * `tls`: The thread pointer for the parent thread that we spawn new threads from. This is the same `tls` that the VM
///   passes as an argument when it calls `enable_collection()`.
/// * `ctx`: The GC worker context for the GC thread. If `None` is passed, it means spawning a GC thread for the GC controller,
/// which does not have a worker context.
fn spawn_worker_thread(tls: OpaquePointer, ctx: Option<&Worker<MMTK<VM>>>);
/// Allow VM-specific behaviors for a mutator after all the mutators are stopped and before any actual GC work starts.
///
/// Arguments:
/// * `tls`: The thread pointer for a mutator thread.
/// * `m`: The mutator context for the thread.
fn prepare_mutator<T: MutatorContext<VM>>(tls: OpaquePointer, m: &T);
/// Inform the VM for an out-of-memory error. The VM can implement its own error routine for OOM.
///
/// Arguments:
/// * `tls`: The thread pointer for the mutator which failed the allocation and triggered the OOM.
fn out_of_memory(_tls: OpaquePointer) {
panic!("Out of memory!");
}
}
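// Hedged sketch of a minimal binding-side implementation (assumes a `MyVM:
// VMBinding` type; every `my_vm::*` call below is an illustrative VM-specific
// primitive, not part of this crate):
//
//   struct MyCollection;
//   impl Collection<MyVM> for MyCollection {
//       fn stop_all_mutators<E: ProcessEdgesWork<VM = MyVM>>(_tls: OpaquePointer) {
//           my_vm::park_all_mutators(); // must not return until all mutators have yielded
//       }
//       fn resume_mutators(_tls: OpaquePointer) { my_vm::unpark_all_mutators(); }
//       fn block_for_gc(tls: OpaquePointer) { my_vm::block_current_thread_for_gc(tls); }
//       fn spawn_worker_thread(tls: OpaquePointer, ctx: Option<&Worker<MMTK<MyVM>>>) {
//           my_vm::spawn_gc_thread(tls, ctx);
//       }
//       fn prepare_mutator<T: MutatorContext<MyVM>>(_tls: OpaquePointer, _m: &T) {}
//   }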
|
/// This method should not return until all the mutator threads have yielded.
|
opsworkshaproxylayer.go
|
/*
Copyright The Kubeform Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package v1alpha1
import (
"context"
"time"
v1alpha1 "kubeform.dev/kubeform/apis/aws/v1alpha1"
scheme "kubeform.dev/kubeform/client/clientset/versioned/scheme"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
types "k8s.io/apimachinery/pkg/types"
watch "k8s.io/apimachinery/pkg/watch"
rest "k8s.io/client-go/rest"
)
// OpsworksHaproxyLayersGetter has a method to return a OpsworksHaproxyLayerInterface.
// A group's client should implement this interface.
type OpsworksHaproxyLayersGetter interface {
OpsworksHaproxyLayers(namespace string) OpsworksHaproxyLayerInterface
}
// OpsworksHaproxyLayerInterface has methods to work with OpsworksHaproxyLayer resources.
type OpsworksHaproxyLayerInterface interface {
Create(ctx context.Context, opsworksHaproxyLayer *v1alpha1.OpsworksHaproxyLayer, opts v1.CreateOptions) (*v1alpha1.OpsworksHaproxyLayer, error)
Update(ctx context.Context, opsworksHaproxyLayer *v1alpha1.OpsworksHaproxyLayer, opts v1.UpdateOptions) (*v1alpha1.OpsworksHaproxyLayer, error)
UpdateStatus(ctx context.Context, opsworksHaproxyLayer *v1alpha1.OpsworksHaproxyLayer, opts v1.UpdateOptions) (*v1alpha1.OpsworksHaproxyLayer, error)
Delete(ctx context.Context, name string, opts v1.DeleteOptions) error
DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error
Get(ctx context.Context, name string, opts v1.GetOptions) (*v1alpha1.OpsworksHaproxyLayer, error)
List(ctx context.Context, opts v1.ListOptions) (*v1alpha1.OpsworksHaproxyLayerList, error)
Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error)
Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.OpsworksHaproxyLayer, err error)
OpsworksHaproxyLayerExpansion
}
// opsworksHaproxyLayers implements OpsworksHaproxyLayerInterface
type opsworksHaproxyLayers struct {
client rest.Interface
ns string
}
// newOpsworksHaproxyLayers returns a OpsworksHaproxyLayers
func newOpsworksHaproxyLayers(c *AwsV1alpha1Client, namespace string) *opsworksHaproxyLayers {
return &opsworksHaproxyLayers{
client: c.RESTClient(),
ns: namespace,
}
}
// Get takes name of the opsworksHaproxyLayer, and returns the corresponding opsworksHaproxyLayer object, and an error if there is any.
func (c *opsworksHaproxyLayers) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.OpsworksHaproxyLayer, err error) {
result = &v1alpha1.OpsworksHaproxyLayer{}
err = c.client.Get().
Namespace(c.ns).
Resource("opsworkshaproxylayers").
Name(name).
VersionedParams(&options, scheme.ParameterCodec).
Do(ctx).
Into(result)
return
}
// List takes label and field selectors, and returns the list of OpsworksHaproxyLayers that match those selectors.
func (c *opsworksHaproxyLayers) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.OpsworksHaproxyLayerList, err error) {
var timeout time.Duration
if opts.TimeoutSeconds != nil {
timeout = time.Duration(*opts.TimeoutSeconds) * time.Second
}
result = &v1alpha1.OpsworksHaproxyLayerList{}
err = c.client.Get().
Namespace(c.ns).
Resource("opsworkshaproxylayers").
VersionedParams(&opts, scheme.ParameterCodec).
Timeout(timeout).
Do(ctx).
Into(result)
return
}
// Watch returns a watch.Interface that watches the requested opsworksHaproxyLayers.
func (c *opsworksHaproxyLayers) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) {
var timeout time.Duration
if opts.TimeoutSeconds != nil {
timeout = time.Duration(*opts.TimeoutSeconds) * time.Second
}
opts.Watch = true
return c.client.Get().
Namespace(c.ns).
Resource("opsworkshaproxylayers").
VersionedParams(&opts, scheme.ParameterCodec).
Timeout(timeout).
Watch(ctx)
}
// Create takes the representation of a opsworksHaproxyLayer and creates it. Returns the server's representation of the opsworksHaproxyLayer, and an error, if there is any.
func (c *opsworksHaproxyLayers) Create(ctx context.Context, opsworksHaproxyLayer *v1alpha1.OpsworksHaproxyLayer, opts v1.CreateOptions) (result *v1alpha1.OpsworksHaproxyLayer, err error) {
result = &v1alpha1.OpsworksHaproxyLayer{}
err = c.client.Post().
Namespace(c.ns).
Resource("opsworkshaproxylayers").
VersionedParams(&opts, scheme.ParameterCodec).
Body(opsworksHaproxyLayer).
Do(ctx).
Into(result)
return
}
// Update takes the representation of a opsworksHaproxyLayer and updates it. Returns the server's representation of the opsworksHaproxyLayer, and an error, if there is any.
func (c *opsworksHaproxyLayers) Update(ctx context.Context, opsworksHaproxyLayer *v1alpha1.OpsworksHaproxyLayer, opts v1.UpdateOptions) (result *v1alpha1.OpsworksHaproxyLayer, err error) {
result = &v1alpha1.OpsworksHaproxyLayer{}
err = c.client.Put().
Namespace(c.ns).
Resource("opsworkshaproxylayers").
Name(opsworksHaproxyLayer.Name).
VersionedParams(&opts, scheme.ParameterCodec).
Body(opsworksHaproxyLayer).
Do(ctx).
Into(result)
return
}
// UpdateStatus was generated because the type contains a Status member.
// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus().
func (c *opsworksHaproxyLayers) UpdateStatus(ctx context.Context, opsworksHaproxyLayer *v1alpha1.OpsworksHaproxyLayer, opts v1.UpdateOptions) (result *v1alpha1.OpsworksHaproxyLayer, err error) {
result = &v1alpha1.OpsworksHaproxyLayer{}
err = c.client.Put().
Namespace(c.ns).
Resource("opsworkshaproxylayers").
Name(opsworksHaproxyLayer.Name).
SubResource("status").
VersionedParams(&opts, scheme.ParameterCodec).
Body(opsworksHaproxyLayer).
Do(ctx).
Into(result)
return
}
// Delete takes name of the opsworksHaproxyLayer and deletes it. Returns an error if one occurs.
func (c *opsworksHaproxyLayers) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error {
return c.client.Delete().
Namespace(c.ns).
Resource("opsworkshaproxylayers").
Name(name).
Body(&opts).
Do(ctx).
Error()
}
// DeleteCollection deletes a collection of objects.
func (c *opsworksHaproxyLayers) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {
var timeout time.Duration
if listOpts.TimeoutSeconds != nil {
timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second
}
return c.client.Delete().
Namespace(c.ns).
Resource("opsworkshaproxylayers").
VersionedParams(&listOpts, scheme.ParameterCodec).
Timeout(timeout).
Body(&opts).
Do(ctx).
Error()
}
// Patch applies the patch and returns the patched opsworksHaproxyLayer.
func (c *opsworksHaproxyLayers) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.OpsworksHaproxyLayer, err error) {
result = &v1alpha1.OpsworksHaproxyLayer{}
err = c.client.Patch(pt).
Namespace(c.ns).
Resource("opsworkshaproxylayers").
Name(name).
SubResource(subresources...).
VersionedParams(&opts, scheme.ParameterCodec).
Body(data).
Do(ctx).
Into(result)
return
}
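// Example (illustrative): using this typed client to list resources.
// `clientset` is assumed to be a configured *versioned.Clientset and the
// variable names are assumptions for the sketch, not part of the generated
// code above.
//
//	layers, err := clientset.AwsV1alpha1().OpsworksHaproxyLayers("default").
//		List(context.TODO(), v1.ListOptions{})
//	if err != nil {
//		// handle the error
//	}
//	for _, layer := range layers.Items {
//		fmt.Println(layer.Name)
//	}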
|
decorator.py
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.prompting import NoTTYException, prompt, prompt_pass
from knack.log import get_logger
from typing import Any, List, Dict, Tuple, Union
from azure.cli.core import AzCommandsLoader
from azure.cli.core.azclierror import (
CLIInternalError,
MutuallyExclusiveArgumentError,
RequiredArgumentMissingError,
InvalidArgumentValueError,
NoTTYError,
)
from azure.cli.core.commands import AzCliCommand
from azure.cli.core.profiles import ResourceType
from .custom import (
_get_rg_location,
_validate_ssh_key,
_get_default_dns_prefix,
_set_vm_set_type,
set_load_balancer_sku,
get_subscription_id,
_ensure_aks_service_principal,
)
logger = get_logger(__name__)
def safe_list_get(li: List, idx: int, default: Any = None):
# Attempt to get the element with index `idx` from an object `li` (which should be a `list`),
# if the index is invalid (like out of range), return `default` (whose default value is `None`)
if isinstance(li, list):
try:
return li[idx]
except IndexError:
return default
return None
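# Example (illustrative): safe_list_get returns the default instead of raising
# when the index is out of range or the object is not a list.
#
#   safe_list_get([1, 2, 3], 5, default=0)   # -> 0
#   safe_list_get(None, 0, default="n/a")    # -> "n/a"
#   safe_list_get(["a", "b"], 1)             # -> "b"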
# pylint: disable=too-many-instance-attributes,too-few-public-methods
class AKSCreateModels:
# Used to store models (i.e. the corresponding class of a certain api version specified by `resource_type`)
# which would be used during the creation process.
def __init__(
self,
cmd: AzCommandsLoader,
resource_type: ResourceType = ResourceType.MGMT_CONTAINERSERVICE,
):
self.__cmd = cmd
self.resource_type = resource_type
self.ManagedClusterWindowsProfile = self.__cmd.get_models(
"ManagedClusterWindowsProfile",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ManagedClusterSKU = self.__cmd.get_models(
"ManagedClusterSKU",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ContainerServiceNetworkProfile = self.__cmd.get_models(
"ContainerServiceNetworkProfile",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ContainerServiceLinuxProfile = self.__cmd.get_models(
"ContainerServiceLinuxProfile",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ManagedClusterServicePrincipalProfile = self.__cmd.get_models(
"ManagedClusterServicePrincipalProfile",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ContainerServiceSshConfiguration = self.__cmd.get_models(
"ContainerServiceSshConfiguration",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ContainerServiceSshPublicKey = self.__cmd.get_models(
"ContainerServiceSshPublicKey",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ManagedClusterAADProfile = self.__cmd.get_models(
"ManagedClusterAADProfile",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ManagedClusterAutoUpgradeProfile = self.__cmd.get_models(
"ManagedClusterAutoUpgradeProfile",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ManagedClusterAgentPoolProfile = self.__cmd.get_models(
"ManagedClusterAgentPoolProfile",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ManagedClusterIdentity = self.__cmd.get_models(
"ManagedClusterIdentity",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.UserAssignedIdentity = self.__cmd.get_models(
"UserAssignedIdentity",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ManagedCluster = self.__cmd.get_models(
"ManagedCluster",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ManagedServiceIdentityUserAssignedIdentitiesValue = (
self.__cmd.get_models(
"ManagedServiceIdentityUserAssignedIdentitiesValue",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
)
self.ExtendedLocation = self.__cmd.get_models(
"ExtendedLocation",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
self.ExtendedLocationTypes = self.__cmd.get_models(
"ExtendedLocationTypes",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
# not directly used
self.ManagedClusterAPIServerAccessProfile = self.__cmd.get_models(
"ManagedClusterAPIServerAccessProfile",
resource_type=self.resource_type,
operation_group="managed_clusters",
)
# pylint: disable=too-many-public-methods
class AKSCreateContext:
# Used to store intermediate variables (usually this stores the dynamically completed value of the parameter,
# which has not been decorated into the `mc` object, and some pure intermediate variables (such as the
# subscription ID)) and a copy of the original function parameters, and provide "getter" methods for all
# parameters.
# To dynamically complete a parameter or check the validity of a parameter, please provide a "getter" function
# named `get_xxx`, where `xxx` is the parameter name. In this function, the process of obtaining parameter
# values, dynamic completion (optional), and validation (optional) should be followed. The obtaining of
# parameter values should further follow the order of obtaining from the `mc` object, from the intermediates,
# or from the original value.
# Note: Dynamic completion will also perform some operations that regulate parameter values, such as
# converting int 0 to None.
# Attention: When using a getter function to obtain the value of other parameters during validation, be sure
# not to set `enable_validation` to `True`, to avoid recursive (looping) calls.
# Attention: After the parameter is dynamically completed, it must be added to the intermediates; and after
# the parameter is decorated into the `mc` object, the corresponding intermediate should be deleted.
# Attention: One of the most basic principles is that when the parameter/profile is decorated into the `mc`
# object, it should never be modified, only read-only operations (e.g. validation) can be performed.
def __init__(self, cmd: AzCliCommand, raw_parameters: Dict):
self.cmd = cmd
if not isinstance(raw_parameters, dict):
raise CLIInternalError(
"Unexpected raw_parameters object with type '{}'.".format(
type(raw_parameters)
)
)
self.raw_param = raw_parameters
self.intermediates = dict()
self.mc = None
def attach_mc(self, mc):
if self.mc is None:
self.mc = mc
else:
msg = "the same" if self.mc == mc else "different"
raise CLIInternalError(
"Attempting to attach the `mc` object again, the two objects are {}.".format(
msg
)
)
def get_intermediate(self, variable_name: str, default_value: Any = None):
if variable_name not in self.intermediates:
msg = "The intermediate '{}' does not exist, return default value '{}'.".format(
variable_name, default_value
)
logger.debug(msg)
return self.intermediates.get(variable_name, default_value)
def set_intermediate(
self, variable_name: str, value: Any, overwrite_exists: bool = False
):
if variable_name in self.intermediates:
if overwrite_exists:
msg = "The intermediate '{}' is overwritten. Original value: '{}', new value: '{}'.".format(
variable_name, self.intermediates.get(variable_name), value
)
logger.debug(msg)
self.intermediates[variable_name] = value
elif self.intermediates.get(variable_name) != value:
msg = "The intermediate '{}' already exists, but overwrite is not enabled." \
"Original value: '{}', candidate value: '{}'.".format(
variable_name,
self.intermediates.get(variable_name),
value,
)
# warning level log will be output to the console, which may cause confusion to users
logger.warning(msg)
else:
self.intermediates[variable_name] = value
def remove_intermediate(self, variable_name: str):
self.intermediates.pop(variable_name, None)
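# Example (illustrative) of the intermediate lifecycle described above, with
# `ctx` as an assumed AKSCreateContext instance:
#
#   ctx.set_intermediate("subscription_id", "<sub-id>")   # store after dynamic completion
#   ctx.get_intermediate("subscription_id")               # read it back later
#   ctx.remove_intermediate("subscription_id")            # drop once decorated into `mc`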
# pylint: disable=unused-argument
def get_resource_group_name(self, **kwargs) -> str:
"""Obtain the value of resource_group_name.
Note: resource_group_name will not be decorated into the `mc` object.
The value of this parameter should be provided by user explicitly.
:return: string
"""
# read the original value passed by the command
resource_group_name = self.raw_param.get("resource_group_name")
# this parameter does not need dynamic completion
# this parameter does not need validation
return resource_group_name
# pylint: disable=unused-argument
def get_name(self, **kwargs) -> str:
"""Obtain the value of name.
Note: name will not be decorated into the `mc` object.
The value of this parameter should be provided by user explicitly.
:return: string
"""
# read the original value passed by the command
name = self.raw_param.get("name")
# this parameter does not need dynamic completion
# this parameter does not need validation
return name
# pylint: disable=unused-argument
def get_ssh_key_value(
self, enable_validation: bool = False, **kwargs
) -> str:
"""Obtain the value of ssh_key_value.
If the user does not specify this parameter, the validator function "validate_ssh_key" checks the default file
location "~/.ssh/id_rsa.pub", if the file exists, read its content and return; otherise, create a key pair at
"~/.ssh/id_rsa.pub" and return the public key.
If the user provides a string-like input, the validator function "validate_ssh_key" checks whether it is a file
path, if so, read its content and return; if it is a valid public key, return it; otherwise, create a key pair
there and return the public key.
This function supports the option of enable_validation. When enabled, it will call "_validate_ssh_key" to
verify the validity of ssh_key_value. If parameter no_ssh_key is set to True, verification will be skipped;
otherwise, a CLIError will be raised when the value of ssh_key_value is invalid.
:return: string
"""
# read the original value passed by the command
raw_value = self.raw_param.get("ssh_key_value")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if (
self.mc and
self.mc.linux_profile and
self.mc.linux_profile.ssh and
self.mc.linux_profile.ssh.public_keys
):
public_key_obj = safe_list_get(
self.mc.linux_profile.ssh.public_keys, 0, None
)
if public_key_obj:
value_obtained_from_mc = public_key_obj.key_data
# set default value
if value_obtained_from_mc is not None:
ssh_key_value = value_obtained_from_mc
else:
ssh_key_value = raw_value
# this parameter does not need dynamic completion
# validation
if enable_validation:
_validate_ssh_key(
no_ssh_key=self.get_no_ssh_key(), ssh_key_value=ssh_key_value
)
return ssh_key_value
# pylint: disable=unused-argument
def get_dns_name_prefix(
self, enable_validation: bool = False, **kwargs
) -> Union[str, None]:
"""Dynamically obtain the value of ssh_key_value according to the context.
When both dns_name_prefix and fqdn_subdomain are not assigned, dynamic completion will be triggerd. Function
"_get_default_dns_prefix" will be called to create a default dns_name_prefix composed of name(cluster),
resource_group_name, and subscription_id.
This function supports the option of enable_validation. When enabled, it will check if both dns_name_prefix and
fqdn_subdomain are assigend, if so, raise the MutuallyExclusiveArgumentError.
This function supports the option of read_only. When enabled, it will skip dynamic completion and validation.
:return: string or None
"""
parameter_name = "dns_name_prefix"
# read the original value passed by the command
raw_value = self.raw_param.get(parameter_name)
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc:
value_obtained_from_mc = self.mc.dns_prefix
# set default value
read_from_mc = False
if value_obtained_from_mc is not None:
dns_name_prefix = value_obtained_from_mc
read_from_mc = True
else:
dns_name_prefix = raw_value
# skip dynamic completion & validation if option read_only is specified
if kwargs.get("read_only"):
return dns_name_prefix
dynamic_completion = False
# check whether the parameter meets the conditions of dynamic completion
if not dns_name_prefix and not self.get_fqdn_subdomain():
dynamic_completion = True
# disable dynamic completion if the value is read from `mc`
dynamic_completion = dynamic_completion and not read_from_mc
# In case the user does not specify the parameter and it meets the conditions of automatic completion,
# necessary information is dynamically completed.
if dynamic_completion:
dns_name_prefix = _get_default_dns_prefix(
name=self.get_name(),
resource_group_name=self.get_resource_group_name(),
subscription_id=self.get_intermediate("subscription_id"),
)
# validation
if enable_validation:
if dns_name_prefix and self.get_fqdn_subdomain():
raise MutuallyExclusiveArgumentError(
"--dns-name-prefix and --fqdn-subdomain cannot be used at same time"
)
return dns_name_prefix
# pylint: disable=unused-argument
def get_location(self, **kwargs) -> str:
"""Dynamically obtain the value of location according to the context.
When location is not assigned, dynamic completion will be triggered. Function "_get_rg_location" will be called
to get the location of the provided resource group, which internally uses ResourceManagementClient to send
the request.
:return: string
"""
parameter_name = "location"
# read the original value passed by the command
raw_value = self.raw_param.get(parameter_name)
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc:
value_obtained_from_mc = self.mc.location
# set default value
read_from_mc = False
if value_obtained_from_mc is not None:
location = value_obtained_from_mc
read_from_mc = True
else:
location = raw_value
dynamic_completion = False
# check whether the parameter meets the conditions of dynamic completion
if location is None:
dynamic_completion = True
# disable dynamic completion if the value is read from `mc`
dynamic_completion = dynamic_completion and not read_from_mc
if dynamic_completion:
location = _get_rg_location(
self.cmd.cli_ctx, self.get_resource_group_name()
)
# this parameter does not need validation
return location
# pylint: disable=unused-argument
def get_kubernetes_version(self, **kwargs) -> str:
"""Obtain the value of kubernetes_version.
:return: string
"""
# read the original value passed by the command
raw_value = self.raw_param.get("kubernetes_version")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc:
value_obtained_from_mc = self.mc.kubernetes_version
# set default value
if value_obtained_from_mc is not None:
kubernetes_version = value_obtained_from_mc
else:
kubernetes_version = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return kubernetes_version
# pylint: disable=unused-argument
def get_no_ssh_key(self, enable_validation: bool = False, **kwargs) -> bool:
"""Obtain the value of name.
Note: no_ssh_key will not be decorated into the `mc` object.
This function supports the option of enable_validation. When enabled, it will check if both dns_name_prefix and
fqdn_subdomain are assigend, if so, raise the MutuallyExclusiveArgumentError.
This function supports the option of enable_validation. When enabled, it will call "_validate_ssh_key" to
verify the validity of ssh_key_value. If parameter no_ssh_key is set to True, verification will be skipped;
otherwise, a CLIError will be raised when the value of ssh_key_value is invalid.
:return: bool
"""
# read the original value passed by the command
no_ssh_key = self.raw_param.get("no_ssh_key")
# this parameter does not need dynamic completion
# validation
if enable_validation:
_validate_ssh_key(
no_ssh_key=no_ssh_key, ssh_key_value=self.get_ssh_key_value()
)
return no_ssh_key
# pylint: disable=unused-argument
def get_vm_set_type(self, **kwargs) -> str:
"""Dynamically obtain the value of vm_set_type according to the context.
Dynamic completion will be triggered by default. Function "_set_vm_set_type" will be called and the
corresponding vm set type will be returned according to the value of kubernetes_version. It will also
normalize the value as server validation is case-sensitive.
This function supports the option of read_only. When enabled, it will skip dynamic completion and validation.
:return: string
"""
parameter_name = "vm_set_type"
# read the original value passed by the command
raw_value = self.raw_param.get(parameter_name)
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.type
# set default value
read_from_mc = False
if value_obtained_from_mc is not None:
vm_set_type = value_obtained_from_mc
read_from_mc = True
else:
vm_set_type = raw_value
# skip dynamic completion & validation if option read_only is specified
if kwargs.get("read_only"):
return vm_set_type
# the value verified by the validator may have case problems, and the
# "_set_vm_set_type" function will adjust it
dynamic_completion = True
# disable dynamic completion if the value is read from `mc`
dynamic_completion = dynamic_completion and not read_from_mc
if dynamic_completion:
vm_set_type = _set_vm_set_type(
vm_set_type=vm_set_type,
kubernetes_version=self.get_kubernetes_version(),
)
# this parameter does not need validation
return vm_set_type
# pylint: disable=unused-argument
def get_load_balancer_sku(
self, enable_validation: bool = False, **kwargs
) -> str:
"""Dynamically obtain the value of load_balancer_sku according to the context.
When load_balancer_sku is not assigned, dynamic completion will be triggered. Function "set_load_balancer_sku"
will be called and the corresponding load balancer sku will be returned according to the value of
kubernetes_version.
This function supports the option of enable_validation. When enabled, it will check if load_balancer_sku equals
"basic" when api_server_authorized_ip_ranges is assigned; if so, raise the MutuallyExclusiveArgumentError.
This function supports the option of read_only. When enabled, it will skip dynamic completion and validation.
:return: string
"""
parameter_name = "load_balancer_sku"
# read the original value passed by the command
raw_value = self.raw_param.get(parameter_name)
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.network_profile:
value_obtained_from_mc = self.mc.network_profile.load_balancer_sku
# set default value
read_from_mc = False
if value_obtained_from_mc is not None:
load_balancer_sku = value_obtained_from_mc
read_from_mc = True
else:
load_balancer_sku = raw_value
# skip dynamic completion & validation if option read_only is specified
if kwargs.get("read_only"):
return load_balancer_sku
dynamic_completion = False
# check whether the parameter meets the conditions of dynamic completion
if not load_balancer_sku:
dynamic_completion = True
# disable dynamic completion if the value is read from `mc`
dynamic_completion = dynamic_completion and not read_from_mc
if dynamic_completion:
load_balancer_sku = set_load_balancer_sku(
sku=load_balancer_sku,
kubernetes_version=self.get_kubernetes_version(),
)
# validation
if enable_validation:
if (
load_balancer_sku == "basic" and
self.get_api_server_authorized_ip_ranges()
):
raise MutuallyExclusiveArgumentError(
"--api-server-authorized-ip-ranges can only be used with standard load balancer"
)
return load_balancer_sku
# pylint: disable=unused-argument
def get_api_server_authorized_ip_ranges(
self, enable_validation: bool = False, **kwargs
) -> Union[str, List[str], None]:
"""Obtain the value of api_server_authorized_ip_ranges.
This function supports the option of enable_validation. When enabled, it will check if load_balancer_sku equals
"basic" when api_server_authorized_ip_ranges is assigned; if so, raise the MutuallyExclusiveArgumentError.
:return: string, empty list or list of strings, or None
"""
parameter_name = "api_server_authorized_ip_ranges"
# read the original value passed by the command
raw_value = self.raw_param.get(parameter_name)
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.api_server_access_profile:
value_obtained_from_mc = (
self.mc.api_server_access_profile.authorized_ip_ranges
)
# set default value
if value_obtained_from_mc is not None:
api_server_authorized_ip_ranges = value_obtained_from_mc
else:
api_server_authorized_ip_ranges = raw_value
# this parameter does not need dynamic completion
# validation
if enable_validation:
if (
api_server_authorized_ip_ranges and
self.get_load_balancer_sku() == "basic"
):
raise MutuallyExclusiveArgumentError(
"--api-server-authorized-ip-ranges can only be used with standard load balancer"
)
return api_server_authorized_ip_ranges
# pylint: disable=unused-argument
def get_fqdn_subdomain(
self, enable_validation: bool = False, **kwargs
) -> Union[str, None]:
"""Obtain the value of fqdn_subdomain.
This function supports the option of enable_validation. When enabled, it will check if both dns_name_prefix and
fqdn_subdomain are assigned, if so, raise the MutuallyExclusiveArgumentError.
:return: string or None
"""
# read the original value passed by the command
raw_value = self.raw_param.get("fqdn_subdomain")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc:
value_obtained_from_mc = self.mc.fqdn_subdomain
# set default value
if value_obtained_from_mc is not None:
fqdn_subdomain = value_obtained_from_mc
else:
fqdn_subdomain = raw_value
# this parameter does not need dynamic completion
# validation
if enable_validation:
if fqdn_subdomain and self.get_dns_name_prefix(read_only=True):
raise MutuallyExclusiveArgumentError(
"--dns-name-prefix and --fqdn-subdomain cannot be used at same time"
)
return fqdn_subdomain
# pylint: disable=unused-argument
def get_nodepool_name(self, **kwargs) -> str:
"""Obtain the value of nodepool_name.
:return: string
"""
# read the original value passed by the command
raw_value = self.raw_param.get("nodepool_name")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.name
# set default value
if value_obtained_from_mc is not None:
nodepool_name = value_obtained_from_mc
else:
nodepool_name = raw_value
return nodepool_name
# pylint: disable=unused-argument
def get_nodepool_tags(self, **kwargs) -> Union[Dict[str, str], None]:
"""Obtain the value of nodepool_tags.
:return: Dictionary or None
"""
# read the original value passed by the command
raw_value = self.raw_param.get("nodepool_tags")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.tags
# set default value
if value_obtained_from_mc is not None:
nodepool_tags = value_obtained_from_mc
else:
nodepool_tags = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return nodepool_tags
# pylint: disable=unused-argument
def get_nodepool_labels(self, **kwargs) -> Union[Dict[str, str], None]:
"""Obtain the value of nodepool_labels.
:return: Dictionary or None
"""
# read the original value passed by the command
raw_value = self.raw_param.get("nodepool_labels")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.node_labels
# set default value
if value_obtained_from_mc is not None:
nodepool_labels = value_obtained_from_mc
else:
nodepool_labels = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return nodepool_labels
# pylint: disable=unused-argument
def get_node_count(self, enable_validation: bool = False, **kwargs) -> int:
"""Obtain the value of node_count.
This function supports the option of enable_validation. When enabled, on the premise that
enable_cluster_autoscaler is enabled, it will check whether both min_count and max_count are assigned, if not,
raise the RequiredArgumentMissingError; it will also check whether node_count is between min_count and
max_count, if not, raise the InvalidArgumentValueError.
:return: int
"""
# read the original value passed by the command
raw_value = self.raw_param.get("node_count")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.count
# set default value
if value_obtained_from_mc is not None:
node_count = value_obtained_from_mc
else:
node_count = raw_value
# this parameter does not need dynamic completion
# validation
if enable_validation:
enable_cluster_autoscaler = self.get_enable_cluster_autoscaler()
min_count = self.get_min_count()
max_count = self.get_max_count()
if enable_cluster_autoscaler:
if min_count is None or max_count is None:
raise RequiredArgumentMissingError(
"Please specify both min-count and max-count when --enable-cluster-autoscaler enabled"
)
if node_count < min_count or node_count > max_count:
raise InvalidArgumentValueError(
"node-count is not in the range of min-count and max-count"
)
return int(node_count)
# pylint: disable=unused-argument
def get_node_vm_size(self, **kwargs) -> str:
"""Obtain the value of node_vm_size.
:return: string
"""
# read the original value passed by the command
raw_value = self.raw_param.get("node_vm_size")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.vm_size
# set default value
if value_obtained_from_mc is not None:
node_vm_size = value_obtained_from_mc
else:
node_vm_size = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return node_vm_size
# pylint: disable=unused-argument
def get_vnet_subnet_id(self, **kwargs) -> Union[str, None]:
"""Obtain the value of vnet_subnet_id.
:return: string or None
"""
# read the original value passed by the command
raw_value = self.raw_param.get("vnet_subnet_id")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.vnet_subnet_id
# set default value
if value_obtained_from_mc is not None:
vnet_subnet_id = value_obtained_from_mc
else:
vnet_subnet_id = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return vnet_subnet_id
# pylint: disable=unused-argument
def get_ppg(self, **kwargs) -> Union[str, None]:
"""Obtain the value of ppg(proximity_placement_group_id).
:return: string or None
"""
# read the original value passed by the command
raw_value = self.raw_param.get("ppg")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = (
agent_pool_profile.proximity_placement_group_id
)
# set default value
if value_obtained_from_mc is not None:
ppg = value_obtained_from_mc
else:
ppg = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return ppg
# pylint: disable=unused-argument
def get_zones(self, **kwargs) -> Union[List[str], None]:
"""Obtain the value of zones.
:return: list of strings or None
"""
# read the original value passed by the command
raw_value = self.raw_param.get("zones")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.availability_zones
# set default value
if value_obtained_from_mc is not None:
zones = value_obtained_from_mc
else:
zones = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return zones
# pylint: disable=unused-argument
def get_enable_node_public_ip(self, **kwargs) -> bool:
"""Obtain the value of enable_node_public_ip.
:return: bool
"""
# read the original value passed by the command
raw_value = self.raw_param.get("enable_node_public_ip")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = (
agent_pool_profile.enable_node_public_ip
)
# set default value
if value_obtained_from_mc is not None:
enable_node_public_ip = value_obtained_from_mc
else:
enable_node_public_ip = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return enable_node_public_ip
# pylint: disable=unused-argument
def get_node_public_ip_prefix_id(self, **kwargs) -> Union[str, None]:
"""Obtain the value of node_public_ip_prefix_id.
:return: string or None
"""
# read the original value passed by the command
raw_value = self.raw_param.get("node_public_ip_prefix_id")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = (
agent_pool_profile.node_public_ip_prefix_id
)
# set default value
if value_obtained_from_mc is not None:
node_public_ip_prefix_id = value_obtained_from_mc
else:
node_public_ip_prefix_id = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return node_public_ip_prefix_id
# pylint: disable=unused-argument
def get_enable_encryption_at_host(self, **kwargs) -> bool:
"""Obtain the value of enable_encryption_at_host.
:return: bool
"""
# read the original value passed by the command
raw_value = self.raw_param.get("enable_encryption_at_host")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = (
agent_pool_profile.enable_encryption_at_host
)
# set default value
if value_obtained_from_mc is not None:
enable_encryption_at_host = value_obtained_from_mc
else:
enable_encryption_at_host = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return enable_encryption_at_host
# pylint: disable=unused-argument
def get_enable_ultra_ssd(self, **kwargs) -> bool:
"""Obtain the value of enable_ultra_ssd.
:return: bool
"""
# read the original value passed by the command
raw_value = self.raw_param.get("enable_ultra_ssd")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.enable_ultra_ssd
# set default value
if value_obtained_from_mc is not None:
enable_ultra_ssd = value_obtained_from_mc
else:
enable_ultra_ssd = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return enable_ultra_ssd
# pylint: disable=unused-argument
def get_max_pods(self, **kwargs) -> Union[int, None]:
"""Obtain the value of max_pods.
Note: int 0 is converted to None.
:return: int or None
"""
# read the original value passed by the command
raw_value = self.raw_param.get("max_pods")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.max_pods
# set default value
if value_obtained_from_mc is not None:
max_pods = value_obtained_from_mc
else:
max_pods = raw_value
# Note: int 0 is converted to None
if max_pods:
max_pods = int(max_pods)
else:
max_pods = None
# this parameter does not need validation
return max_pods
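# Example (illustrative): the normalization above maps int 0 (and any other
# falsy value) to None, while a positive value is kept as an int:
#   max_pods = 0   ->  None
#   max_pods = 30  ->  30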
# pylint: disable=unused-argument
def get_node_osdisk_size(self, **kwargs) -> Union[int, None]:
"""Obtain the value of node_osdisk_size.
Note: int 0 is converted to None.
:return: int or None
"""
# read the original value passed by the command
raw_value = self.raw_param.get("node_osdisk_size")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.os_disk_size_gb
# set default value
if value_obtained_from_mc is not None:
node_osdisk_size = value_obtained_from_mc
else:
node_osdisk_size = raw_value
# Note: 0 is converted to None
if node_osdisk_size:
node_osdisk_size = int(node_osdisk_size)
else:
node_osdisk_size = None
# this parameter does not need dynamic completion
# this parameter does not need validation
return node_osdisk_size
# pylint: disable=unused-argument
def get_node_osdisk_type(self, **kwargs) -> Union[str, None]:
"""Obtain the value of node_osdisk_size.
:return: string or None
"""
# read the original value passed by the command
raw_value = self.raw_param.get("node_osdisk_type")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.os_disk_type
# set default value
if value_obtained_from_mc is not None:
node_osdisk_type = value_obtained_from_mc
else:
node_osdisk_type = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return node_osdisk_type
# pylint: disable=unused-argument
def get_enable_cluster_autoscaler(
self, enable_validation: bool = False, **kwargs
) -> bool:
"""Obtain the value of enable_cluster_autoscaler.
This function supports the option of enable_validation. When enabled, on the premise that
enable_cluster_autoscaler is enabled, it will check whether both min_count and max_count are assigned, if not,
raise the RequiredArgumentMissingError; it will also check whether min_count is less than max_count and
node_count is between min_count and max_count, if not, raise the InvalidArgumentValueError. If
enable_cluster_autoscaler is not enabled, it will check whether any of min_count or max_count is assigned,
if so, raise the RequiredArgumentMissingError.
:return: bool
"""
# read the original value passed by the command
raw_value = self.raw_param.get("enable_cluster_autoscaler")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.enable_auto_scaling
# set default value
if value_obtained_from_mc is not None:
enable_cluster_autoscaler = value_obtained_from_mc
else:
enable_cluster_autoscaler = raw_value
# this parameter does not need dynamic completion
# validation
if enable_validation:
min_count = self.get_min_count()
max_count = self.get_max_count()
node_count = self.get_node_count()
if enable_cluster_autoscaler:
if min_count is None or max_count is None:
raise RequiredArgumentMissingError(
"Please specify both min-count and max-count when --enable-cluster-autoscaler enabled"
)
if min_count > max_count:
raise InvalidArgumentValueError(
"Value of min-count should be less than or equal to value of max-count"
)
if node_count < min_count or node_count > max_count:
raise InvalidArgumentValueError(
"node-count is not in the range of min-count and max-count"
)
else:
if min_count is not None or max_count is not None:
raise RequiredArgumentMissingError(
"min-count and max-count are required for --enable-cluster-autoscaler, please use the flag"
)
return enable_cluster_autoscaler
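# Illustrative command-line combinations for the validation above:
#
#   --enable-cluster-autoscaler --min-count 1 --max-count 3 --node-count 2  -> passes
#   --enable-cluster-autoscaler --max-count 3                               -> RequiredArgumentMissingError
#   --enable-cluster-autoscaler --min-count 3 --max-count 1                 -> InvalidArgumentValueError
#   --min-count 1 (without --enable-cluster-autoscaler)                     -> RequiredArgumentMissingError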
# pylint: disable=unused-argument
def get_min_count(
self, enable_validation: bool = False, **kwargs
) -> Union[int, None]:
"""Obtain the value of min_count.
This function supports the option of enable_validation. When enabled, on the premise that
enable_cluster_autoscaler is enabled, it will check whether both min_count and max_count are assigned, if not,
raise the RequiredArgumentMissingError; it will also check whether min_count is less than max_count and
node_count is between min_count and max_count, if not, raise the InvalidArgumentValueError. If
enable_cluster_autoscaler is not enabled, it will check whether any of min_count or max_count is assigned,
if so, raise the RequiredArgumentMissingError.
:return: int or None
"""
# read the original value passed by the command
raw_value = self.raw_param.get("min_count")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.min_count
# set default value
if value_obtained_from_mc is not None:
min_count = value_obtained_from_mc
else:
min_count = raw_value
# this parameter does not need dynamic completion
# validation
if enable_validation:
enable_cluster_autoscaler = self.get_enable_cluster_autoscaler()
max_count = self.get_max_count()
node_count = self.get_node_count()
if enable_cluster_autoscaler:
if min_count is None or max_count is None:
raise RequiredArgumentMissingError(
"Please specify both min-count and max-count when --enable-cluster-autoscaler enabled"
)
if min_count > max_count:
raise InvalidArgumentValueError(
"Value of min-count should be less than or equal to value of max-count"
)
if node_count < min_count or node_count > max_count:
raise InvalidArgumentValueError(
"node-count is not in the range of min-count and max-count"
)
else:
if min_count is not None or max_count is not None:
raise RequiredArgumentMissingError(
"min-count and max-count are required for --enable-cluster-autoscaler, please use the flag"
)
return min_count
# pylint: disable=unused-argument
def get_max_count(
self, enable_validation: bool = False, **kwargs
) -> Union[int, None]:
"""Obtain the value of max_count.
This function supports the option of enable_validation. When enabled, on the premise that
enable_cluster_autoscaler is enabled, it will check whether both min_count and max_count are assigned, if not,
raise the RequiredArgumentMissingError; it will also check whether min_count is less than max_count and
node_count is between min_count and max_count, if not, raise the InvalidArgumentValueError. If
enable_cluster_autoscaler is not enabled, it will check whether any of min_count or max_count is assigned,
if so, raise the RequiredArgumentMissingError.
:return: int or None
"""
# read the original value passed by the command
raw_value = self.raw_param.get("max_count")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.max_count
# set default value
if value_obtained_from_mc is not None:
max_count = value_obtained_from_mc
else:
max_count = raw_value
# this parameter does not need dynamic completion
# validation
if enable_validation:
enable_cluster_autoscaler = self.get_enable_cluster_autoscaler()
min_count = self.get_min_count()
node_count = self.get_node_count()
if enable_cluster_autoscaler:
if min_count is None or max_count is None:
raise RequiredArgumentMissingError(
"Please specify both min-count and max-count when --enable-cluster-autoscaler enabled"
)
if min_count > max_count:
raise InvalidArgumentValueError(
"Value of min-count should be less than or equal to value of max-count"
)
if node_count < min_count or node_count > max_count:
raise InvalidArgumentValueError(
"node-count is not in the range of min-count and max-count"
)
else:
if min_count is not None or max_count is not None:
raise RequiredArgumentMissingError(
"min-count and max-count are required for --enable-cluster-autoscaler, please use the flag"
)
return max_count
# pylint: disable=unused-argument
def get_admin_username(self, **kwargs) -> str:
"""Obtain the value of admin_username.
:return: str
"""
# read the original value passed by the command
raw_value = self.raw_param.get("admin_username")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.linux_profile:
value_obtained_from_mc = self.mc.linux_profile.admin_username
# set default value
if value_obtained_from_mc is not None:
admin_username = value_obtained_from_mc
else:
admin_username = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return admin_username
# pylint: disable=unused-argument
def get_windows_admin_username_and_password(
self, **kwargs
) -> Tuple[Union[str, None], Union[str, None]]:
"""Dynamically obtain the value of windows_admin_username and windows_admin_password according to the context.
When one of windows_admin_username and windows_admin_password is not assigned, dynamic completion will be
triggered. The user will be prompted to enter the missing windows_admin_username or windows_admin_password in
a tty (pseudo terminal). If the program is running in a non-interactive environment, a NoTTYError will be
raised.
This function supports the option of read_only. When enabled, it will skip dynamic completion and validation.
:return: a tuple containing two elements of string or None
"""
# windows_admin_username
# read the original value passed by the command
username_raw_value = self.raw_param.get("windows_admin_username")
# try to read the property value corresponding to the parameter from the `mc` object
username_value_obtained_from_mc = None
if self.mc and self.mc.windows_profile:
username_value_obtained_from_mc = (
self.mc.windows_profile.admin_username
)
# set default value
username_read_from_mc = False
if username_value_obtained_from_mc is not None:
windows_admin_username = username_value_obtained_from_mc
username_read_from_mc = True
else:
windows_admin_username = username_raw_value
# windows_admin_password
# read the original value passed by the command
password_raw_value = self.raw_param.get("windows_admin_password")
# try to read the property value corresponding to the parameter from the `mc` object
password_value_obtained_from_mc = None
if self.mc and self.mc.windows_profile:
password_value_obtained_from_mc = (
self.mc.windows_profile.admin_password
)
# set default value
password_read_from_mc = False
if password_value_obtained_from_mc is not None:
windows_admin_password = password_value_obtained_from_mc
password_read_from_mc = True
else:
windows_admin_password = password_raw_value
# consistent check
if username_read_from_mc != password_read_from_mc:
raise CLIInternalError(
"Inconsistent state detected, one of windows admin name and password is read from the `mc` object."
)
# skip dynamic completion & validation if option read_only is specified
if kwargs.get("read_only"):
return windows_admin_username, windows_admin_password
username_dynamic_completion = False
# check whether the parameter meets the conditions of dynamic completion
# to avoid that windows_admin_password is set but windows_admin_username is not
if windows_admin_username is None and windows_admin_password:
username_dynamic_completion = True
# disable dynamic completion if the value is read from `mc`
username_dynamic_completion = (
username_dynamic_completion and not username_read_from_mc
)
if username_dynamic_completion:
try:
windows_admin_username = prompt("windows_admin_username: ")
# The validation for admin_username in ManagedClusterWindowsProfile will still fail later
# if users set windows_admin_username to empty here
except NoTTYException:
raise NoTTYError(
"Please specify username for Windows in non-interactive mode."
)
password_dynamic_completion = False
# check whether the parameter meets the conditions of dynamic completion
# to avoid that windows_admin_username is set but windows_admin_password is not
if windows_admin_password is None and windows_admin_username:
password_dynamic_completion = True
# disable dynamic completion if the value is read from `mc`
password_dynamic_completion = (
password_dynamic_completion and not password_read_from_mc
)
if password_dynamic_completion:
try:
windows_admin_password = prompt_pass(
msg="windows-admin-password: ", confirm=True
)
except NoTTYException:
raise NoTTYError(
"Please specify both username and password in non-interactive mode."
)
# these parameters do not need validation
return windows_admin_username, windows_admin_password
# pylint: disable=unused-argument
def get_enable_ahub(self, **kwargs) -> bool:
"""Obtain the value of enable_ahub.
Note: This parameter will not be directly decorated into the `mc` object.
:return: bool
"""
# read the original value passed by the command
raw_value = self.raw_param.get("enable_ahub")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.windows_profile:
value_obtained_from_mc = self.mc.windows_profile.license_type == "Windows_Server"
# set default value
if value_obtained_from_mc is not None:
enable_ahub = value_obtained_from_mc
else:
enable_ahub = raw_value
# this parameter does not need dynamic completion
# this parameter does not need validation
return enable_ahub
# pylint: disable=unused-argument,too-many-statements
def get_service_principal_and_client_secret(
self, **kwargs
) -> Tuple[Union[str, None], Union[str, None]]:
"""Dynamically obtain the values of service_principal and client_secret according to the context.
When service_principal and client_secret are not assigned and enable_managed_identity is True, dynamic
completion will not be triggered. For other cases, dynamic completion will be triggered.
When client_secret is given but service_principal is not, dns_name_prefix or fqdn_subdomain will be used to
create a service principal. The parameters subscription_id, location and name(cluster) are also required when
calling function "_ensure_aks_service_principal".
When service_principal is given but client_secret is not, function "_ensure_aks_service_principal" would raise
CLIError.
:return: a tuple containing two elements of string or None
"""
# service_principal
sp_parameter_name = "service_principal"
sp_property_name_in_mc = "client_id"
# read the original value passed by the command
sp_raw_value = self.raw_param.get(sp_parameter_name)
# try to read the property value corresponding to the parameter from the `mc` object
sp_value_obtained_from_mc = None
if self.mc and self.mc.service_principal_profile:
sp_value_obtained_from_mc = getattr(
self.mc.service_principal_profile, sp_property_name_in_mc
)
# set default value
sp_read_from_mc = False
if sp_value_obtained_from_mc is not None:
service_principal = sp_value_obtained_from_mc
sp_read_from_mc = True
else:
service_principal = sp_raw_value
# client_secret
secret_parameter_name = "client_secret"
secret_property_name_in_mc = "secret"
# read the original value passed by the command
secret_raw_value = self.raw_param.get(secret_parameter_name)
# try to read the property value corresponding to the parameter from the `mc` object
secret_value_obtained_from_mc = None
if self.mc and self.mc.service_principal_profile:
secret_value_obtained_from_mc = getattr(
self.mc.service_principal_profile, secret_property_name_in_mc
)
# set default value
secret_read_from_mc = False
if secret_value_obtained_from_mc is not None:
client_secret = secret_value_obtained_from_mc
secret_read_from_mc = True
else:
client_secret = secret_raw_value
# consistent check
if sp_read_from_mc != secret_read_from_mc:
raise CLIInternalError(
"Inconsistent state detected, one of sp and secret is read from the `mc` object."
)
# skip dynamic completion & validation if option read_only is specified
if kwargs.get("read_only"):
return service_principal, client_secret
# dynamic completion for service_principal and client_secret
dynamic_completion = False
# check whether the parameter meets the conditions of dynamic completion
enable_managed_identity = self.get_enable_managed_identity(read_only=True)
if not (
enable_managed_identity and
not service_principal and
not client_secret
):
dynamic_completion = True
# disable dynamic completion if the value is read from `mc`
dynamic_completion = (
dynamic_completion and
not sp_read_from_mc and
not secret_read_from_mc
)
if dynamic_completion:
principal_obj = _ensure_aks_service_principal(
cli_ctx=self.cmd.cli_ctx,
service_principal=service_principal,
client_secret=client_secret,
subscription_id=self.get_intermediate(
"subscription_id", None
),
dns_name_prefix=self.get_dns_name_prefix(),
fqdn_subdomain=self.get_fqdn_subdomain(),
location=self.get_location(),
name=self.get_name(),
)
service_principal = principal_obj.get("service_principal")
client_secret = principal_obj.get("client_secret")
# these parameters do not need validation
return service_principal, client_secret
def get_enable_managed_identity(
self, enable_validation=False, **kwargs
) -> bool:
"""Dynamically obtain the values of service_principal and client_secret according to the context.
Note: This parameter will not be directly decorated into the `mc` object.
When both service_principal and client_secret are assigned and enable_managed_identity is True, dynamic
completion will be triggered. The value of enable_managed_identity will be set to False.
:return: bool
"""
# Note: This parameter will not be decorated into the `mc` object.
# read the original value passed by the command
raw_value = self.raw_param.get("enable_managed_identity")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.identity:
value_obtained_from_mc = self.mc.identity.type is not None
# set default value
read_from_mc = False
if value_obtained_from_mc is not None:
enable_managed_identity = value_obtained_from_mc
read_from_mc = True
else:
enable_managed_identity = raw_value
# skip dynamic completion & validation if option read_only is specified
if kwargs.get("read_only"):
return enable_managed_identity
dynamic_completion = False
        # check whether the parameter meets the conditions of dynamic completion
(
service_principal,
client_secret,
) = self.get_service_principal_and_client_secret(read_only=True)
if service_principal and client_secret:
dynamic_completion = True
# disable dynamic completion if the value is read from `mc`
dynamic_completion = dynamic_completion and not read_from_mc
if dynamic_completion:
enable_managed_identity = False
# validation
if enable_validation:
# TODO: add validation
pass
return enable_managed_identity
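# The getters above share a single resolution pattern: prefer the value
# already decorated onto the `mc` object, fall back to the raw command
# parameter, and only then consider dynamic completion. The helper below is
# an illustrative sketch of that precedence, not code used by the CLI itself.
def _resolve_param_sketch(raw_value, mc_value, completer=None):
    read_from_mc = mc_value is not None
    value = mc_value if read_from_mc else raw_value
    # dynamic completion never overrides a value read from the `mc` object
    if completer is not None and not read_from_mc:
        value = completer(value)
    return value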
class AKSCreateDecorator:
def __init__(
self,
cmd: AzCliCommand,
client,
models: AKSCreateModels,
raw_parameters: Dict,
resource_type: ResourceType = ResourceType.MGMT_CONTAINERSERVICE,
):
self.cmd = cmd
self.client = client
self.models = models
        # store the context used while assembling the ManagedCluster object
self.context = AKSCreateContext(cmd, raw_parameters)
# `resource_type` is used to dynamically find the model (of a specific api version) provided by the
        # containerservice SDK, most models have been passed through the `models` parameter (instantiated
# from `AKSCreateModels` (or `PreviewAKSCreateModels` in aks-preview), where resource_type (i.e.,
# api version) has been specified), a very small number of models are instantiated through internal
# functions, one use case is that `api_server_access_profile` is initialized by function
# `_populate_api_server_access_profile` defined in `_helpers.py`
self.resource_type = resource_type
def init_mc(self):
# get subscription id and store as intermediate
subscription_id = get_subscription_id(self.cmd.cli_ctx)
self.context.set_intermediate(
"subscription_id", subscription_id, overwrite_exists=True
)
# initialize the `ManagedCluster` object with mandatory parameters (i.e. location)
mc = self.models.ManagedCluster(location=self.context.get_location())
return mc
def set_up_agent_pool_profiles(self, mc):
if not isinstance(mc, self.models.ManagedCluster):
raise CLIInternalError(
"Unexpected mc object with type '{}'.".format(type(mc))
)
agent_pool_profile = self.models.ManagedClusterAgentPoolProfile(
# Must be 12 chars or less before ACS RP adds to it
name=self.context.get_nodepool_name(enable_trim=True),
tags=self.context.get_nodepool_tags(),
node_labels=self.context.get_nodepool_labels(),
count=self.context.get_node_count(enable_validation=True),
vm_size=self.context.get_node_vm_size(),
os_type="Linux",
vnet_subnet_id=self.context.get_vnet_subnet_id(),
proximity_placement_group_id=self.context.get_ppg(),
availability_zones=self.context.get_zones(),
enable_node_public_ip=self.context.get_enable_node_public_ip(),
node_public_ip_prefix_id=self.context.get_node_public_ip_prefix_id(),
enable_encryption_at_host=self.context.get_enable_encryption_at_host(),
enable_ultra_ssd=self.context.get_enable_ultra_ssd(),
max_pods=self.context.get_max_pods(),
type=self.context.get_vm_set_type(),
mode="System",
os_disk_size_gb=self.context.get_node_osdisk_size(),
os_disk_type=self.context.get_node_osdisk_type(),
min_count=self.context.get_min_count(enable_validation=True),
max_count=self.context.get_max_count(enable_validation=True),
enable_auto_scaling=self.context.get_enable_cluster_autoscaler(
enable_validation=True
),
)
mc.agent_pool_profiles = [agent_pool_profile]
return mc
def set_up_linux_profile(self, mc):
if not isinstance(mc, self.models.ManagedCluster):
raise CLIInternalError(
"Unexpected mc object with type '{}'.".format(type(mc))
)
# LinuxProfile is just used for SSH access to VMs, so omit it if --no-ssh-key was specified.
if not self.context.get_no_ssh_key(enable_validation=True):
ssh_config = self.models.ContainerServiceSshConfiguration(
public_keys=[
self.models.ContainerServiceSshPublicKey(
key_data=self.context.get_ssh_key_value(
enable_validation=True
)
)
]
)
linux_profile = self.models.ContainerServiceLinuxProfile(
admin_username=self.context.get_admin_username(), ssh=ssh_config
)
mc.linux_profile = linux_profile
return mc
def set_up_windows_profile(self, mc):
if not isinstance(mc, self.models.ManagedCluster):
raise CLIInternalError(
"Unexpected mc object with type '{}'.".format(type(mc))
)
(
windows_admin_username,
windows_admin_password,
) = self.context.get_windows_admin_username_and_password()
if windows_admin_username or windows_admin_password:
windows_license_type = None
if self.context.get_enable_ahub():
windows_license_type = "Windows_Server"
# this would throw an error if windows_admin_username is empty (the user enters an empty
# string after being prompted), since admin_username is a required parameter
windows_profile = self.models.ManagedClusterWindowsProfile(
admin_username=windows_admin_username,
admin_password=windows_admin_password,
license_type=windows_license_type,
)
mc.windows_profile = windows_profile
# clean up intermediate after `mc` is decorated
self.context.remove_intermediate("windows_admin_username")
self.context.remove_intermediate("windows_admin_password")
return mc
def set_up_service_principal_profile(self, mc):
if not isinstance(mc, self.models.ManagedCluster):
raise CLIInternalError(
"Unexpected mc object with type '{}'.".format(type(mc))
)
# If customer explicitly provide a service principal, disable managed identity.
(
service_principal,
client_secret,
) = self.context.get_service_principal_and_client_secret()
enable_managed_identity = self.context.get_enable_managed_identity()
# Skip create service principal profile for the cluster if the cluster enables managed identity
# and customer doesn't explicitly provide a service principal.
if not (
enable_managed_identity and
not service_principal and
not client_secret
):
service_principal_profile = (
self.models.ManagedClusterServicePrincipalProfile(
client_id=service_principal, secret=client_secret
)
)
mc.service_principal_profile = service_principal_profile
# clean up intermediates after `mc` is decorated
self.context.remove_intermediate("service_principal")
self.context.remove_intermediate("client_secret")
return mc
def construct_default_mc(self):
# An all-in-one function used to create the complete `ManagedCluster` object, which will later be
# passed as a parameter to the underlying SDK (mgmt-containerservice) to send the actual request.
# Note: to reduce the risk of regression introduced by refactoring, this function is not complete
# and is being implemented gradually.
# initialize the `ManagedCluster` object, also set up the intermediate named "subscription_id"
mc = self.init_mc()
# set up agent pool profile(s)
mc = self.set_up_agent_pool_profiles(mc)
# set up linux profile (for ssh access)
mc = self.set_up_linux_profile(mc)
# set up windows profile
mc = self.set_up_windows_profile(mc)
# set up service principal profile
mc = self.set_up_service_principal_profile(mc)
return mc
|
"""Dynamically obtain the value of nodepool_name according to the context.
        When the additional option enable_trim is enabled, dynamic completion will be triggered.
This function supports the option of enable_trim. When enabled, it will normalize the value of nodepool_name.
If no value is assigned, the default value "nodepool1" is set, and if the string length is greater than 12,
it is truncated.
:return: string
"""
# read the original value passed by the command
raw_value = self.raw_param.get("nodepool_name")
# try to read the property value corresponding to the parameter from the `mc` object
value_obtained_from_mc = None
if self.mc and self.mc.agent_pool_profiles:
agent_pool_profile = safe_list_get(
self.mc.agent_pool_profiles, 0, None
)
if agent_pool_profile:
value_obtained_from_mc = agent_pool_profile.name
# set default value
read_from_mc = False
if value_obtained_from_mc is not None:
nodepool_name = value_obtained_from_mc
read_from_mc = True
else:
nodepool_name = raw_value
dynamic_completion = False
        # check whether the parameter meets the conditions of dynamic completion
if kwargs.get("enable_trim", False):
dynamic_completion = True
# disable dynamic completion if the value is read from `mc`
dynamic_completion = dynamic_completion and not read_from_mc
if dynamic_completion:
if not nodepool_name:
nodepool_name = "nodepool1"
else:
nodepool_name = nodepool_name[:12]
# this parameter does not need validation
return nodepool_name
|
memory.py
|
"""Python STIX2 Memory Source/Sink"""
import io
import itertools
import json
import os
from stix2 import v20, v21
from stix2.base import _STIXBase
from stix2.datastore import DataSink, DataSource, DataStoreMixin
from stix2.datastore.filters import FilterSet, apply_common_filters
from stix2.parsing import parse
def _add(store, stix_data, allow_custom=True, version=None):
"""Add STIX objects to MemoryStore/Sink.
Adds STIX objects to an in-memory dictionary for fast lookup.
Recursive function, breaks down STIX Bundles and lists.
Args:
store: A MemoryStore, MemorySink or MemorySource object.
stix_data (list OR dict OR STIX object): STIX objects to be added
        allow_custom (bool): Whether to allow custom properties as well as
            unknown custom objects. Note that unknown custom objects cannot
            be parsed into STIX objects, and will be returned as is.
            Default: True.
version (str): Which STIX2 version to lock the parser to. (e.g. "2.0",
"2.1"). If None, the library makes the best effort to figure
out the spec representation of the object.
"""
if isinstance(stix_data, list):
        # STIX objects are in a list; recurse on each object
for stix_obj in stix_data:
_add(store, stix_obj, allow_custom, version)
elif stix_data["type"] == "bundle":
# adding a json bundle - so just grab STIX objects
for stix_obj in stix_data.get("objects", []):
_add(store, stix_obj, allow_custom, version)
else:
# Adding a single non-bundle object
if isinstance(stix_data, _STIXBase):
stix_obj = stix_data
else:
stix_obj = parse(stix_data, allow_custom, version)
# Map ID to a _ObjectFamily if the object is versioned, so we can track
# multiple versions. Otherwise, map directly to the object. All
# versioned objects should have a "modified" property.
if "modified" in stix_obj:
if stix_obj["id"] in store._data:
obj_family = store._data[stix_obj["id"]]
else:
obj_family = _ObjectFamily()
store._data[stix_obj["id"]] = obj_family
obj_family.add(stix_obj)
else:
store._data[stix_obj["id"]] = stix_obj
class _ObjectFamily(object):
"""
An internal implementation detail of memory sources/sinks/stores.
Represents a "family" of STIX objects: all objects with a particular
ID. (I.e. all versions.) The latest version is also tracked so that it
can be obtained quickly.
"""
def __init__(self):
self.all_versions = {}
self.latest_version = None
def add(self, obj):
self.all_versions[obj["modified"]] = obj
if (self.latest_version is None or
obj["modified"] > self.latest_version["modified"]):
self.latest_version = obj
def __str__(self):
return "<<{}; latest={}>>".format(
self.all_versions,
self.latest_version["modified"],
)
def __repr__(self):
return str(self)
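# Illustrative sketch (not used by the library): plain dicts are enough to
# exercise _ObjectFamily, since add() only reads the "modified" key.
def _object_family_sketch():
    family = _ObjectFamily()
    family.add({"id": "x", "modified": "2020-01-01T00:00:00.000Z"})
    family.add({"id": "x", "modified": "2021-01-01T00:00:00.000Z"})
    # ISO-8601 timestamps compare chronologically as strings
    assert family.latest_version["modified"] == "2021-01-01T00:00:00.000Z"
    return family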
class MemoryStore(DataStoreMixin):
"""Interface to an in-memory dictionary of STIX objects.
MemoryStore is a wrapper around a paired MemorySink and MemorySource.
    Note: It doesn't make sense to create a MemoryStore by passing
    in an existing MemorySource and MemorySink, because there could
    be data concurrency issues. Besides, it is just as easy to create a new MemoryStore.
Args:
stix_data (list OR dict OR STIX object): STIX content to be added
        allow_custom (bool): whether to allow custom STIX content.
            Only applied when import/export functions are called, i.e.
            load_from_file() and save_to_file(). Defaults to True.
Attributes:
_data (dict): the in-memory dict that holds STIX objects
source (MemorySource): MemorySource
sink (MemorySink): MemorySink
"""
def __init__(self, stix_data=None, allow_custom=True, version=None):
self._data = {}
if stix_data:
_add(self, stix_data, allow_custom, version)
super(MemoryStore, self).__init__(
source=MemorySource(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True),
sink=MemorySink(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True),
)
def save_to_file(self, *args, **kwargs):
"""Write SITX objects from in-memory dictionary to JSON file, as a STIX
Bundle. If a directory is given, the Bundle 'id' will be used as
filename. Otherwise, the provided value will be used.
Args:
path (str): file path to write STIX data to.
encoding (str): The file encoding. Default utf-8.
"""
return self.sink.save_to_file(*args, **kwargs)
def load_from_file(self, *args, **kwargs):
"""Load STIX data from JSON file.
File format is expected to be a single JSON STIX object or JSON STIX
bundle.
Args:
path (str): file path to load STIX data from
"""
return self.source.load_from_file(*args, **kwargs)
class MemorySink(DataSink):
"""Interface for adding/pushing STIX objects to an in-memory dictionary.
Designed to be paired with a MemorySource, together as the two
components of a MemoryStore.
Args:
        stix_data (dict OR list): valid STIX 2.0 content in a
            bundle or a list.
_store (bool): whether the MemorySink is a part of a MemoryStore,
in which case "stix_data" is a direct reference to
shared memory with DataSource. Not user supplied
allow_custom (bool): whether to allow custom objects/properties
when exporting STIX content to file.
Default: True.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Attributes:
_data (dict): the in-memory dict that holds STIX objects.
If part of a MemoryStore, the dict is shared with a MemorySource
"""
def __init__(self, stix_data=None, allow_custom=True, version=None, _store=False):
super(MemorySink, self).__init__()
self.allow_custom = allow_custom
if _store:
self._data = stix_data
else:
self._data = {}
if stix_data:
_add(self, stix_data, allow_custom, version)
def add(self, stix_data, version=None):
_add(self, stix_data, self.allow_custom, version)
add.__doc__ = _add.__doc__
def save_to_file(self, path, encoding="utf-8"):
path = os.path.abspath(path)
all_objs = list(itertools.chain.from_iterable(
value.all_versions.values() if isinstance(value, _ObjectFamily)
else [value]
for value in self._data.values()
))
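        # a per-object "spec_version" property only exists in STIX 2.1, so its
        # presence decides which bundle model to build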
if any("spec_version" in x for x in all_objs):
bundle = v21.Bundle(all_objs, allow_custom=self.allow_custom)
else:
bundle = v20.Bundle(all_objs, allow_custom=self.allow_custom)
|
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
else:
if not os.path.exists(path):
os.makedirs(path)
# if the user only provided a directory, use the bundle id for filename
path = os.path.join(path, bundle["id"] + ".json")
with io.open(path, "w", encoding=encoding) as f:
bundle = bundle.serialize(pretty=True, encoding=encoding, ensure_ascii=False)
f.write(bundle)
return path
save_to_file.__doc__ = MemoryStore.save_to_file.__doc__
class MemorySource(DataSource):
"""Interface for searching/retrieving STIX objects from an in-memory
dictionary.
Designed to be paired with a MemorySink, together as the two
components of a MemoryStore.
Args:
stix_data (dict OR list OR STIX object): valid STIX 2.0 content in
bundle or list.
        _store (bool): whether the MemorySource is part of a MemoryStore,
in which case "stix_data" is a direct reference to shared
memory with DataSink. Not user supplied
allow_custom (bool): whether to allow custom objects/properties
when importing STIX content from file.
Default: True.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Attributes:
_data (dict): the in-memory dict that holds STIX objects.
If part of a MemoryStore, the dict is shared with a MemorySink
"""
def __init__(self, stix_data=None, allow_custom=True, version=None, _store=False):
super(MemorySource, self).__init__()
self.allow_custom = allow_custom
if _store:
self._data = stix_data
else:
self._data = {}
if stix_data:
_add(self, stix_data, allow_custom, version)
def get(self, stix_id, _composite_filters=None):
"""Retrieve STIX object from in-memory dict via STIX ID.
Args:
stix_id (str): The STIX ID of the STIX object to be retrieved.
_composite_filters (FilterSet): collection of filters passed from the parent
CompositeDataSource, not user supplied
Returns:
(STIX object): STIX object that has the supplied ID.
"""
stix_obj = None
mapped_value = self._data.get(stix_id)
if mapped_value:
if isinstance(mapped_value, _ObjectFamily):
stix_obj = mapped_value.latest_version
else:
stix_obj = mapped_value
if stix_obj:
all_filters = list(
itertools.chain(
_composite_filters or [],
self.filters,
),
)
stix_obj = next(apply_common_filters([stix_obj], all_filters), None)
return stix_obj
def all_versions(self, stix_id, _composite_filters=None):
"""Retrieve STIX objects from in-memory dict via STIX ID, all versions
of it.
Args:
stix_id (str): The STIX ID of the STIX 2 object to retrieve.
_composite_filters (FilterSet): collection of filters passed from
the parent CompositeDataSource, not user supplied
Returns:
(list): list of STIX objects that have the supplied ID.
"""
results = []
mapped_value = self._data.get(stix_id)
if mapped_value:
if isinstance(mapped_value, _ObjectFamily):
stix_objs_to_filter = mapped_value.all_versions.values()
else:
stix_objs_to_filter = [mapped_value]
all_filters = list(
itertools.chain(
_composite_filters or [],
self.filters,
),
)
results.extend(
apply_common_filters(stix_objs_to_filter, all_filters),
)
return results
def query(self, query=None, _composite_filters=None):
"""Search and retrieve STIX objects based on the complete query.
A "complete query" includes the filters from the query, the filters
attached to this MemorySource, and any filters passed from a
CompositeDataSource (i.e. _composite_filters).
Args:
query (list): list of filters to search on
_composite_filters (FilterSet): collection of filters passed from
the CompositeDataSource, not user supplied
Returns:
(list): list of STIX objects that match the supplied query.
"""
query = FilterSet(query)
# combine all query filters
if self.filters:
query.add(self.filters)
if _composite_filters:
query.add(_composite_filters)
all_objs = itertools.chain.from_iterable(
value.all_versions.values() if isinstance(value, _ObjectFamily)
else [value]
for value in self._data.values()
)
# Apply STIX common property filters.
all_data = list(apply_common_filters(all_objs, query))
return all_data
def load_from_file(self, file_path, version=None, encoding='utf-8'):
with io.open(os.path.abspath(file_path), "r", encoding=encoding) as f:
stix_data = json.load(f)
_add(self, stix_data, self.allow_custom, version)
load_from_file.__doc__ = MemoryStore.load_from_file.__doc__
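# A minimal usage sketch. The identity below is a hypothetical example
# object, not data shipped with this module; any parsable STIX dict would do.
def _memory_store_sketch():
    store = MemoryStore()
    store.add({
        "type": "identity",
        "spec_version": "2.1",
        "id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
        "created": "2015-12-21T19:59:11.000Z",
        "modified": "2015-12-21T19:59:11.000Z",
        "name": "ACME Widget, Inc.",
        "identity_class": "organization",
    })
    # get() returns the latest version; all_versions() returns every version
    return store.get("identity--311b2d2d-f010-4473-83ec-1edf84858f4c")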
|
if path.endswith(".json"):
|
policy.ts
|
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
import * as pulumi from "@pulumi/pulumi";
import * as inputs from "../types/input";
import * as outputs from "../types/output";
import * as utilities from "../utilities";
/**
* Manages a Policy within a Dev Test Policy Set.
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as azure from "@pulumi/azure";
*
* const exampleResourceGroup = new azure.core.ResourceGroup("example", {
* location: "West US",
* });
* const exampleLab = new azure.devtest.Lab("example", {
* location: exampleResourceGroup.location,
* resourceGroupName: exampleResourceGroup.name,
* tags: {
* Sydney: "Australia",
* },
* });
* const examplePolicy = new azure.devtest.Policy("example", {
* evaluatorType: "MaxValuePolicy",
* factData: "",
* labName: exampleLab.name,
* policySetName: "default",
* resourceGroupName: exampleResourceGroup.name,
* tags: {
* Acceptance: "Test",
* },
* threshold: "999",
* });
* ```
*
* > This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/dev_test_policy.html.markdown.
*/
export class
|
extends pulumi.CustomResource {
/**
* Get an existing Policy resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param state Any extra arguments used during the lookup.
*/
public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: PolicyState, opts?: pulumi.CustomResourceOptions): Policy {
return new Policy(name, <any>state, { ...opts, id: id });
}
/** @internal */
public static readonly __pulumiType = 'azure:devtest/policy:Policy';
/**
* Returns true if the given object is an instance of Policy. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
*/
public static isInstance(obj: any): obj is Policy {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === Policy.__pulumiType;
}
/**
* A description for the Policy.
*/
public readonly description!: pulumi.Output<string | undefined>;
/**
* The Evaluation Type used for this Policy. Possible values include: 'AllowedValuesPolicy', 'MaxValuePolicy'. Changing this forces a new resource to be created.
*/
public readonly evaluatorType!: pulumi.Output<string>;
/**
* The Fact Data for this Policy.
*/
public readonly factData!: pulumi.Output<string | undefined>;
/**
* Specifies the name of the Dev Test Lab in which the Policy should be created. Changing this forces a new resource to be created.
*/
public readonly labName!: pulumi.Output<string>;
/**
* Specifies the name of the Dev Test Policy. Possible values are `GalleryImage`, `LabPremiumVmCount`, `LabTargetCost`, `LabVmCount`, `LabVmSize`, `UserOwnedLabPremiumVmCount`, `UserOwnedLabVmCount` and `UserOwnedLabVmCountInSubnet`. Changing this forces a new resource to be created.
*/
public readonly name!: pulumi.Output<string>;
/**
* Specifies the name of the Policy Set within the Dev Test Lab where this policy should be created. Changing this forces a new resource to be created.
*/
public readonly policySetName!: pulumi.Output<string>;
/**
* The name of the resource group in which the Dev Test Lab resource exists. Changing this forces a new resource to be created.
*/
public readonly resourceGroupName!: pulumi.Output<string>;
/**
* A mapping of tags to assign to the resource.
*/
public readonly tags!: pulumi.Output<{[key: string]: string}>;
/**
* The Threshold for this Policy.
*/
public readonly threshold!: pulumi.Output<string>;
/**
* Create a Policy resource with the given unique name, arguments, and options.
*
* @param name The _unique_ name of the resource.
* @param args The arguments to use to populate this resource's properties.
* @param opts A bag of options that control this resource's behavior.
*/
constructor(name: string, args: PolicyArgs, opts?: pulumi.CustomResourceOptions)
constructor(name: string, argsOrState?: PolicyArgs | PolicyState, opts?: pulumi.CustomResourceOptions) {
let inputs: pulumi.Inputs = {};
if (opts && opts.id) {
const state = argsOrState as PolicyState | undefined;
inputs["description"] = state ? state.description : undefined;
inputs["evaluatorType"] = state ? state.evaluatorType : undefined;
inputs["factData"] = state ? state.factData : undefined;
inputs["labName"] = state ? state.labName : undefined;
inputs["name"] = state ? state.name : undefined;
inputs["policySetName"] = state ? state.policySetName : undefined;
inputs["resourceGroupName"] = state ? state.resourceGroupName : undefined;
inputs["tags"] = state ? state.tags : undefined;
inputs["threshold"] = state ? state.threshold : undefined;
} else {
const args = argsOrState as PolicyArgs | undefined;
if (!args || args.evaluatorType === undefined) {
throw new Error("Missing required property 'evaluatorType'");
}
if (!args || args.labName === undefined) {
throw new Error("Missing required property 'labName'");
}
if (!args || args.policySetName === undefined) {
throw new Error("Missing required property 'policySetName'");
}
if (!args || args.resourceGroupName === undefined) {
throw new Error("Missing required property 'resourceGroupName'");
}
if (!args || args.threshold === undefined) {
throw new Error("Missing required property 'threshold'");
}
inputs["description"] = args ? args.description : undefined;
inputs["evaluatorType"] = args ? args.evaluatorType : undefined;
inputs["factData"] = args ? args.factData : undefined;
inputs["labName"] = args ? args.labName : undefined;
inputs["name"] = args ? args.name : undefined;
inputs["policySetName"] = args ? args.policySetName : undefined;
inputs["resourceGroupName"] = args ? args.resourceGroupName : undefined;
inputs["tags"] = args ? args.tags : undefined;
inputs["threshold"] = args ? args.threshold : undefined;
}
if (!opts) {
opts = {}
}
if (!opts.version) {
opts.version = utilities.getVersion();
}
super(Policy.__pulumiType, name, inputs, opts);
}
}
/**
* Input properties used for looking up and filtering Policy resources.
*/
export interface PolicyState {
/**
* A description for the Policy.
*/
readonly description?: pulumi.Input<string>;
/**
* The Evaluation Type used for this Policy. Possible values include: 'AllowedValuesPolicy', 'MaxValuePolicy'. Changing this forces a new resource to be created.
*/
readonly evaluatorType?: pulumi.Input<string>;
/**
* The Fact Data for this Policy.
*/
readonly factData?: pulumi.Input<string>;
/**
* Specifies the name of the Dev Test Lab in which the Policy should be created. Changing this forces a new resource to be created.
*/
readonly labName?: pulumi.Input<string>;
/**
* Specifies the name of the Dev Test Policy. Possible values are `GalleryImage`, `LabPremiumVmCount`, `LabTargetCost`, `LabVmCount`, `LabVmSize`, `UserOwnedLabPremiumVmCount`, `UserOwnedLabVmCount` and `UserOwnedLabVmCountInSubnet`. Changing this forces a new resource to be created.
*/
readonly name?: pulumi.Input<string>;
/**
* Specifies the name of the Policy Set within the Dev Test Lab where this policy should be created. Changing this forces a new resource to be created.
*/
readonly policySetName?: pulumi.Input<string>;
/**
* The name of the resource group in which the Dev Test Lab resource exists. Changing this forces a new resource to be created.
*/
readonly resourceGroupName?: pulumi.Input<string>;
/**
* A mapping of tags to assign to the resource.
*/
readonly tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
/**
* The Threshold for this Policy.
*/
readonly threshold?: pulumi.Input<string>;
}
/**
* The set of arguments for constructing a Policy resource.
*/
export interface PolicyArgs {
/**
* A description for the Policy.
*/
readonly description?: pulumi.Input<string>;
/**
* The Evaluation Type used for this Policy. Possible values include: 'AllowedValuesPolicy', 'MaxValuePolicy'. Changing this forces a new resource to be created.
*/
readonly evaluatorType: pulumi.Input<string>;
/**
* The Fact Data for this Policy.
*/
readonly factData?: pulumi.Input<string>;
/**
* Specifies the name of the Dev Test Lab in which the Policy should be created. Changing this forces a new resource to be created.
*/
readonly labName: pulumi.Input<string>;
/**
* Specifies the name of the Dev Test Policy. Possible values are `GalleryImage`, `LabPremiumVmCount`, `LabTargetCost`, `LabVmCount`, `LabVmSize`, `UserOwnedLabPremiumVmCount`, `UserOwnedLabVmCount` and `UserOwnedLabVmCountInSubnet`. Changing this forces a new resource to be created.
*/
readonly name?: pulumi.Input<string>;
/**
* Specifies the name of the Policy Set within the Dev Test Lab where this policy should be created. Changing this forces a new resource to be created.
*/
readonly policySetName: pulumi.Input<string>;
/**
* The name of the resource group in which the Dev Test Lab resource exists. Changing this forces a new resource to be created.
*/
readonly resourceGroupName: pulumi.Input<string>;
/**
* A mapping of tags to assign to the resource.
*/
readonly tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
/**
* The Threshold for this Policy.
*/
readonly threshold: pulumi.Input<string>;
}
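// A minimal lookup sketch: Policy.get adopts an existing resource by its
// Azure resource ID (the subscription and names below are illustrative).
const existingPolicy = Policy.get("existing-policy",
    "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example" +
    "/providers/Microsoft.DevTestLab/labs/example/policysets/default/policies/LabVmCount");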
|
Policy
|
comment-list-item.js
|
import {authorReputation} from '../filters/author-reputation'
export default () => {
return {
restrict: 'AE',
replace: true,
scope: {
comment: '='
},
link: (scope, element, attrs) => {
scope.removeDirective = function () {
scope.$destroy();
element.remove();
};
},
template: `
<div class="comment-list-item" ng-class="{'selected': comment._selected_, 'deleting': deleting, 'deleted': deleted, 'bad': isBad }">
<div class="comment-list-item-inner">
<div class="comment-author-pic" author-bg-img-style author="{{ comment.author }}"></div>
<div class="comment-header">
<span class="comment-author"><author-name author-data="comment.author_data"></author-name> </span>
<span class="comment-author-reputation">{{ comment.author_reputation|authorReputation|number:0 }}</span>
<span class="comment-date"><a ng-click="goComment()" ng-class="{'no-child': comment.comments.length==0}" title="{{ comment.created|dateFormatted }}"> {{comment.created|timeAgo}}</a></span>
<a ng-click="reveal()" class="comment-reveal">Reveal Comment</a>
</div>
<div class="comment-body markdown-view mini-markdown" ng-bind-html="comment | commentBody | markDown2Html"></div>
<div class="comment-footer">
<div class="comment-voting">
<div class="comment-up-vote">
<content-vote content="comment"></content-vote>
</div>
<div class="comment-total">
<content-payout-info content="comment"></content-payout-info>
</div>
</div>
<div class="comment-voters">
<content-voters-info content="comment"></content-voters-info>
</div>
<div class="comment-reply">
<a ng-click="" login-required required-keys="'posting'" on-login-success="replyClicked()">{{ 'REPLY' | translate }}</a>
|
<div class="comment-encrypted" ng-if="encrypted"><i class="fa fa-lock" uib-tooltip="{{ 'Encrypted comment' | __ }}"></i></div>
</div>
<div class="comment-extra-tools">
<div class="comment-flagging">
<a ng-class="{'flagged': flagged, 'flagging': flagging}" ng-click="" login-required required-keys="'posting'" on-login-success="flagClicked()">
<i ng-if="!flagging" class="fa fa-flag-o"></i>
<i ng-if="flagging" class="fa fa-spin fa-spinner fa-circle-o-notch"></i>
</a>
</div>
</div>
</div>
<comment-editor ng-if="commentFlag" content="comment" mode="{{ commentMode }}" on-cancel="onCommentEditorCanceled()" after-success="afterNewComment"></comment-editor>
<comment-list comments="comment.comments"></comment-list>
</div>
`,
controller: ($scope, $rootScope, $filter, $timeout, $location, $window, $confirm, dpayAuthenticatedService, activeUsername) => {
const activeUser = activeUsername();
if (!$scope.comment.comments) {
$scope.comment.comments = [];
}
$scope.encrypted = false;
let jsonMeta = {};
try {
jsonMeta = JSON.parse($scope.comment.json_metadata);
} catch (e) { }
if (jsonMeta.encrypted === 1) {
$scope.encrypted = true;
}
$scope.canEdit = false;
$scope.deleting = false;
$scope.deleted = false;
const detectCanEdit = () => {
$scope.canEdit = (activeUser === $scope.comment.author);
};
detectCanEdit();
$rootScope.$on('userLoggedOut', () => {
detectCanEdit();
});
$rootScope.$on('userLoggedIn', () => {
detectCanEdit();
});
$scope.deleteClicked = () => {
if ($window.confirm($filter('translate')('ARE_YOU_SURE'))) {
$scope.deleting = true;
dpayAuthenticatedService.deleteComment($scope.comment.author, $scope.comment.permlink).then((resp) => {
$scope.deleted = true;
$timeout(() => {
$scope.removeDirective();
}, 100);
}).catch((e) => {
$rootScope.showError(e);
}).then(() => {
$scope.deleting = false;
});
}
};
$scope.commentFlag = false;
$scope.replyClicked = () => {
$rootScope.$broadcast('commentEditorOpening');
$timeout(() => {
$scope.commentMode = 'reply';
$scope.commentFlag = true;
}, 100);
};
$scope.editClicked = () => {
$rootScope.$broadcast('commentEditorOpening');
$timeout(() => {
$scope.commentMode = 'edit';
$scope.commentFlag = true;
}, 100);
};
$rootScope.$on('commentEditorOpening', () => {
$scope.commentFlag = false;
});
$scope.flagged = false;
$scope.canFlag = !(activeUser === $scope.comment.author);
$scope.flagging = false;
if ($scope.canFlag && activeUser) {
for (let vote of $scope.comment.active_votes) {
if (vote.voter === activeUser && vote.percent < 0) {
$scope.flagged = true;
}
}
}
$scope.flagClicked = () => {
$confirm($filter('translate')('ARE_YOU_SURE'), $filter('translate')('FLAGGING_TEXT'), () => {
const author = $scope.comment.author;
const permlink = $scope.comment.permlink;
$scope.flagging = true;
dpayAuthenticatedService.vote(author, permlink, -10000).then((resp) => {
$rootScope.$broadcast('CONTENT_VOTED', {
author: author,
permlink: permlink,
weight: -1
});
}).catch((e) => {
$rootScope.showError(e);
}).then(() => {
$scope.flagging = false;
});
});
};
$rootScope.$on('CONTENT_VOTED', (r, d) => {
if ($scope.comment.author === d.author && $scope.comment.permlink === d.permlink) {
$scope.flagged = d.weight < 0;
}
});
$scope.onCommentEditorCanceled = () => {
$scope.commentFlag = false;
};
$scope.afterNewComment = (newComment, mode) => {
if (mode === 'reply') {
$scope.comment.comments.push(newComment);
} else if (mode === 'edit') {
$scope.comment.body = newComment.body;
// it is required to update json_metadata for encrypted comments
$scope.comment.json_metadata = newComment.json_metadata;
}
};
$scope.isBad = $scope.comment.net_rshares < 0 || $scope.comment.author_data.reputation < 0;
$scope.reveal = () => {
$scope.isBad = false;
};
$scope.goComment = () => {
const tag = $scope.comment.url.split('/')[1];
$rootScope.selectedPost = null;
            let u = `/post/${tag}/${$scope.comment.author}/${$scope.comment.permlink}`;
$location.path(u);
}
}
};
};
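// Hypothetical wiring sketch (module and directive names assumed, not taken
// from this file): the factory exported above would be registered as
//   import commentListItem from './directives/comment-list-item';
//   angular.module('app').directive('commentListItem', commentListItem);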
|
</div>
<div class="comment-edit" ng-if="canEdit"><a ng-click="" login-required required-keys="'posting'" on-login-success="editClicked()" title="{{ 'EDIT' | translate }}"><i class="fa fa-pencil"></i></a></div>
<div class="comment-delete" ng-if="canEdit"><a ng-click="" login-required required-keys="'posting'" on-login-success="deleteClicked()" title="{{ 'REMOVE' | translate }}"><i class="fa fa-spin fa-spinner fa-circle-o-notch" ng-if="deleting"></i><i class="fa fa-times" ng-if="!deleting"></i></a></div>
|
zaishen-quest.ts
|
import { Command, CommandoClient, CommandoMessage } from 'discord.js-commando';
import { createEmbed } from '../../lib/zaishen-quest';
export = class ZaishenQuestCommand extends Command {
constructor(client: CommandoClient) {
super(client, {
|
memberName: 'zq',
description: 'Displays current Zaishen Quest Information with a countdown',
details: ''
});
}
async run(message: CommandoMessage) {
return message.say(createEmbed(new Date()));
}
}
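// Hypothetical registration sketch (group id assumed; the client setup
// itself lives elsewhere in the bot):
//   client.registry.registerGroup('gw');
//   client.registry.registerCommand(ZaishenQuestCommand);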
|
name: 'zaishen-quest',
aliases: ['zq'],
group: 'gw',
|
trainer.py
|
# coding=utf-8
# Copyright 2020-present the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The Trainer class, to easily train a 🤗 Transformers model from scratch or fine-tune it on a new task.
"""
import collections
import inspect
import math
import os
import re
import shutil
import time
import warnings
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
# Integrations must be imported before ML frameworks:
from .integrations import ( # isort: split
default_hp_search_backend,
get_reporting_integration_callbacks,
hp_params,
is_fairscale_available,
is_optuna_available,
is_ray_tune_available,
run_hp_search_optuna,
run_hp_search_ray,
init_deepspeed,
)
import numpy as np
import torch
from packaging import version
from torch import nn
from torch.utils.data.dataloader import DataLoader
from torch.utils.data.dataset import Dataset
from torch.utils.data.distributed import DistributedSampler
from torch.utils.data.sampler import RandomSampler, SequentialSampler
from .data.data_collator import DataCollator, DataCollatorWithPadding, default_data_collator
from .file_utils import (
WEIGHTS_NAME,
is_apex_available,
is_datasets_available,
is_in_notebook,
is_sagemaker_distributed_available,
is_torch_tpu_available,
)
from .modeling_utils import PreTrainedModel
from .models.auto.modeling_auto import MODEL_FOR_QUESTION_ANSWERING_MAPPING
from .optimization import Adafactor, AdamW, get_scheduler
from .tokenization_utils_base import PreTrainedTokenizerBase
from .trainer_callback import (
CallbackHandler,
DefaultFlowCallback,
PrinterCallback,
ProgressCallback,
TrainerCallback,
TrainerControl,
TrainerState,
)
from .trainer_pt_utils import (
DistributedLengthGroupedSampler,
DistributedTensorGatherer,
LabelSmoother,
LengthGroupedSampler,
SequentialDistributedSampler,
distributed_broadcast_scalars,
distributed_concat,
nested_concat,
nested_detach,
nested_numpify,
nested_xla_mesh_reduce,
reissue_pt_warnings,
)
from .trainer_utils import (
PREFIX_CHECKPOINT_DIR,
BestRun,
EvalPrediction,
HPSearchBackend,
PredictionOutput,
TrainOutput,
default_compute_objective,
default_hp_space,
set_seed,
speed_metrics,
)
from .training_args import ParallelMode, TrainingArguments
from .utils import logging
_is_native_amp_available = False
DEFAULT_CALLBACKS = [DefaultFlowCallback]
DEFAULT_PROGRESS_CALLBACK = ProgressCallback
if is_in_notebook():
from .utils.notebook import NotebookProgressCallback
DEFAULT_PROGRESS_CALLBACK = NotebookProgressCallback
if is_apex_available():
from apex import amp
if version.parse(torch.__version__) >= version.parse("1.6"):
_is_native_amp_available = True
from torch.cuda.amp import autocast
if is_datasets_available():
import datasets
if is_torch_tpu_available():
import torch_xla.core.xla_model as xm
import torch_xla.debug.metrics as met
import torch_xla.distributed.parallel_loader as pl
if is_fairscale_available():
from fairscale.nn.data_parallel import ShardedDataParallel as ShardedDDP
from fairscale.optim import OSS
from fairscale.optim.grad_scaler import ShardedGradScaler
if is_sagemaker_distributed_available():
import smdistributed.dataparallel.torch.distributed as dist
from smdistributed.dataparallel.torch.parallel.distributed import DistributedDataParallel as DDP
else:
import torch.distributed as dist
if TYPE_CHECKING:
import optuna
logger = logging.get_logger(__name__)
def _model_unwrap(model: nn.Module) -> nn.Module:
# since there could be multiple levels of wrapping, unwrap recursively
if hasattr(model, "module"):
return _model_unwrap(model.module)
else:
return model
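# Illustrative sketch (toy module, not part of the Trainer): nn.DataParallel
# exposes the wrapped model as `.module`, and _model_unwrap peels that off
# recursively no matter how many wrappers are stacked.
def _model_unwrap_sketch():
    inner = nn.Linear(2, 2)
    wrapped = nn.DataParallel(nn.DataParallel(inner))
    assert _model_unwrap(wrapped) is inner
    return inner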
class Trainer:
"""
Trainer is a simple but feature-complete training and eval loop for PyTorch, optimized for 🤗 Transformers.
Args:
model (:class:`~transformers.PreTrainedModel` or :obj:`torch.nn.Module`, `optional`):
The model to train, evaluate or use for predictions. If not provided, a ``model_init`` must be passed.
.. note::
:class:`~transformers.Trainer` is optimized to work with the :class:`~transformers.PreTrainedModel`
provided by the library. You can still use your own models defined as :obj:`torch.nn.Module` as long as
they work the same way as the 🤗 Transformers models.
args (:class:`~transformers.TrainingArguments`, `optional`):
The arguments to tweak for training. Will default to a basic instance of
:class:`~transformers.TrainingArguments` with the ``output_dir`` set to a directory named `tmp_trainer` in
the current directory if not provided.
data_collator (:obj:`DataCollator`, `optional`):
The function to use to form a batch from a list of elements of :obj:`train_dataset` or :obj:`eval_dataset`.
Will default to :func:`~transformers.default_data_collator` if no ``tokenizer`` is provided, an instance of
:func:`~transformers.DataCollatorWithPadding` otherwise.
train_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`):
The dataset to use for training. If it is an :obj:`datasets.Dataset`, columns not accepted by the
``model.forward()`` method are automatically removed.
eval_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`):
The dataset to use for evaluation. If it is an :obj:`datasets.Dataset`, columns not accepted by the
``model.forward()`` method are automatically removed.
tokenizer (:class:`PreTrainedTokenizerBase`, `optional`):
The tokenizer used to preprocess the data. If provided, will be used to automatically pad the inputs the
maximum length when batching inputs, and it will be saved along the model to make it easier to rerun an
interrupted training or reuse the fine-tuned model.
model_init (:obj:`Callable[[], PreTrainedModel]`, `optional`):
A function that instantiates the model to be used. If provided, each call to
:meth:`~transformers.Trainer.train` will start from a new instance of the model as given by this function.
The function may have zero argument, or a single one containing the optuna/Ray Tune trial object, to be
able to choose different architectures according to hyper parameters (such as layer count, sizes of inner
layers, dropout probabilities etc).
compute_metrics (:obj:`Callable[[EvalPrediction], Dict]`, `optional`):
The function that will be used to compute metrics at evaluation. Must take a
:class:`~transformers.EvalPrediction` and return a dictionary string to metric values.
callbacks (List of :obj:`~transformers.TrainerCallback`, `optional`):
A list of callbacks to customize the training loop. Will add those to the list of default callbacks
detailed in :doc:`here <callback>`.
If you want to remove one of the default callbacks used, use the :meth:`Trainer.remove_callback` method.
        optimizers (:obj:`Tuple[torch.optim.Optimizer, torch.optim.lr_scheduler.LambdaLR]`, `optional`): A tuple
containing the optimizer and the scheduler to use. Will default to an instance of
:class:`~transformers.AdamW` on your model and a scheduler given by
:func:`~transformers.get_linear_schedule_with_warmup` controlled by :obj:`args`.
Important attributes:
- **model** -- Always points to the core model. If using a transformers model, it will be a
:class:`~transformers.PreTrainedModel` subclass.
- **model_wrapped** -- Always points to the most external model in case one or more other modules wrap the
original model. This is the model that should be used for the forward pass. For example, under ``DeepSpeed``,
the inner model is wrapped in ``DeepSpeed`` and then again in ``torch.nn.DistributedDataParallel``. If the
inner model hasn't been wrapped, then ``self.model_wrapped`` is the same as ``self.model``.
- **is_model_parallel** -- Whether or not a model has been switched to a model parallel mode (different from
data parallelism, this means some of the model layers are split on different GPUs).
"""
def __init__(
self,
model: Union[PreTrainedModel, torch.nn.Module] = None,
args: TrainingArguments = None,
data_collator: Optional[DataCollator] = None,
train_dataset: Optional[Dataset] = None,
eval_dataset: Optional[Dataset] = None,
tokenizer: Optional["PreTrainedTokenizerBase"] = None,
model_init: Callable[[], PreTrainedModel] = None,
compute_metrics: Optional[Callable[[EvalPrediction], Dict]] = None,
callbacks: Optional[List[TrainerCallback]] = None,
optimizers: Tuple[torch.optim.Optimizer, torch.optim.lr_scheduler.LambdaLR] = (None, None),
):
if args is None:
output_dir = "tmp_trainer"
logger.info(f"No `TrainingArguments` passed, using `output_dir={output_dir}`.")
args = TrainingArguments(output_dir=output_dir)
self.args = args
        # Seed must be set before instantiating the model when using model_init
set_seed(self.args.seed)
self.hp_name = None
self.deepspeed = None
if model is None:
if model_init is not None:
self.model_init = model_init
model = self.call_model_init()
else:
raise RuntimeError("`Trainer` requires either a `model` or `model_init` argument")
else:
if model_init is not None:
warnings.warn(
"`Trainer` requires either a `model` or `model_init` argument, but not both. "
"`model_init` will overwrite your model when calling the `train` method. This will become a fatal error in the next release.",
FutureWarning,
)
self.model_init = model_init
if hasattr(model, "is_parallelizable") and model.is_parallelizable and model.model_parallel:
self.is_model_parallel = True
else:
self.is_model_parallel = False
default_collator = default_data_collator if tokenizer is None else DataCollatorWithPadding(tokenizer)
self.data_collator = data_collator if data_collator is not None else default_collator
self.train_dataset = train_dataset
self.eval_dataset = eval_dataset
self.tokenizer = tokenizer
# Model parallel
if not self.is_model_parallel:
model = model.to(args.device)
else:
# Force n_gpu to 1 to avoid DataParallel.
self.args._n_gpu = 1
# later use `self.model is self.model_wrapped` to check if it's wrapped or not
self.model_wrapped = model
self.model = model
self.compute_metrics = compute_metrics
self.optimizer, self.lr_scheduler = optimizers
if model_init is not None and (self.optimizer is not None or self.lr_scheduler is not None):
raise RuntimeError(
"Passing a `model_init` is incompatible with providing the `optimizers` argument."
"You should subclass `Trainer` and override the `create_optimizer_and_scheduler` method."
)
default_callbacks = DEFAULT_CALLBACKS + get_reporting_integration_callbacks(self.args.report_to)
callbacks = default_callbacks if callbacks is None else default_callbacks + callbacks
self.callback_handler = CallbackHandler(
callbacks, self.model, self.tokenizer, self.optimizer, self.lr_scheduler
)
self.add_callback(PrinterCallback if self.args.disable_tqdm else DEFAULT_PROGRESS_CALLBACK)
# Will be set to True by `self._setup_loggers()` on first call to `self.log()`.
self._loggers_initialized = False
# Create output directory if needed
if self.is_world_process_zero():
os.makedirs(self.args.output_dir, exist_ok=True)
if is_torch_tpu_available() and isinstance(self.model, PreTrainedModel):
# Set an xla_device flag on the model's config.
            # We'll find a more elegant way and not need to do this in the future.
self.model.config.xla_device = True
if not callable(self.data_collator) and callable(getattr(self.data_collator, "collate_batch", None)):
raise ValueError("The `data_collator` should be a simple callable (function, class with `__call__`).")
if args.max_steps > 0:
logger.info("max_steps is given, it will override any value given in num_train_epochs")
# Enforce rules on using datasets with no __len__
if train_dataset is not None and not isinstance(train_dataset, collections.abc.Sized) and args.max_steps <= 0:
raise ValueError("train_dataset does not implement __len__, max_steps has to be specified")
if eval_dataset is not None and not isinstance(eval_dataset, collections.abc.Sized):
raise ValueError("eval_dataset must implement __len__")
if is_datasets_available():
if isinstance(train_dataset, datasets.Dataset):
self._remove_unused_columns(self.train_dataset, description="training")
if isinstance(eval_dataset, datasets.Dataset):
self._remove_unused_columns(self.eval_dataset, description="evaluation")
# Setup Sharded DDP training
self.sharded_dpp = False
if args.sharded_ddp:
if args.deepspeed:
raise ValueError(
"Using --sharded_ddp together with --deepspeed is not possible, deactivate one of those flags."
)
if args.local_rank == -1:
raise ValueError("Using sharded DDP only works in distributed training.")
elif not is_fairscale_available():
raise ImportError("Sharded DDP training requires fairscale: `pip install fairscale`.")
else:
self.sharded_dpp = True
# Mixed precision setup
self.use_apex = False
self.use_amp = False
self.fp16_backend = None
if args.fp16:
if args.fp16_backend == "auto":
self.fp16_backend = "amp" if _is_native_amp_available else "apex"
else:
self.fp16_backend = args.fp16_backend
logger.info(f"Using {self.fp16_backend} fp16 backend")
if args.fp16 and not args.deepspeed: # deepspeed manages its own fp16
if self.fp16_backend == "amp":
self.use_amp = True
self.scaler = ShardedGradScaler() if self.sharded_dpp else torch.cuda.amp.GradScaler()
else:
if not is_apex_available():
raise ImportError(
"Using FP16 with APEX but APEX is not installed, please refer to https://www.github.com/nvidia/apex."
)
self.use_apex = True
# Label smoothing
if self.args.label_smoothing_factor != 0:
self.label_smoother = LabelSmoother(epsilon=self.args.label_smoothing_factor)
else:
self.label_smoother = None
self.state = TrainerState()
self.control = TrainerControl()
# Internal variable for total_flos used to count as tensors (for distributed + TPU), will be sent in the
# state at each call to self.log.
self._total_flos = None
self.hp_search_backend = None
self.use_tune_checkpoints = False
default_label_names = (
["start_positions", "end_positions"]
if type(self.model) in MODEL_FOR_QUESTION_ANSWERING_MAPPING.values()
else ["labels"]
)
self.label_names = default_label_names if self.args.label_names is None else self.args.label_names
self.control = self.callback_handler.on_init_end(self.args, self.state, self.control)
def add_callback(self, callback):
"""
Add a callback to the current list of :class:`~transformer.TrainerCallback`.
Args:
callback (:obj:`type` or :class:`~transformer.TrainerCallback`):
A :class:`~transformer.TrainerCallback` class or an instance of a :class:`~transformer.TrainerCallback`.
In the first case, will instantiate a member of that class.
"""
self.callback_handler.add_callback(callback)
def pop_callback(self, callback):
"""
Remove a callback from the current list of :class:`~transformer.TrainerCallback` and returns it.
If the callback is not found, returns :obj:`None` (and no error is raised).
Args:
callback (:obj:`type` or :class:`~transformer.TrainerCallback`):
A :class:`~transformer.TrainerCallback` class or an instance of a :class:`~transformer.TrainerCallback`.
In the first case, will pop the first member of that class found in the list of callbacks.
Returns:
:class:`~transformer.TrainerCallback`: The callback removed, if found.
"""
return self.callback_handler.pop_callback(callback)
def remove_callback(self, callback):
"""
Remove a callback from the current list of :class:`~transformer.TrainerCallback`.
Args:
callback (:obj:`type` or :class:`~transformer.TrainerCallback`):
A :class:`~transformer.TrainerCallback` class or an instance of a :class:`~transformer.TrainerCallback`.
In the first case, will remove the first member of that class found in the list of callbacks.
"""
self.callback_handler.remove_callback(callback)
def _remove_unused_columns(self, dataset: "datasets.Dataset", description: Optional[str] = None):
if not self.args.remove_unused_columns:
return
# Inspect model forward signature to keep only the arguments it accepts.
signature = inspect.signature(self.model.forward)
signature_columns = list(signature.parameters.keys())
# Labels may be named label or label_ids, the default data collator handles that.
signature_columns += ["label", "label_ids"]
columns = [k for k in signature_columns if k in dataset.column_names]
ignored_columns = list(set(dataset.column_names) - set(signature_columns))
dset_description = "" if description is None else f"in the {description} set "
logger.info(
f"The following columns {dset_description}don't have a corresponding argument in `{self.model.__class__.__name__}.forward` and have been ignored: {', '.join(ignored_columns)}."
)
dataset.set_format(type=dataset.format["type"], columns=columns)
def _get_train_sampler(self) -> Optional[torch.utils.data.sampler.Sampler]:
if isinstance(self.train_dataset, torch.utils.data.IterableDataset) or not isinstance(
self.train_dataset, collections.abc.Sized
):
return None
# Gather the number of processes and this process index.
if self.args.parallel_mode == ParallelMode.TPU:
num_processes = xm.xrt_world_size()
process_index = xm.get_ordinal()
elif (
self.args.parallel_mode == ParallelMode.DISTRIBUTED
or self.args.parallel_mode == ParallelMode.SAGEMAKER_DISTRIBUTED
):
num_processes = dist.get_world_size()
process_index = dist.get_rank()
else:
num_processes = 1
process_index = 0
# Build the sampler.
if self.args.group_by_length:
if num_processes <= 1:
return LengthGroupedSampler(self.train_dataset, self.args.train_batch_size)
else:
return DistributedLengthGroupedSampler(
self.train_dataset, self.args.train_batch_size, num_replicas=num_processes, rank=process_index
)
else:
if num_processes <= 1:
return RandomSampler(self.train_dataset)
else:
return DistributedSampler(self.train_dataset, num_replicas=num_processes, rank=process_index)
def get_train_dataloader(self) -> DataLoader:
"""
Returns the training :class:`~torch.utils.data.DataLoader`.
Will use no sampler if :obj:`self.train_dataset` does not implement :obj:`__len__`, a random sampler (adapted
to distributed training if necessary) otherwise.
Subclass and override this method if you want to inject some custom behavior.
"""
if self.train_dataset is None:
raise ValueError("Trainer: training requires a train_dataset.")
train_sampler = self._get_train_sampler()
return DataLoader(
self.train_dataset,
batch_size=self.args.train_batch_size,
sampler=train_sampler,
collate_fn=self.data_collator,
drop_last=self.args.dataloader_drop_last,
num_workers=self.args.dataloader_num_workers,
pin_memory=self.args.dataloader_pin_memory,
)
def _get_eval_sampler(self, eval_dataset: Dataset) -> Optional[torch.utils.data.sampler.Sampler]:
if is_torch_tpu_available():
return SequentialDistributedSampler(eval_dataset, num_replicas=xm.xrt_world_size(), rank=xm.get_ordinal())
elif self.args.local_rank != -1:
return SequentialDistributedSampler(eval_dataset)
else:
return SequentialSampler(eval_dataset)
def get_eval_dataloader(self, eval_dataset: Optional[Dataset] = None) -> DataLoader:
"""
Returns the evaluation :class:`~torch.utils.data.DataLoader`.
Subclass and override this method if you want to inject some custom behavior.
Args:
eval_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`):
If provided, will override :obj:`self.eval_dataset`. If it is an :obj:`datasets.Dataset`, columns not
accepted by the ``model.forward()`` method are automatically removed. It must implement :obj:`__len__`.
"""
if eval_dataset is None and self.eval_dataset is None:
raise ValueError("Trainer: evaluation requires an eval_dataset.")
elif eval_dataset is not None and not isinstance(eval_dataset, collections.abc.Sized):
raise ValueError("eval_dataset must implement __len__")
elif is_datasets_available() and isinstance(eval_dataset, datasets.Dataset):
self._remove_unused_columns(eval_dataset, description="evaluation")
eval_dataset = eval_dataset if eval_dataset is not None else self.eval_dataset
eval_sampler = self._get_eval_sampler(eval_dataset)
return DataLoader(
eval_dataset,
sampler=eval_sampler,
batch_size=self.args.eval_batch_size,
collate_fn=self.data_collator,
drop_last=self.args.dataloader_drop_last,
num_workers=self.args.dataloader_num_workers,
pin_memory=self.args.dataloader_pin_memory,
)
def get_test_dataloader(self, test_dataset: Dataset) -> DataLoader:
"""
Returns the test :class:`~torch.utils.data.DataLoader`.
Subclass and override this method if you want to inject some custom behavior.
Args:
test_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`):
The test dataset to use. If it is an :obj:`datasets.Dataset`, columns not accepted by the
``model.forward()`` method are automatically removed. It must implement :obj:`__len__`.
"""
if not isinstance(test_dataset, collections.abc.Sized):
raise ValueError("test_dataset must implement __len__")
elif is_datasets_available() and isinstance(test_dataset, datasets.Dataset):
self._remove_unused_columns(test_dataset, description="test")
test_sampler = self._get_eval_sampler(test_dataset)
# We use the same batch_size as for eval.
return DataLoader(
test_dataset,
sampler=test_sampler,
batch_size=self.args.eval_batch_size,
collate_fn=self.data_collator,
drop_last=self.args.dataloader_drop_last,
pin_memory=self.args.dataloader_pin_memory,
)
def create_optimizer_and_scheduler(self, num_training_steps: int):
"""
Setup the optimizer and the learning rate scheduler.
We provide a reasonable default that works well. If you want to use something else, you can pass a tuple in the
        Trainer's init through :obj:`optimizers`, or subclass and override this method.
"""
if self.optimizer is None:
no_decay = ["bias", "LayerNorm.weight"]
optimizer_grouped_parameters = [
{
"params": [p for n, p in self.model.named_parameters() if not any(nd in n for nd in no_decay)],
"weight_decay": self.args.weight_decay,
},
{
"params": [p for n, p in self.model.named_parameters() if any(nd in n for nd in no_decay)],
"weight_decay": 0.0,
},
]
if self.args.adafactor:
optimizer_cls = Adafactor
optimizer_kwargs = {"scale_parameter": False, "relative_step": False}
else:
optimizer_cls = AdamW
optimizer_kwargs = {
"betas": (self.args.adam_beta1, self.args.adam_beta2),
"eps": self.args.adam_epsilon,
}
optimizer_kwargs["lr"] = self.args.learning_rate
if self.sharded_dpp:
self.optimizer = OSS(
params=optimizer_grouped_parameters,
optim=optimizer_cls,
**optimizer_kwargs,
)
else:
self.optimizer = optimizer_cls(optimizer_grouped_parameters, **optimizer_kwargs)
if self.lr_scheduler is None:
self.lr_scheduler = get_scheduler(
self.args.lr_scheduler_type,
self.optimizer,
num_warmup_steps=self.args.warmup_steps,
num_training_steps=num_training_steps,
)
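    # Illustrative alternative (an assumption about typical usage, not prescribed by this file):
    # rather than overriding this method, a ready-made (optimizer, scheduler) tuple can be passed to
    # the Trainer init via ``optimizers``. The hyperparameter values below are made up:
    #
    #     optimizer = AdamW(model.parameters(), lr=5e-5)
    #     lr_scheduler = get_scheduler("linear", optimizer, num_warmup_steps=0, num_training_steps=1000)
    #     trainer = Trainer(model=model, args=args, optimizers=(optimizer, lr_scheduler))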
def num_examples(self, dataloader: DataLoader) -> int:
"""
Helper to get number of samples in a :class:`~torch.utils.data.DataLoader` by accessing its dataset.
        Will raise an exception if the underlying dataset does not implement the method :obj:`__len__`.
"""
return len(dataloader.dataset)
def _hp_search_setup(self, trial: Union["optuna.Trial", Dict[str, Any]]):
""" HP search setup code """
self._trial = trial
if self.hp_search_backend is None or trial is None:
return
params = self.hp_space(trial) if self.hp_search_backend == HPSearchBackend.OPTUNA else trial
for key, value in params.items():
if not hasattr(self.args, key):
raise AttributeError(
f"Trying to set {key} in the hyperparameter search but there is no corresponding field in `TrainingArguments`."
)
old_attr = getattr(self.args, key, None)
# Casting value to the proper type
if old_attr is not None:
value = type(old_attr)(value)
setattr(self.args, key, value)
if self.hp_search_backend == HPSearchBackend.OPTUNA:
logger.info("Trial:", trial.params)
def _report_to_hp_search(
self, trial: Union["optuna.Trial", Dict[str, Any]], epoch: int, metrics: Dict[str, float]
):
if self.hp_search_backend is None or trial is None:
return
self.objective = self.compute_objective(metrics.copy())
if self.hp_search_backend == HPSearchBackend.OPTUNA:
import optuna
trial.report(self.objective, epoch)
if trial.should_prune():
raise optuna.TrialPruned()
elif self.hp_search_backend == HPSearchBackend.RAY:
from ray import tune
if self.state.global_step % self.args.save_steps == 0:
self._tune_save_checkpoint()
tune.report(objective=self.objective, **metrics)
def _tune_save_checkpoint(self):
from ray import tune
if not self.use_tune_checkpoints:
return
with tune.checkpoint_dir(step=self.state.global_step) as checkpoint_dir:
self.args.output_dir = checkpoint_dir
output_dir = os.path.join(self.args.output_dir, f"{PREFIX_CHECKPOINT_DIR}-{self.state.global_step}")
self.save_model(output_dir)
if self.is_world_process_zero():
self.state.save_to_json(os.path.join(output_dir, "trainer_state.json"))
torch.save(self.optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
torch.save(self.lr_scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
def call_model_init(self, trial=None):
model_init_argcount = len(inspect.signature(self.model_init).parameters)
if model_init_argcount == 0:
model = self.model_init()
elif model_init_argcount == 1:
model = self.model_init(trial)
else:
raise RuntimeError("model_init should have 0 or 1 argument.")
if model is None:
raise RuntimeError("model_init should not return None.")
return model
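    # Hedged sketch of a zero-argument ``model_init`` as accepted above (the checkpoint name and
    # label count are only examples):
    #
    #     def model_init():
    #         return AutoModelForSequenceClassification.from_pretrained("bert-base-uncased", num_labels=2)
    #
    #     trainer = Trainer(model_init=model_init, args=args, train_dataset=train_dataset)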
def train(
self,
resume_from_checkpoint: Optional[str] = None,
trial: Union["optuna.Trial", Dict[str, Any]] = None,
**kwargs,
):
"""
Main training entry point.
Args:
resume_from_checkpoint (:obj:`str`, `optional`):
Local path to a saved checkpoint as saved by a previous instance of :class:`~transformers.Trainer`. If
present, training will resume from the model/optimizer/scheduler states loaded here.
trial (:obj:`optuna.Trial` or :obj:`Dict[str, Any]`, `optional`):
The trial run or the hyperparameter dictionary for hyperparameter search.
kwargs:
Additional keyword arguments used to hide deprecated arguments
"""
if "model_path" in kwargs:
resume_from_checkpoint = kwargs.pop("model_path")
warnings.warn(
"`model_path` is deprecated and will be removed in a future version. Use `resume_from_checkpoint` "
"instead.",
FutureWarning,
)
if len(kwargs) > 0:
raise TypeError(f"train() received got unexpected keyword arguments: {', '.join(list(kwargs.keys()))}.")
# This might change the seed so needs to run first.
self._hp_search_setup(trial)
# Model re-init
model_reloaded = False
if self.model_init is not None:
# Seed must be set before instantiating the model when using model_init.
set_seed(self.args.seed)
self.model = self.call_model_init(trial)
model_reloaded = True
# Reinitializes optimizer and scheduler
self.optimizer, self.lr_scheduler = None, None
# Load potential model checkpoint
if resume_from_checkpoint is not None and os.path.isfile(os.path.join(resume_from_checkpoint, WEIGHTS_NAME)):
logger.info(f"Loading model from {resume_from_checkpoint}).")
if isinstance(self.model, PreTrainedModel):
self.model = self.model.from_pretrained(resume_from_checkpoint)
model_reloaded = True
else:
state_dict = torch.load(os.path.join(resume_from_checkpoint, WEIGHTS_NAME))
self.model.load_state_dict(state_dict)
# If model was re-initialized, put it on the right device and update self.model_wrapped
if model_reloaded:
if not self.is_model_parallel:
self.model = self.model.to(self.args.device)
self.model_wrapped = self.model
        # Keeping track of whether we can call len() on the dataset or not
train_dataset_is_sized = isinstance(self.train_dataset, collections.abc.Sized)
# Data loader and number of training steps
train_dataloader = self.get_train_dataloader()
# Setting up training control variables:
# number of training epochs: num_train_epochs
# number of training steps per epoch: num_update_steps_per_epoch
# total number of training steps to execute: max_steps
if train_dataset_is_sized:
num_update_steps_per_epoch = len(train_dataloader) // self.args.gradient_accumulation_steps
num_update_steps_per_epoch = max(num_update_steps_per_epoch, 1)
if self.args.max_steps > 0:
max_steps = self.args.max_steps
num_train_epochs = self.args.max_steps // num_update_steps_per_epoch + int(
self.args.max_steps % num_update_steps_per_epoch > 0
)
else:
max_steps = math.ceil(self.args.num_train_epochs * num_update_steps_per_epoch)
num_train_epochs = math.ceil(self.args.num_train_epochs)
else:
# see __init__. max_steps is set when the dataset has no __len__
max_steps = self.args.max_steps
num_train_epochs = 1
num_update_steps_per_epoch = max_steps
if self.args.deepspeed:
model, optimizer, lr_scheduler = init_deepspeed(self, num_training_steps=max_steps)
self.model = model.module
self.model_wrapped = model # will get further wrapped in DDP
self.deepspeed = model # DeepSpeedEngine object
self.optimizer = optimizer
self.lr_scheduler = lr_scheduler
else:
self.create_optimizer_and_scheduler(num_training_steps=max_steps)
self.state = TrainerState()
self.state.is_hyper_param_search = trial is not None
# Check if saved optimizer or scheduler states exist
self._load_optimizer_and_scheduler(resume_from_checkpoint)
model = self.model_wrapped
# Mixed precision training with apex (torch < 1.6)
if self.use_apex:
model, self.optimizer = amp.initialize(model, self.optimizer, opt_level=self.args.fp16_opt_level)
# Multi-gpu training (should be after apex fp16 initialization)
if self.args.n_gpu > 1:
model = torch.nn.DataParallel(model)
# Distributed training (should be after apex fp16 initialization)
if self.sharded_dpp:
model = ShardedDDP(model, self.optimizer)
elif is_sagemaker_distributed_available():
model = DDP(model, device_ids=[dist.get_local_rank()], broadcast_buffers=False)
elif self.args.local_rank != -1:
if self.args.ddp_find_unused_parameters is not None:
find_unused_parameters = self.args.ddp_find_unused_parameters
elif isinstance(model, PreTrainedModel):
# find_unused_parameters breaks checkpointing as per
# https://github.com/huggingface/transformers/pull/4659#issuecomment-643356021
find_unused_parameters = not getattr(model.config, "gradient_checkpointing", False)
else:
find_unused_parameters = True
model = torch.nn.parallel.DistributedDataParallel(
model,
device_ids=[self.args.local_rank],
output_device=self.args.local_rank,
find_unused_parameters=find_unused_parameters,
)
# for the rest of this function `model` is the outside model, whether it was wrapped or not
if model is not self.model:
self.model_wrapped = model
# important: at this point:
# self.model is the Transformers Model
# self.model_wrapped is DDP(Transformers Model), DDP(Deepspeed(Transformers Model)), etc.
# Train!
if is_torch_tpu_available():
world_size = xm.xrt_world_size()
elif self.args.local_rank != -1:
world_size = dist.get_world_size()
else:
world_size = 1
total_train_batch_size = self.args.train_batch_size * self.args.gradient_accumulation_steps * world_size
num_examples = (
self.num_examples(train_dataloader)
if train_dataset_is_sized
else total_train_batch_size * self.args.max_steps
)
logger.info("***** Running training *****")
logger.info(f" Num examples = {num_examples}")
logger.info(f" Num Epochs = {num_train_epochs}")
logger.info(f" Instantaneous batch size per device = {self.args.per_device_train_batch_size}")
logger.info(f" Total train batch size (w. parallel, distributed & accumulation) = {total_train_batch_size}")
logger.info(f" Gradient Accumulation steps = {self.args.gradient_accumulation_steps}")
logger.info(f" Total optimization steps = {max_steps}")
self.state.epoch = 0
start_time = time.time()
epochs_trained = 0
steps_trained_in_current_epoch = 0
# Check if continuing training from a checkpoint
if resume_from_checkpoint is not None and os.path.isfile(
os.path.join(resume_from_checkpoint, "trainer_state.json")
):
self.state = TrainerState.load_from_json(os.path.join(resume_from_checkpoint, "trainer_state.json"))
epochs_trained = self.state.global_step // num_update_steps_per_epoch
if not self.args.ignore_data_skip:
steps_trained_in_current_epoch = self.state.global_step % (num_update_steps_per_epoch)
steps_trained_in_current_epoch *= self.args.gradient_accumulation_steps
else:
steps_trained_in_current_epoch = 0
logger.info(" Continuing training from checkpoint, will skip to saved global_step")
logger.info(f" Continuing training from epoch {epochs_trained}")
logger.info(f" Continuing training from global step {self.state.global_step}")
if not self.args.ignore_data_skip:
logger.info(
f" Will skip the first {epochs_trained} epochs then the first {steps_trained_in_current_epoch} "
"batches in the first epoch."
)
# Update the references
self.callback_handler.model = self.model
self.callback_handler.optimizer = self.optimizer
self.callback_handler.lr_scheduler = self.lr_scheduler
self.callback_handler.train_dataloader = train_dataloader
self.state.trial_name = self.hp_name(trial) if self.hp_name is not None else None
self.state.trial_params = hp_params(trial) if trial is not None else None
        # These should be the same if the state has been saved, but in case the training arguments changed, it's
        # safer to set them after the load.
self.state.max_steps = max_steps
self.state.num_train_epochs = num_train_epochs
self.state.is_local_process_zero = self.is_local_process_zero()
self.state.is_world_process_zero = self.is_world_process_zero()
# tr_loss is a tensor to avoid synchronization of TPUs through .item()
tr_loss = torch.tensor(0.0).to(self.args.device)
        # _total_loss_scalar is updated every time .item() has to be called on tr_loss and stores the sum of all losses
self._total_loss_scalar = 0.0
self._globalstep_last_logged = self.state.global_step
self._total_flos = self.state.total_flos
model.zero_grad()
self.control = self.callback_handler.on_train_begin(self.args, self.state, self.control)
# Skip the first epochs_trained epochs to get the random state of the dataloader at the right point.
if not self.args.ignore_data_skip:
for epoch in range(epochs_trained):
# We just need to begin an iteration to create the randomization of the sampler.
for _ in train_dataloader:
break
for epoch in range(epochs_trained, num_train_epochs):
if isinstance(train_dataloader, DataLoader) and isinstance(train_dataloader.sampler, DistributedSampler):
train_dataloader.sampler.set_epoch(epoch)
if is_torch_tpu_available():
parallel_loader = pl.ParallelLoader(train_dataloader, [self.args.device]).per_device_loader(
self.args.device
)
epoch_iterator = parallel_loader
else:
epoch_iterator = train_dataloader
# Reset the past mems state at the beginning of each epoch if necessary.
if self.args.past_index >= 0:
self._past = None
steps_in_epoch = len(epoch_iterator) if train_dataset_is_sized else self.args.max_steps
self.control = self.callback_handler.on_epoch_begin(self.args, self.state, self.control)
for step, inputs in enumerate(epoch_iterator):
# Skip past any already trained steps if resuming training
if steps_trained_in_current_epoch > 0:
steps_trained_in_current_epoch -= 1
continue
if (step + 1) % self.args.gradient_accumulation_steps == 0:
self.control = self.callback_handler.on_step_begin(self.args, self.state, self.control)
if ((step + 1) % self.args.gradient_accumulation_steps != 0) and self.args.local_rank != -1:
# Avoid unnecessary DDP synchronization since there will be no backward pass on this example.
with model.no_sync():
tr_loss += self.training_step(model, inputs)
else:
tr_loss += self.training_step(model, inputs)
self._total_flos += self.floating_point_ops(inputs)
if (step + 1) % self.args.gradient_accumulation_steps == 0 or (
# last step in epoch but step is always smaller than gradient_accumulation_steps
steps_in_epoch <= self.args.gradient_accumulation_steps
and (step + 1) == steps_in_epoch
):
# Gradient clipping
if self.args.max_grad_norm is not None and self.args.max_grad_norm > 0 and not self.deepspeed:
# deepspeed does its own clipping
if self.use_amp:
# AMP: gradients need unscaling
self.scaler.unscale_(self.optimizer)
if hasattr(self.optimizer, "clip_grad_norm"):
# Some optimizers (like the sharded optimizer) have a specific way to do gradient clipping
self.optimizer.clip_grad_norm(self.args.max_grad_norm)
else:
# Revert to normal clipping otherwise, handling Apex or full precision
torch.nn.utils.clip_grad_norm_(
amp.master_params(self.optimizer) if self.use_apex else model.parameters(),
self.args.max_grad_norm,
)
# Optimizer step
if self.deepspeed:
self.deepspeed.step()
elif is_torch_tpu_available():
xm.optimizer_step(self.optimizer)
elif self.use_amp:
self.scaler.step(self.optimizer)
self.scaler.update()
else:
self.optimizer.step()
self.lr_scheduler.step()
model.zero_grad()
self.state.global_step += 1
self.state.epoch = epoch + (step + 1) / steps_in_epoch
self.control = self.callback_handler.on_step_end(self.args, self.state, self.control)
self._maybe_log_save_evaluate(tr_loss, model, trial, epoch)
if self.control.should_epoch_stop or self.control.should_training_stop:
break
self.control = self.callback_handler.on_epoch_end(self.args, self.state, self.control)
self._maybe_log_save_evaluate(tr_loss, model, trial, epoch)
if self.args.tpu_metrics_debug or self.args.debug:
if is_torch_tpu_available():
# tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)
xm.master_print(met.metrics_report())
else:
logger.warning(
"You enabled PyTorch/XLA debug metrics but you don't have a TPU "
"configured. Check your training configuration if this is unexpected."
)
if self.control.should_training_stop:
break
if self.args.past_index and hasattr(self, "_past"):
# Clean the state at the end of training
delattr(self, "_past")
logger.info("\n\nTraining completed. Do not forget to share your model on huggingface.co/models =)\n\n")
if self.args.load_best_model_at_end and self.state.best_model_checkpoint is not None:
logger.info(
f"Loading best model from {self.state.best_model_checkpoint} (score: {self.state.best_metric})."
)
if isinstance(self.model, PreTrainedModel):
self.model = self.model.from_pretrained(self.state.best_model_checkpoint)
if not self.is_model_parallel:
self.model = self.model.to(self.args.device)
else:
state_dict = torch.load(os.path.join(self.state.best_model_checkpoint, WEIGHTS_NAME))
self.model.load_state_dict(state_dict)
if self.deepspeed:
self.deepspeed.load_checkpoint(
self.state.best_model_checkpoint, load_optimizer_states=False, load_lr_scheduler_states=False
)
metrics = speed_metrics("train", start_time, self.state.max_steps)
if self._total_flos is not None:
self.store_flos()
metrics["total_flos"] = self.state.total_flos
self.log(metrics)
self.control = self.callback_handler.on_train_end(self.args, self.state, self.control)
# add remaining tr_loss
self._total_loss_scalar += tr_loss.item()
return TrainOutput(self.state.global_step, self._total_loss_scalar / self.state.global_step, metrics)
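    # Usage sketch (assumed, not part of the original file): resuming training from a checkpoint
    # directory written by a previous run; the path below is made up:
    #
    #     trainer.train(resume_from_checkpoint="output/checkpoint-500")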
def _maybe_log_save_evaluate(self, tr_loss, model, trial, epoch):
if self.control.should_log:
logs: Dict[str, float] = {}
tr_loss_scalar = tr_loss.item()
# reset tr_loss to zero
tr_loss -= tr_loss
logs["loss"] = round(tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged), 4)
# backward compatibility for pytorch schedulers
logs["learning_rate"] = (
self.lr_scheduler.get_last_lr()[0]
if version.parse(torch.__version__) >= version.parse("1.4")
else self.lr_scheduler.get_lr()[0]
)
self._total_loss_scalar += tr_loss_scalar
self._globalstep_last_logged = self.state.global_step
self.log(logs)
metrics = None
if self.control.should_evaluate:
metrics = self.evaluate()
self._report_to_hp_search(trial, epoch, metrics)
if self.control.should_save:
self._save_checkpoint(model, trial, metrics=metrics)
self.control = self.callback_handler.on_save(self.args, self.state, self.control)
def _save_checkpoint(self, model, trial, metrics=None):
# In all cases, including ddp/dp/deepspeed, self.model is always a reference to the model we
# want to save.
assert _model_unwrap(model) is self.model, "internal model should be a reference to self.model"
# Save model checkpoint
checkpoint_folder = f"{PREFIX_CHECKPOINT_DIR}-{self.state.global_step}"
if self.hp_search_backend is not None and trial is not None:
if self.hp_search_backend == HPSearchBackend.OPTUNA:
run_id = trial.number
else:
from ray import tune
run_id = tune.get_trial_id()
run_name = self.hp_name(trial) if self.hp_name is not None else f"run-{run_id}"
output_dir = os.path.join(self.args.output_dir, run_name, checkpoint_folder)
else:
output_dir = os.path.join(self.args.output_dir, checkpoint_folder)
self.store_flos()
self.save_model(output_dir)
if self.deepspeed:
self.deepspeed.save_checkpoint(output_dir)
# Save optimizer and scheduler
if self.sharded_dpp:
self.optimizer.consolidate_state_dict()
if is_torch_tpu_available():
xm.rendezvous("saving_optimizer_states")
xm.save(self.optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
with warnings.catch_warnings(record=True) as caught_warnings:
xm.save(self.lr_scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
reissue_pt_warnings(caught_warnings)
elif self.is_world_process_zero() and not self.deepspeed:
# deepspeed.save_checkpoint above saves model/optim/sched
torch.save(self.optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
with warnings.catch_warnings(record=True) as caught_warnings:
torch.save(self.lr_scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
reissue_pt_warnings(caught_warnings)
# Determine the new best metric / best model checkpoint
if metrics is not None and self.args.metric_for_best_model is not None:
metric_to_check = self.args.metric_for_best_model
if not metric_to_check.startswith("eval_"):
metric_to_check = f"eval_{metric_to_check}"
metric_value = metrics[metric_to_check]
operator = np.greater if self.args.greater_is_better else np.less
if (
self.state.best_metric is None
or self.state.best_model_checkpoint is None
or operator(metric_value, self.state.best_metric)
):
self.state.best_metric = metric_value
self.state.best_model_checkpoint = output_dir
# Save the Trainer state
if self.is_world_process_zero():
self.state.save_to_json(os.path.join(output_dir, "trainer_state.json"))
# Maybe delete some older checkpoints.
if self.is_world_process_zero():
self._rotate_checkpoints(use_mtime=True)
def _load_optimizer_and_scheduler(self, checkpoint):
"""If optimizer and scheduler states exist, load them."""
if checkpoint is None:
return
if os.path.isfile(os.path.join(checkpoint, "optimizer.pt")) and os.path.isfile(
os.path.join(checkpoint, "scheduler.pt")
):
# Load in optimizer and scheduler states
if is_torch_tpu_available():
# On TPU we have to take some extra precautions to properly load the states on the right device.
optimizer_state = torch.load(os.path.join(checkpoint, "optimizer.pt"), map_location="cpu")
with warnings.catch_warnings(record=True) as caught_warnings:
lr_scheduler_state = torch.load(os.path.join(checkpoint, "scheduler.pt"), map_location="cpu")
reissue_pt_warnings(caught_warnings)
xm.send_cpu_data_to_device(optimizer_state, self.args.device)
xm.send_cpu_data_to_device(lr_scheduler_state, self.args.device)
self.optimizer.load_state_dict(optimizer_state)
self.lr_scheduler.load_state_dict(lr_scheduler_state)
else:
self.optimizer.load_state_dict(
torch.load(os.path.join(checkpoint, "optimizer.pt"), map_location=self.args.device)
)
with warnings.catch_warnings(record=True) as caught_warnings:
self.lr_scheduler.load_state_dict(torch.load(os.path.join(checkpoint, "scheduler.pt")))
reissue_pt_warnings(caught_warnings)
if self.deepspeed:
            # There is no clean way to check whether a deepspeed checkpoint exists; load_checkpoint simply returns
            # None when it cannot find one, so this call acts as a combined check-and-load.
self.deepspeed.load_checkpoint(checkpoint, load_optimizer_states=True, load_lr_scheduler_states=True)
def hyperparameter_search(
self,
hp_space: Optional[Callable[["optuna.Trial"], Dict[str, float]]] = None,
compute_objective: Optional[Callable[[Dict[str, float]], float]] = None,
n_trials: int = 20,
direction: str = "minimize",
backend: Optional[Union["str", HPSearchBackend]] = None,
hp_name: Optional[Callable[["optuna.Trial"], str]] = None,
**kwargs,
) -> BestRun:
"""
        Launch a hyperparameter search using ``optuna`` or ``Ray Tune``. The optimized quantity is determined by
:obj:`compute_objective`, which defaults to a function returning the evaluation loss when no metric is
provided, the sum of all metrics otherwise.
.. warning::
To use this method, you need to have provided a ``model_init`` when initializing your
:class:`~transformers.Trainer`: we need to reinitialize the model at each new run. This is incompatible
with the ``optimizers`` argument, so you need to subclass :class:`~transformers.Trainer` and override the
method :meth:`~transformers.Trainer.create_optimizer_and_scheduler` for custom optimizer/scheduler.
Args:
hp_space (:obj:`Callable[["optuna.Trial"], Dict[str, float]]`, `optional`):
A function that defines the hyperparameter search space. Will default to
:func:`~transformers.trainer_utils.default_hp_space_optuna` or
:func:`~transformers.trainer_utils.default_hp_space_ray` depending on your backend.
compute_objective (:obj:`Callable[[Dict[str, float]], float]`, `optional`):
A function computing the objective to minimize or maximize from the metrics returned by the
:obj:`evaluate` method. Will default to :func:`~transformers.trainer_utils.default_compute_objective`.
            n_trials (:obj:`int`, `optional`, defaults to 20):
The number of trial runs to test.
direction(:obj:`str`, `optional`, defaults to :obj:`"minimize"`):
                Whether to optimize for a greater or lower objective value. Can be :obj:`"minimize"` or :obj:`"maximize"`; you should
pick :obj:`"minimize"` when optimizing the validation loss, :obj:`"maximize"` when optimizing one or
several metrics.
backend(:obj:`str` or :class:`~transformers.training_utils.HPSearchBackend`, `optional`):
The backend to use for hyperparameter search. Will default to optuna or Ray Tune, depending on which
one is installed. If both are installed, will default to optuna.
kwargs:
Additional keyword arguments passed along to :obj:`optuna.create_study` or :obj:`ray.tune.run`. For
more information see:
- the documentation of `optuna.create_study
<https://optuna.readthedocs.io/en/stable/reference/alias_generated/optuna.create_study.html#optuna.create_study>`__
- the documentation of `tune.run
<https://docs.ray.io/en/latest/tune/api_docs/execution.html#tune-run>`__
Returns:
:class:`transformers.trainer_utils.BestRun`: All the information about the best run.
"""
if backend is None:
backend = default_hp_search_backend()
if backend is None:
raise RuntimeError(
"At least one of optuna or ray should be installed. "
"To install optuna run `pip install optuna`."
"To install ray run `pip install ray[tune]`."
)
backend = HPSearchBackend(backend)
if backend == HPSearchBackend.OPTUNA and not is_optuna_available():
raise RuntimeError("You picked the optuna backend, but it is not installed. Use `pip install optuna`.")
if backend == HPSearchBackend.RAY and not is_ray_tune_available():
raise RuntimeError(
"You picked the Ray Tune backend, but it is not installed. Use `pip install 'ray[tune]'`."
)
self.hp_search_backend = backend
if self.model_init is None:
raise RuntimeError(
"To use hyperparameter search, you need to pass your model through a model_init function."
)
self.hp_space = default_hp_space[backend] if hp_space is None else hp_space
self.hp_name = hp_name
self.compute_objective = default_compute_objective if compute_objective is None else compute_objective
run_hp_search = run_hp_search_optuna if backend == HPSearchBackend.OPTUNA else run_hp_search_ray
best_run = run_hp_search(self, n_trials, direction, **kwargs)
self.hp_search_backend = None
return best_run
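    # A minimal optuna search-space sketch for ``hp_space`` (hypothetical names and ranges; assumes
    # ``pip install optuna``):
    #
    #     def my_hp_space(trial):
    #         return {
    #             "learning_rate": trial.suggest_float("learning_rate", 1e-5, 1e-3, log=True),
    #             "num_train_epochs": trial.suggest_int("num_train_epochs", 1, 5),
    #         }
    #
    #     best_run = trainer.hyperparameter_search(hp_space=my_hp_space, n_trials=10, direction="minimize")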
def log(self, logs: Dict[str, float]) -> None:
"""
Log :obj:`logs` on the various objects watching training.
Subclass and override this method to inject custom behavior.
Args:
logs (:obj:`Dict[str, float]`):
The values to log.
"""
if self.state.epoch is not None:
logs["epoch"] = round(self.state.epoch, 2)
output = {**logs, **{"step": self.state.global_step}}
self.state.log_history.append(output)
self.control = self.callback_handler.on_log(self.args, self.state, self.control, logs)
def _prepare_inputs(self, inputs: Dict[str, Union[torch.Tensor, Any]]) -> Dict[str, Union[torch.Tensor, Any]]:
"""
Prepare :obj:`inputs` before feeding them to the model, converting them to tensors if they are not already and
handling potential state.
"""
for k, v in inputs.items():
if isinstance(v, torch.Tensor):
inputs[k] = v.to(self.args.device)
if self.args.past_index >= 0 and self._past is not None:
inputs["mems"] = self._past
return inputs
def training_step(self, model: nn.Module, inputs: Dict[str, Union[torch.Tensor, Any]]) -> torch.Tensor:
"""
Perform a training step on a batch of inputs.
Subclass and override to inject custom behavior.
Args:
model (:obj:`nn.Module`):
The model to train.
inputs (:obj:`Dict[str, Union[torch.Tensor, Any]]`):
The inputs and targets of the model.
The dictionary will be unpacked before being fed to the model. Most models expect the targets under the
argument :obj:`labels`. Check your model's documentation for all accepted arguments.
Return:
:obj:`torch.Tensor`: The tensor with training loss on this batch.
"""
model.train()
inputs = self._prepare_inputs(inputs)
if self.use_amp:
with autocast():
loss = self.compute_loss(model, inputs)
else:
loss = self.compute_loss(model, inputs)
if self.args.n_gpu > 1:
loss = loss.mean() # mean() to average on multi-gpu parallel training
if self.args.gradient_accumulation_steps > 1:
loss = loss / self.args.gradient_accumulation_steps
if self.use_amp:
self.scaler.scale(loss).backward()
elif self.use_apex:
with amp.scale_loss(loss, self.optimizer) as scaled_loss:
scaled_loss.backward()
elif self.deepspeed:
self.deepspeed.backward(loss)
else:
loss.backward()
return loss.detach()
def compute_loss(self, model, inputs, return_outputs=False):
"""
How the loss is computed by Trainer. By default, all models return the loss in the first element.
Subclass and override for custom behavior.
"""
if self.label_smoother is not None and "labels" in inputs:
labels = inputs.pop("labels")
else:
labels = None
outputs = model(**inputs)
# Save past state if it exists
# TODO: this needs to be fixed and made cleaner later.
if self.args.past_index >= 0:
self._past = outputs[self.args.past_index]
if labels is not None:
loss = self.label_smoother(outputs, labels)
else:
# We don't use .loss here since the model may return tuples instead of ModelOutput.
loss = outputs["loss"] if isinstance(outputs, dict) else outputs[0]
return (loss, outputs) if return_outputs else loss
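    # Override sketch (a common pattern, not mandated by this file): a subclass computing a
    # class-weighted cross-entropy loss; ``class_weights`` is a hypothetical tensor of per-class weights:
    #
    #     class WeightedLossTrainer(Trainer):
    #         def compute_loss(self, model, inputs, return_outputs=False):
    #             labels = inputs.pop("labels")
    #             outputs = model(**inputs)
    #             logits = outputs.logits
    #             loss_fct = nn.CrossEntropyLoss(weight=class_weights)
    #             loss = loss_fct(logits.view(-1, logits.size(-1)), labels.view(-1))
    #             return (loss, outputs) if return_outputs else loss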
def is_local_process_zero(self) -> bool:
"""
Whether or not this process is the local (e.g., on one machine if training in a distributed fashion on several
machines) main process.
"""
if is_torch_tpu_available():
return xm.is_master_ordinal(local=True)
else:
return self.args.local_rank in [-1, 0]
def is_world_process_zero(self) -> bool:
"""
Whether or not this process is the global main process (when training in a distributed fashion on several
machines, this is only going to be :obj:`True` for one process).
"""
if is_torch_tpu_available():
return xm.is_master_ordinal(local=False)
else:
return self.args.local_rank == -1 or dist.get_rank() == 0
def save_model(self, output_dir: Optional[str] = None):
"""
Will save the model, so you can reload it using :obj:`from_pretrained()`.
        Will only save from the world main process (on TPUs, a dedicated save path is used instead).
"""
if is_torch_tpu_available():
self._save_tpu(output_dir)
elif self.is_world_process_zero():
self._save(output_dir)
# If on sagemaker and we are saving the main model (not a checkpoint so output_dir=None), save a copy to
# SM_MODEL_DIR for easy deployment.
if output_dir is None and os.getenv("SM_MODEL_DIR") is not None:
self.save_model(output_dir=os.getenv("SM_MODEL_DIR"))
def _save_tpu(self, output_dir: Optional[str] = None):
output_dir = output_dir if output_dir is not None else self.args.output_dir
logger.info("Saving model checkpoint to %s", output_dir)
if xm.is_master_ordinal():
os.makedirs(output_dir, exist_ok=True)
torch.save(self.args, os.path.join(output_dir, "training_args.bin"))
# Save a trained model and configuration using `save_pretrained()`.
# They can then be reloaded using `from_pretrained()`
xm.rendezvous("saving_checkpoint")
if not isinstance(self.model, PreTrainedModel):
logger.info("Trainer.model is not a `PreTrainedModel`, only saving its state dict.")
state_dict = self.model.state_dict()
xm.save(state_dict, os.path.join(output_dir, WEIGHTS_NAME))
else:
self.model.save_pretrained(output_dir)
if self.tokenizer is not None and self.is_world_process_zero():
self.tokenizer.save_pretrained(output_dir)
def _save(self, output_dir: Optional[str] = None):
output_dir = output_dir if output_dir is not None else self.args.output_dir
os.makedirs(output_dir, exist_ok=True)
logger.info("Saving model checkpoint to %s", output_dir)
# Save a trained model and configuration using `save_pretrained()`.
# They can then be reloaded using `from_pretrained()`
if not isinstance(self.model, PreTrainedModel):
logger.info("Trainer.model is not a `PreTrainedModel`, only saving its state dict.")
state_dict = self.model.state_dict()
torch.save(state_dict, os.path.join(output_dir, WEIGHTS_NAME))
else:
self.model.save_pretrained(output_dir)
if self.tokenizer is not None and self.is_world_process_zero():
self.tokenizer.save_pretrained(output_dir)
# Good practice: save your training arguments together with the trained model
torch.save(self.args, os.path.join(output_dir, "training_args.bin"))
def store_flos(self):
# Storing the number of floating-point operations that went into the model
if self._total_flos is not None:
if self.args.local_rank != -1:
self.state.total_flos = distributed_broadcast_scalars([self._total_flos]).sum().item()
else:
self.state.total_flos = self._total_flos
def _sorted_checkpoints(self, checkpoint_prefix=PREFIX_CHECKPOINT_DIR, use_mtime=False) -> List[str]:
ordering_and_checkpoint_path = []
glob_checkpoints = [str(x) for x in Path(self.args.output_dir).glob(f"{checkpoint_prefix}-*")]
for path in glob_checkpoints:
if use_mtime:
ordering_and_checkpoint_path.append((os.path.getmtime(path), path))
else:
regex_match = re.match(f".*{checkpoint_prefix}-([0-9]+)", path)
if regex_match and regex_match.groups():
ordering_and_checkpoint_path.append((int(regex_match.groups()[0]), path))
checkpoints_sorted = sorted(ordering_and_checkpoint_path)
checkpoints_sorted = [checkpoint[1] for checkpoint in checkpoints_sorted]
# Make sure we don't delete the best model.
if self.state.best_model_checkpoint is not None:
best_model_index = checkpoints_sorted.index(str(Path(self.state.best_model_checkpoint)))
checkpoints_sorted[best_model_index], checkpoints_sorted[-1] = (
checkpoints_sorted[-1],
checkpoints_sorted[best_model_index],
)
return checkpoints_sorted
def _rotate_checkpoints(self, use_mtime=False) -> None:
if self.args.save_total_limit is None or self.args.save_total_limit <= 0:
return
# Check if we should delete older checkpoint(s)
checkpoints_sorted = self._sorted_checkpoints(use_mtime=use_mtime)
if len(checkpoints_sorted) <= self.args.save_total_limit:
return
number_of_checkpoints_to_delete = max(0, len(checkpoints_sorted) - self.args.save_total_limit)
checkpoints_to_be_deleted = checkpoints_sorted[:number_of_checkpoints_to_delete]
for checkpoint in checkpoints_to_be_deleted:
logger.info("Deleting older checkpoint [{}] due to args.save_total_limit".format(checkpoint))
shutil.rmtree(checkpoint)
def evaluate(
self,
eval_dataset: Optional[Dataset] = None,
ignore_keys: Optional[List[str]] = None,
metric_key_prefix: str = "eval",
) -> Dict[str, float]:
"""
Run evaluation and returns metrics.
The calling script will be responsible for providing a method to compute metrics, as they are task-dependent
(pass it to the init :obj:`compute_metrics` argument).
You can also subclass and override this method to inject custom behavior.
Args:
eval_dataset (:obj:`Dataset`, `optional`):
Pass a dataset if you wish to override :obj:`self.eval_dataset`. If it is an :obj:`datasets.Dataset`,
columns not accepted by the ``model.forward()`` method are automatically removed. It must implement the
:obj:`__len__` method.
            ignore_keys (:obj:`List[str]`, `optional`):
A list of keys in the output of your model (if it is a dictionary) that should be ignored when
gathering predictions.
metric_key_prefix (:obj:`str`, `optional`, defaults to :obj:`"eval"`):
An optional prefix to be used as the metrics key prefix. For example the metrics "bleu" will be named
"eval_bleu" if the prefix is "eval" (default)
Returns:
A dictionary containing the evaluation loss and the potential metrics computed from the predictions. The
dictionary also contains the epoch number which comes from the training state.
"""
if eval_dataset is not None and not isinstance(eval_dataset, collections.abc.Sized):
raise ValueError("eval_dataset must implement __len__")
eval_dataloader = self.get_eval_dataloader(eval_dataset)
start_time = time.time()
output = self.prediction_loop(
eval_dataloader,
description="Evaluation",
# No point gathering the predictions if there are no metrics, otherwise we defer to
# self.args.prediction_loss_only
prediction_loss_only=True if self.compute_metrics is None else None,
ignore_keys=ignore_keys,
metric_key_prefix=metric_key_prefix,
)
n_samples = len(eval_dataset if eval_dataset is not None else self.eval_dataset)
output.metrics.update(speed_metrics(metric_key_prefix, start_time, n_samples))
self.log(output.metrics)
if self.args.tpu_metrics_debug or self.args.debug:
# tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)
xm.master_print(met.metrics_report())
self.control = self.callback_handler.on_evaluate(self.args, self.state, self.control, output.metrics)
return output.metrics
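    # Usage sketch (assumed): returned metrics carry the ``metric_key_prefix``, so the evaluation
    # loss lands under "eval_loss":
    #
    #     metrics = trainer.evaluate()
    #     print(metrics["eval_loss"])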
def predict(
self, test_dataset: Dataset, ignore_keys: Optional[List[str]] = None, metric_key_prefix: str = "eval"
) -> PredictionOutput:
"""
Run prediction and returns predictions and potential metrics.
Depending on the dataset and your use case, your test dataset may contain labels. In that case, this method
will also return metrics, like in :obj:`evaluate()`.
Args:
test_dataset (:obj:`Dataset`):
Dataset to run the predictions on. If it is an :obj:`datasets.Dataset`, columns not accepted by the
``model.forward()`` method are automatically removed. Has to implement the method :obj:`__len__`
            ignore_keys (:obj:`List[str]`, `optional`):
A list of keys in the output of your model (if it is a dictionary) that should be ignored when
gathering predictions.
metric_key_prefix (:obj:`str`, `optional`, defaults to :obj:`"eval"`):
An optional prefix to be used as the metrics key prefix. For example the metrics "bleu" will be named
"eval_bleu" if the prefix is "eval" (default)
.. note::
If your predictions or labels have different sequence length (for instance because you're doing dynamic
padding in a token classification task) the predictions will be padded (on the right) to allow for
concatenation into one array. The padding index is -100.
        Returns: `NamedTuple`: A namedtuple with the following keys:
- predictions (:obj:`np.ndarray`): The predictions on :obj:`test_dataset`.
- label_ids (:obj:`np.ndarray`, `optional`): The labels (if the dataset contained some).
- metrics (:obj:`Dict[str, float]`, `optional`): The potential dictionary of metrics (if the dataset
contained labels).
"""
if test_dataset is not None and not isinstance(test_dataset, collections.abc.Sized):
raise ValueError("test_dataset must implement __len__")
test_dataloader = self.get_test_dataloader(test_dataset)
start_time = time.time()
output = self.prediction_loop(
test_dataloader, description="Prediction", ignore_keys=ignore_keys, metric_key_prefix=metric_key_prefix
)
output.metrics.update(speed_metrics(metric_key_prefix, start_time, len(test_dataset)))
return output
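    # Usage sketch (assumed; ``test_dataset`` is hypothetical): turning the returned logits into
    # label predictions with numpy:
    #
    #     output = trainer.predict(test_dataset)
    #     preds = np.argmax(output.predictions, axis=-1)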
def prediction_loop(
self,
dataloader: DataLoader,
description: str,
prediction_loss_only: Optional[bool] = None,
ignore_keys: Optional[List[str]] = None,
metric_key_prefix: str = "eval",
) -> PredictionOutput:
"""
Prediction/evaluation loop, shared by :obj:`Trainer.evaluate()` and :obj:`Trainer.predict()`.
Works both with or without labels.
"""
if not isinstance(dataloader.dataset, collections.abc.Sized):
raise ValueError("dataset must implement __len__")
prediction_loss_only = (
prediction_loss_only if prediction_loss_only is not None else self.args.prediction_loss_only
)
model = self.model
# multi-gpu eval
if self.args.n_gpu > 1:
model = torch.nn.DataParallel(model)
# Note: in torch.distributed mode, there's no point in wrapping the model
# inside a DistributedDataParallel as we'll be under `no_grad` anyways.
batch_size = dataloader.batch_size
num_examples = self.num_examples(dataloader)
logger.info("***** Running %s *****", description)
logger.info(" Num examples = %d", num_examples)
logger.info(" Batch size = %d", batch_size)
losses_host: torch.Tensor = None
preds_host: Union[torch.Tensor, List[torch.Tensor]] = None
labels_host: Union[torch.Tensor, List[torch.Tensor]] = None
world_size = 1
if is_torch_tpu_available():
world_size = xm.xrt_world_size()
elif self.args.local_rank != -1:
world_size = dist.get_world_size()
world_size = max(1, world_size)
eval_losses_gatherer = DistributedTensorGatherer(world_size, num_examples, make_multiple_of=batch_size)
if not prediction_loss_only:
preds_gatherer = DistributedTensorGatherer(world_size, num_examples)
labels_gatherer = DistributedTensorGatherer(world_size, num_examples)
model.eval()
if is_torch_tpu_available():
dataloader = pl.ParallelLoader(dataloader, [self.args.device]).per_device_loader(self.args.device)
if self.args.past_index >= 0:
self._past = None
self.callback_handler.eval_dataloader = dataloader
for step, inputs in enumerate(dataloader):
loss, logits, labels = self.prediction_step(model, inputs, prediction_loss_only, ignore_keys=ignore_keys)
if loss is not None:
losses = loss.repeat(batch_size)
losses_host = losses if losses_host is None else torch.cat((losses_host, losses), dim=0)
if logits is not None:
preds_host = logits if preds_host is None else nested_concat(preds_host, logits, padding_index=-100)
if labels is not None:
labels_host = labels if labels_host is None else nested_concat(labels_host, labels, padding_index=-100)
self.control = self.callback_handler.on_prediction_step(self.args, self.state, self.control)
# Gather all tensors and put them back on the CPU if we have done enough accumulation steps.
if self.args.eval_accumulation_steps is not None and (step + 1) % self.args.eval_accumulation_steps == 0:
eval_losses_gatherer.add_arrays(self._gather_and_numpify(losses_host, "eval_losses"))
if not prediction_loss_only:
preds_gatherer.add_arrays(self._gather_and_numpify(preds_host, "eval_preds"))
labels_gatherer.add_arrays(self._gather_and_numpify(labels_host, "eval_label_ids"))
# Set back to None to begin a new accumulation
losses_host, preds_host, labels_host = None, None, None
if self.args.past_index and hasattr(self, "_past"):
# Clean the state at the end of the evaluation loop
delattr(self, "_past")
# Gather all remaining tensors and put them back on the CPU
eval_losses_gatherer.add_arrays(self._gather_and_numpify(losses_host, "eval_losses"))
if not prediction_loss_only:
preds_gatherer.add_arrays(self._gather_and_numpify(preds_host, "eval_preds"))
labels_gatherer.add_arrays(self._gather_and_numpify(labels_host, "eval_label_ids"))
eval_loss = eval_losses_gatherer.finalize()
preds = preds_gatherer.finalize() if not prediction_loss_only else None
label_ids = labels_gatherer.finalize() if not prediction_loss_only else None
if self.compute_metrics is not None and preds is not None and label_ids is not None:
metrics = self.compute_metrics(EvalPrediction(predictions=preds, label_ids=label_ids))
else:
metrics = {}
if eval_loss is not None:
metrics[f"{metric_key_prefix}_loss"] = eval_loss.mean().item()
# Prefix all keys with metric_key_prefix + '_'
for key in list(metrics.keys()):
if not key.startswith(f"{metric_key_prefix}_"):
metrics[f"{metric_key_prefix}_{key}"] = metrics.pop(key)
return PredictionOutput(predictions=preds, label_ids=label_ids, metrics=metrics)
def _gather_and_numpify(self, tensors, name):
"""
|
prediction_step(
self,
model: nn.Module,
inputs: Dict[str, Union[torch.Tensor, Any]],
prediction_loss_only: bool,
ignore_keys: Optional[List[str]] = None,
) -> Tuple[Optional[float], Optional[torch.Tensor], Optional[torch.Tensor]]:
"""
        Perform an evaluation step on :obj:`model` using :obj:`inputs`.
Subclass and override to inject custom behavior.
Args:
model (:obj:`nn.Module`):
The model to evaluate.
inputs (:obj:`Dict[str, Union[torch.Tensor, Any]]`):
The inputs and targets of the model.
The dictionary will be unpacked before being fed to the model. Most models expect the targets under the
argument :obj:`labels`. Check your model's documentation for all accepted arguments.
prediction_loss_only (:obj:`bool`):
Whether or not to return the loss only.
            ignore_keys (:obj:`List[str]`, `optional`):
A list of keys in the output of your model (if it is a dictionary) that should be ignored when
gathering predictions.
Return:
Tuple[Optional[float], Optional[torch.Tensor], Optional[torch.Tensor]]: A tuple with the loss, logits and
labels (each being optional).
"""
has_labels = all(inputs.get(k) is not None for k in self.label_names)
inputs = self._prepare_inputs(inputs)
if ignore_keys is None:
if hasattr(self.model, "config"):
ignore_keys = getattr(self.model.config, "keys_to_ignore_at_inference", [])
else:
ignore_keys = []
with torch.no_grad():
if has_labels:
loss, outputs = self.compute_loss(model, inputs, return_outputs=True)
loss = loss.mean().detach()
if isinstance(outputs, dict):
logits = tuple(v for k, v in outputs.items() if k not in ignore_keys + ["loss"])
else:
logits = outputs[1:]
else:
loss = None
if self.use_amp:
with autocast():
outputs = model(**inputs)
else:
outputs = model(**inputs)
if isinstance(outputs, dict):
logits = tuple(v for k, v in outputs.items() if k not in ignore_keys)
else:
logits = outputs
# TODO: this needs to be fixed and made cleaner later.
if self.args.past_index >= 0:
self._past = outputs[self.args.past_index - 1]
if prediction_loss_only:
return (loss, None, None)
logits = nested_detach(logits)
if len(logits) == 1:
logits = logits[0]
if has_labels:
labels = nested_detach(tuple(inputs.get(name) for name in self.label_names))
if len(labels) == 1:
labels = labels[0]
else:
labels = None
return (loss, logits, labels)
def floating_point_ops(self, inputs: Dict[str, Union[torch.Tensor, Any]]):
"""
For models that inherit from :class:`~transformers.PreTrainedModel`, uses that method to compute the number of
floating point operations for every backward + forward pass. If using another model, either implement such a
method in the model or subclass and override this method.
Args:
inputs (:obj:`Dict[str, Union[torch.Tensor, Any]]`):
The inputs and targets of the model.
Returns:
:obj:`int`: The number of floating-point operations.
"""
if hasattr(self.model, "floating_point_ops"):
return self.model.floating_point_ops(inputs)
else:
return 0
|
Gather value of `tensors` (tensor or list/tuple of nested tensors) and convert them to numpy before
concatenating them to `gathered`
"""
if tensors is None:
return
if is_torch_tpu_available():
tensors = nested_xla_mesh_reduce(tensors, name)
elif self.args.local_rank != -1:
tensors = distributed_concat(tensors)
return nested_numpify(tensors)
def
|
datasets.py
|
# Copyright 2021 The MT3 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dataset configurations."""
import dataclasses
from typing import Mapping, Sequence, Union
from mt3 import note_sequences
import tensorflow as tf
@dataclasses.dataclass
class InferEvalSplit:
# key in dictionary containing all dataset splits
|
@dataclasses.dataclass
class DatasetConfig:
"""Configuration for a transcription dataset."""
# dataset name
name: str
# mapping from split name to path
paths: Mapping[str, str]
# mapping from feature name to feature
features: Mapping[str, Union[tf.io.FixedLenFeature,
tf.io.FixedLenSequenceFeature]]
# training split name
train_split: str
# training eval split name
train_eval_split: str
# list of infer eval split specs
infer_eval_splits: Sequence[InferEvalSplit]
# list of track specs to be used for metrics
track_specs: Sequence[note_sequences.TrackSpec] = dataclasses.field(
default_factory=list)
MAESTROV1_CONFIG = DatasetConfig(
name='maestrov1',
paths={
'train':
'gs://magentadata/datasets/maestro/v1.0.0/maestro-v1.0.0_ns_wav_train.tfrecord-?????-of-00010',
'train_subset':
'gs://magentadata/datasets/maestro/v1.0.0/maestro-v1.0.0_ns_wav_train.tfrecord-00002-of-00010',
'validation':
'gs://magentadata/datasets/maestro/v1.0.0/maestro-v1.0.0_ns_wav_validation.tfrecord-?????-of-00010',
'validation_subset':
'gs://magentadata/datasets/maestro/v1.0.0/maestro-v1.0.0_ns_wav_validation.tfrecord-0000[06]-of-00010',
'test':
'gs://magentadata/datasets/maestro/v1.0.0/maestro-v1.0.0_ns_wav_test.tfrecord-?????-of-00010'
},
features={
'audio': tf.io.FixedLenFeature([], dtype=tf.string),
'sequence': tf.io.FixedLenFeature([], dtype=tf.string),
'id': tf.io.FixedLenFeature([], dtype=tf.string)
},
train_split='train',
train_eval_split='validation_subset',
infer_eval_splits=[
InferEvalSplit(name='train', suffix='eval_train_full',
include_in_mixture=False),
InferEvalSplit(name='train_subset', suffix='eval_train'),
InferEvalSplit(name='validation', suffix='validation_full',
include_in_mixture=False),
InferEvalSplit(name='validation_subset', suffix='validation'),
InferEvalSplit(name='test', suffix='test', include_in_mixture=False)
])
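# Illustrative sketch (not part of the original file): the ``features`` mapping above is a standard
# tf.io feature spec, so a serialized record from one of the paths can be parsed with it.
# ``serialized`` is a hypothetical scalar tf.string tensor read from the TFRecord files:
#
#     parsed = tf.io.parse_single_example(serialized, MAESTROV1_CONFIG.features)
#     audio_bytes = parsed['audio']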
MAESTROV3_CONFIG = DatasetConfig(
name='maestrov3',
paths={
'train':
'gs://magentadata/datasets/maestro/v3.0.0/maestro-v3.0.0_ns_wav_train.tfrecord-?????-of-00025',
'train_subset':
'gs://magentadata/datasets/maestro/v3.0.0/maestro-v3.0.0_ns_wav_train.tfrecord-00004-of-00025',
'validation':
'gs://magentadata/datasets/maestro/v3.0.0/maestro-v3.0.0_ns_wav_validation.tfrecord-?????-of-00025',
'validation_subset':
'gs://magentadata/datasets/maestro/v3.0.0/maestro-v3.0.0_ns_wav_validation.tfrecord-0002?-of-00025',
'test':
'gs://magentadata/datasets/maestro/v3.0.0/maestro-v3.0.0_ns_wav_test.tfrecord-?????-of-00025'
},
features={
'audio': tf.io.FixedLenFeature([], dtype=tf.string),
'sequence': tf.io.FixedLenFeature([], dtype=tf.string),
'id': tf.io.FixedLenFeature([], dtype=tf.string)
},
train_split='train',
train_eval_split='validation_subset',
infer_eval_splits=[
InferEvalSplit(name='train', suffix='eval_train_full',
include_in_mixture=False),
InferEvalSplit(name='train_subset', suffix='eval_train'),
InferEvalSplit(name='validation', suffix='validation_full',
include_in_mixture=False),
InferEvalSplit(name='validation_subset', suffix='validation'),
InferEvalSplit(name='test', suffix='test', include_in_mixture=False)
])
GUITARSET_CONFIG = DatasetConfig(
name='guitarset',
paths={
'train':
'gs://mt3/data/datasets/guitarset/train.tfrecord-?????-of-00019',
'validation':
'gs://mt3/data/datasets/guitarset/validation.tfrecord-?????-of-00006',
},
features={
'sequence': tf.io.FixedLenFeature([], dtype=tf.string),
'audio': tf.io.FixedLenFeature([], dtype=tf.string),
'velocity_range': tf.io.FixedLenFeature([], dtype=tf.string),
'id': tf.io.FixedLenFeature([], dtype=tf.string),
},
train_split='train',
train_eval_split='validation',
infer_eval_splits=[
InferEvalSplit(name='train', suffix='eval_train'),
InferEvalSplit(name='validation', suffix='validation'),
])
URMP_CONFIG = DatasetConfig(
name='urmp',
paths={
'train': 'gs://mt3/data/datasets/urmp/train.tfrecord',
'validation': 'gs://mt3/data/datasets/urmp/validation.tfrecord',
},
features={
'id': tf.io.FixedLenFeature([], dtype=tf.string),
'tracks': tf.io.FixedLenSequenceFeature(
[], dtype=tf.int64, allow_missing=True),
'inst_names': tf.io.FixedLenSequenceFeature(
[], dtype=tf.string, allow_missing=True),
'audio': tf.io.FixedLenFeature([], dtype=tf.string),
'sequence': tf.io.FixedLenFeature([], dtype=tf.string),
'instrument_sequences': tf.io.FixedLenSequenceFeature(
[], dtype=tf.string, allow_missing=True),
},
train_split='train',
train_eval_split='validation',
infer_eval_splits=[
InferEvalSplit(name='train', suffix='eval_train'),
InferEvalSplit(name='validation', suffix='validation')
])
MUSICNET_CONFIG = DatasetConfig(
name='musicnet',
paths={
'train':
'gs://mt3/data/datasets/musicnet/musicnet-train.tfrecord-?????-of-00036',
'validation':
'gs://mt3/data/datasets/musicnet/musicnet-validation.tfrecord-?????-of-00005',
'test':
'gs://mt3/data/datasets/musicnet/musicnet-test.tfrecord-?????-of-00003'
},
features={
'id': tf.io.FixedLenFeature([], dtype=tf.string),
'sample_rate': tf.io.FixedLenFeature([], dtype=tf.float32),
'audio': tf.io.FixedLenSequenceFeature(
[], dtype=tf.float32, allow_missing=True),
'sequence': tf.io.FixedLenFeature([], dtype=tf.string)
},
train_split='train',
train_eval_split='validation',
infer_eval_splits=[
InferEvalSplit(name='train', suffix='eval_train'),
InferEvalSplit(name='validation', suffix='validation'),
InferEvalSplit(name='test', suffix='test', include_in_mixture=False)
])
CERBERUS4_CONFIG = DatasetConfig(
name='cerberus4',
paths={
'train':
'gs://mt3/data/datasets/cerberus4/slakh_multi_cerberus_train_bass:drums:guitar:piano.tfrecord-?????-of-00286',
'train_subset':
'gs://mt3/data/datasets/cerberus4/slakh_multi_cerberus_train_bass:drums:guitar:piano.tfrecord-00000-of-00286',
'validation':
'gs://mt3/data/datasets/cerberus4/slakh_multi_cerberus_validation_bass:drums:guitar:piano.tfrecord-?????-of-00212',
'validation_subset':
'gs://mt3/data/datasets/cerberus4/slakh_multi_cerberus_validation_bass:drums:guitar:piano.tfrecord-0000?-of-00212',
'test':
'gs://mt3/data/datasets/cerberus4/slakh_multi_cerberus_test_bass:drums:guitar:piano.tfrecord-?????-of-00106'
},
features={
'audio_sample_rate': tf.io.FixedLenFeature([], dtype=tf.int64),
'inst_names': tf.io.FixedLenSequenceFeature(
[], dtype=tf.string, allow_missing=True),
'midi_class': tf.io.FixedLenSequenceFeature(
[], dtype=tf.int64, allow_missing=True),
'mix': tf.io.FixedLenSequenceFeature(
[], dtype=tf.float32, allow_missing=True),
'note_sequences': tf.io.FixedLenSequenceFeature(
[], dtype=tf.string, allow_missing=True),
'plugin_name': tf.io.FixedLenSequenceFeature(
[], dtype=tf.int64, allow_missing=True),
'program_num': tf.io.FixedLenSequenceFeature(
[], dtype=tf.int64, allow_missing=True),
'slakh_class': tf.io.FixedLenSequenceFeature(
[], dtype=tf.int64, allow_missing=True),
'src_ids': tf.io.FixedLenSequenceFeature(
[], dtype=tf.string, allow_missing=True),
'stems': tf.io.FixedLenSequenceFeature(
[], dtype=tf.float32, allow_missing=True),
'stems_shape': tf.io.FixedLenFeature([2], dtype=tf.int64),
'target_type': tf.io.FixedLenFeature([], dtype=tf.string),
'track_id': tf.io.FixedLenFeature([], dtype=tf.string),
},
train_split='train',
train_eval_split='validation_subset',
infer_eval_splits=[
InferEvalSplit(name='train', suffix='eval_train_full',
include_in_mixture=False),
InferEvalSplit(name='train_subset', suffix='eval_train'),
InferEvalSplit(name='validation', suffix='validation_full',
include_in_mixture=False),
InferEvalSplit(name='validation_subset', suffix='validation'),
InferEvalSplit(name='test', suffix='test', include_in_mixture=False)
],
track_specs=[
note_sequences.TrackSpec('bass', program=32),
note_sequences.TrackSpec('drums', is_drum=True),
note_sequences.TrackSpec('guitar', program=24),
note_sequences.TrackSpec('piano', program=0)
])
SLAKH_CONFIG = DatasetConfig(
name='slakh',
paths={
'train':
'gs://mt3/data/datasets/slakh/slakh_multi_full_subsets_10_train_all_inst.tfrecord-?????-of-02307',
'train_subset':
'gs://mt3/data/datasets/slakh/slakh_multi_full_subsets_10_train_all_inst.tfrecord-00000-of-02307',
'validation':
'gs://mt3/data/datasets/slakh/slakh_multi_full_validation_all_inst.tfrecord-?????-of-00168',
'validation_subset':
'gs://mt3/data/datasets/slakh/slakh_multi_full_validation_all_inst.tfrecord-0000?-of-00168',
'test':
'gs://mt3/data/datasets/slakh/slakh_multi_full_test_all_inst.tfrecord-?????-of-00109'
},
features={
'audio_sample_rate': tf.io.FixedLenFeature([], dtype=tf.int64),
'inst_names': tf.io.FixedLenSequenceFeature([], dtype=tf.string,
allow_missing=True),
'midi_class': tf.io.FixedLenSequenceFeature([], dtype=tf.int64,
allow_missing=True),
'mix': tf.io.FixedLenSequenceFeature([], dtype=tf.float32,
allow_missing=True),
'note_sequences': tf.io.FixedLenSequenceFeature([], dtype=tf.string,
allow_missing=True),
'plugin_name': tf.io.FixedLenSequenceFeature([], dtype=tf.int64,
allow_missing=True),
'program_num': tf.io.FixedLenSequenceFeature([], dtype=tf.int64,
allow_missing=True),
'slakh_class': tf.io.FixedLenSequenceFeature([], dtype=tf.int64,
allow_missing=True),
'src_ids': tf.io.FixedLenSequenceFeature([], dtype=tf.string,
allow_missing=True),
'stems': tf.io.FixedLenSequenceFeature([], dtype=tf.float32,
allow_missing=True),
'stems_shape': tf.io.FixedLenFeature([2], dtype=tf.int64),
'target_type': tf.io.FixedLenFeature([], dtype=tf.string),
'track_id': tf.io.FixedLenFeature([], dtype=tf.string),
},
train_split='train',
train_eval_split='validation_subset',
infer_eval_splits=[
InferEvalSplit(name='train', suffix='eval_train_full',
include_in_mixture=False),
InferEvalSplit(name='train_subset', suffix='eval_train'),
InferEvalSplit(name='validation', suffix='validation_full',
include_in_mixture=False),
InferEvalSplit(name='validation_subset', suffix='validation'),
InferEvalSplit(name='test', suffix='test', include_in_mixture=False)
])
|
  # dataset split name
  name: str
# task name suffix (each eval split is a separate task)
suffix: str
# whether or not to include in the mixture of all eval tasks
include_in_mixture: bool = True
|
migrationschedule.go
|
/*
Copyright 2018 Openstorage.org
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by lister-gen. DO NOT EDIT.
package v1alpha1
import (
v1alpha1 "github.com/libopenstorage/stork/pkg/apis/stork/v1alpha1"
"k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/client-go/tools/cache"
)
// MigrationScheduleLister helps list MigrationSchedules.
// All objects returned here must be treated as read-only.
type MigrationScheduleLister interface {
// List lists all MigrationSchedules in the indexer.
// Objects returned here must be treated as read-only.
List(selector labels.Selector) (ret []*v1alpha1.MigrationSchedule, err error)
// MigrationSchedules returns an object that can list and get MigrationSchedules.
MigrationSchedules(namespace string) MigrationScheduleNamespaceLister
MigrationScheduleListerExpansion
}
// migrationScheduleLister implements the MigrationScheduleLister interface.
type migrationScheduleLister struct {
indexer cache.Indexer
}
// NewMigrationScheduleLister returns a new MigrationScheduleLister.
func NewMigrationScheduleLister(indexer cache.Indexer) MigrationScheduleLister
|
// List lists all MigrationSchedules in the indexer.
func (s *migrationScheduleLister) List(selector labels.Selector) (ret []*v1alpha1.MigrationSchedule, err error) {
err = cache.ListAll(s.indexer, selector, func(m interface{}) {
ret = append(ret, m.(*v1alpha1.MigrationSchedule))
})
return ret, err
}
// MigrationSchedules returns an object that can list and get MigrationSchedules.
func (s *migrationScheduleLister) MigrationSchedules(namespace string) MigrationScheduleNamespaceLister {
return migrationScheduleNamespaceLister{indexer: s.indexer, namespace: namespace}
}
// MigrationScheduleNamespaceLister helps list and get MigrationSchedules.
// All objects returned here must be treated as read-only.
type MigrationScheduleNamespaceLister interface {
// List lists all MigrationSchedules in the indexer for a given namespace.
// Objects returned here must be treated as read-only.
List(selector labels.Selector) (ret []*v1alpha1.MigrationSchedule, err error)
// Get retrieves the MigrationSchedule from the indexer for a given namespace and name.
// Objects returned here must be treated as read-only.
Get(name string) (*v1alpha1.MigrationSchedule, error)
MigrationScheduleNamespaceListerExpansion
}
// migrationScheduleNamespaceLister implements the MigrationScheduleNamespaceLister
// interface.
type migrationScheduleNamespaceLister struct {
indexer cache.Indexer
namespace string
}
// List lists all MigrationSchedules in the indexer for a given namespace.
func (s migrationScheduleNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.MigrationSchedule, err error) {
err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) {
ret = append(ret, m.(*v1alpha1.MigrationSchedule))
})
return ret, err
}
// Get retrieves the MigrationSchedule from the indexer for a given namespace and name.
func (s migrationScheduleNamespaceLister) Get(name string) (*v1alpha1.MigrationSchedule, error) {
obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name)
if err != nil {
return nil, err
}
if !exists {
return nil, errors.NewNotFound(v1alpha1.Resource("migrationschedule"), name)
}
return obj.(*v1alpha1.MigrationSchedule), nil
}
|
{
return &migrationScheduleLister{indexer: indexer}
}
|
ComplimentsRepositories.ts
|
import { EntityRepository, Repository } from "typeorm";
import { Compliment } from "../entities/Compliment";
@EntityRepository(Compliment)
class
|
extends Repository<Compliment> { }
export { ComplimentsRepositories };
|
ComplimentsRepositories
|
js-native-link-core.ts
|
import { BaseManager } from "../base/base-manager";
import { JSMethod, JSMethodInfo, NativeMethodInfo } from "../base/base-type";
import { ManagerGroup } from "../base/manager-group";
export class JsNativeLinkCore {
jsMethodBook: Map<string, Map<string, JSMethod>> = new Map();
  constructor() {
window['invokeJSMethod'] = this.invokeJSMethod;
}
  invokeNativeMethod = (nativeMethodInfo: NativeMethodInfo, params: any) => {
    let func = window['invokeNativeMethod'];
    if (!func) {
      throw new Error('invokeNativeMethod must be implemented on the native side.');
    }
    if (!(func instanceof Function)) {
      throw new Error('invokeNativeMethod must be a Function.');
    }
    return func(nativeMethodInfo, params);
  }
invokeJSMethod = (jsMethodInfo: JSMethodInfo, params: any) => {
if (!jsMethodInfo.className || !jsMethodInfo.methodName) {
|
let classInfo = this.jsMethodBook.get(jsMethodInfo.className);
if (!classInfo) {
return undefined;
}
    let func = classInfo.get(jsMethodInfo.methodName);
if (!func) {
return undefined;
}
return func(params);
}
  registerJsMethod = (jsMethodInfo: JSMethodInfo, jsMethod: JSMethod) => {
if (!jsMethodInfo.className || !jsMethodInfo.methodName) {
return false;
}
let classInfo = this.jsMethodBook.get(jsMethodInfo.className);
if (!classInfo) {
classInfo = new Map();
}
classInfo.set(jsMethodInfo.methodName, jsMethod);
this.jsMethodBook.set(jsMethodInfo.className, classInfo);
return true;
}
}
|
return undefined;
}
|
channels.go
|
/**
* @Author: Henry
|
* @Description:
* @File: channels.go
* @Version: 1.0.0
* @Date: 2020/3/8 8:28 PM
*/
package main
import "fmt"
// sum sends the total of the elements of s on channel c.
func sum(s []int, c chan int) {
	sum := 0
	for _, v := range s {
		sum += v
	}
	c <- sum
}

func main() {
	s := []int{3, 4, 5, 2, 0}
	c := make(chan int)
	// Sum the two halves of the slice concurrently.
	go sum(s[:len(s)/2], c)
	go sum(s[len(s)/2:], c)
	x := <-c // receive the partial sums in completion order
	y := <-c
	fmt.Println(x, y)
	fmt.Println(x + y)
}
| |
mod.rs
|
//! Android-specific definitions for linux-like values
pub type clock_t = ::c_long;
pub type time_t = ::c_long;
pub type suseconds_t = ::c_long;
pub type off_t = ::c_long;
pub type blkcnt_t = ::c_ulong;
pub type blksize_t = ::c_ulong;
pub type nlink_t = u32;
pub type useconds_t = u32;
pub type pthread_t = ::c_long;
pub type pthread_mutexattr_t = ::c_long;
pub type pthread_rwlockattr_t = ::c_long;
pub type pthread_barrierattr_t = ::c_int;
pub type pthread_condattr_t = ::c_long;
pub type pthread_key_t = ::c_int;
pub type fsfilcnt_t = ::c_ulong;
pub type fsblkcnt_t = ::c_ulong;
pub type nfds_t = ::c_uint;
pub type rlim_t = ::c_ulong;
pub type dev_t = ::c_ulong;
pub type ino_t = ::c_ulong;
pub type ino64_t = u64;
pub type __CPU_BITTYPE = ::c_ulong;
pub type idtype_t = ::c_int;
pub type loff_t = ::c_longlong;
pub type __kernel_loff_t = ::c_longlong;
pub type __kernel_pid_t = ::c_int;
pub type __u8 = ::c_uchar;
pub type __u16 = ::c_ushort;
pub type __s16 = ::c_short;
pub type __u32 = ::c_uint;
pub type __s32 = ::c_int;
// linux/elf.h
pub type Elf32_Addr = u32;
pub type Elf32_Half = u16;
pub type Elf32_Off = u32;
pub type Elf32_Word = u32;
pub type Elf64_Addr = u64;
pub type Elf64_Half = u16;
pub type Elf64_Off = u64;
pub type Elf64_Word = u32;
pub type Elf64_Xword = u64;
s! {
pub struct stack_t {
pub ss_sp: *mut ::c_void,
pub ss_flags: ::c_int,
pub ss_size: ::size_t
}
pub struct __fsid_t {
__val: [::c_int; 2],
}
pub struct msghdr {
pub msg_name: *mut ::c_void,
pub msg_namelen: ::socklen_t,
pub msg_iov: *mut ::iovec,
pub msg_iovlen: ::size_t,
pub msg_control: *mut ::c_void,
pub msg_controllen: ::size_t,
pub msg_flags: ::c_int,
}
pub struct cmsghdr {
pub cmsg_len: ::size_t,
pub cmsg_level: ::c_int,
pub cmsg_type: ::c_int,
}
pub struct termios {
pub c_iflag: ::tcflag_t,
pub c_oflag: ::tcflag_t,
pub c_cflag: ::tcflag_t,
pub c_lflag: ::tcflag_t,
pub c_line: ::cc_t,
pub c_cc: [::cc_t; ::NCCS],
}
pub struct termios2 {
pub c_iflag: ::tcflag_t,
pub c_oflag: ::tcflag_t,
pub c_cflag: ::tcflag_t,
pub c_lflag: ::tcflag_t,
pub c_line: ::cc_t,
pub c_cc: [::cc_t; 19],
pub c_ispeed: ::speed_t,
pub c_ospeed: ::speed_t,
}
pub struct mallinfo {
pub arena: ::size_t,
pub ordblks: ::size_t,
pub smblks: ::size_t,
pub hblks: ::size_t,
pub hblkhd: ::size_t,
pub usmblks: ::size_t,
pub fsmblks: ::size_t,
pub uordblks: ::size_t,
pub fordblks: ::size_t,
pub keepcost: ::size_t,
}
pub struct flock {
pub l_type: ::c_short,
pub l_whence: ::c_short,
pub l_start: ::off_t,
pub l_len: ::off_t,
pub l_pid: ::pid_t,
}
pub struct flock64 {
pub l_type: ::c_short,
pub l_whence: ::c_short,
pub l_start: ::__kernel_loff_t,
pub l_len: ::__kernel_loff_t,
pub l_pid: ::__kernel_pid_t,
}
pub struct cpu_set_t {
#[cfg(target_pointer_width = "64")]
__bits: [__CPU_BITTYPE; 16],
#[cfg(target_pointer_width = "32")]
__bits: [__CPU_BITTYPE; 1],
}
pub struct sem_t {
count: ::c_uint,
#[cfg(target_pointer_width = "64")]
__reserved: [::c_int; 3],
}
pub struct exit_status {
pub e_termination: ::c_short,
pub e_exit: ::c_short,
}
pub struct statvfs {
pub f_bsize: ::c_ulong,
pub f_frsize: ::c_ulong,
pub f_blocks: ::fsblkcnt_t,
pub f_bfree: ::fsblkcnt_t,
pub f_bavail: ::fsblkcnt_t,
pub f_files: ::fsfilcnt_t,
pub f_ffree: ::fsfilcnt_t,
pub f_favail: ::fsfilcnt_t,
pub f_fsid: ::c_ulong,
pub f_flag: ::c_ulong,
pub f_namemax: ::c_ulong,
#[cfg(target_pointer_width = "64")]
__f_reserved: [u32; 6],
}
pub struct signalfd_siginfo {
pub ssi_signo: u32,
pub ssi_errno: i32,
pub ssi_code: i32,
pub ssi_pid: u32,
pub ssi_uid: u32,
pub ssi_fd: i32,
pub ssi_tid: u32,
pub ssi_band: u32,
pub ssi_overrun: u32,
pub ssi_trapno: u32,
pub ssi_status: i32,
pub ssi_int: i32,
pub ssi_ptr: ::c_ulonglong,
pub ssi_utime: ::c_ulonglong,
pub ssi_stime: ::c_ulonglong,
pub ssi_addr: ::c_ulonglong,
pub ssi_addr_lsb: u16,
_pad2: u16,
pub ssi_syscall: i32,
pub ssi_call_addr: u64,
pub ssi_arch: u32,
_pad: [u8; 28],
}
pub struct itimerspec {
pub it_interval: ::timespec,
pub it_value: ::timespec,
}
pub struct ucred {
pub pid: ::pid_t,
pub uid: ::uid_t,
pub gid: ::gid_t,
}
pub struct genlmsghdr {
pub cmd: u8,
pub version: u8,
pub reserved: u16,
}
pub struct nlmsghdr {
pub nlmsg_len: u32,
pub nlmsg_type: u16,
pub nlmsg_flags: u16,
pub nlmsg_seq: u32,
pub nlmsg_pid: u32,
}
pub struct nlmsgerr {
pub error: ::c_int,
pub msg: nlmsghdr,
}
pub struct nl_pktinfo {
pub group: u32,
}
pub struct nl_mmap_req {
pub nm_block_size: ::c_uint,
pub nm_block_nr: ::c_uint,
pub nm_frame_size: ::c_uint,
pub nm_frame_nr: ::c_uint,
}
pub struct nl_mmap_hdr {
pub nm_status: ::c_uint,
pub nm_len: ::c_uint,
pub nm_group: u32,
pub nm_pid: u32,
pub nm_uid: u32,
pub nm_gid: u32,
}
pub struct nlattr {
pub nla_len: u16,
pub nla_type: u16,
}
pub struct in6_pktinfo {
pub ipi6_addr: ::in6_addr,
pub ipi6_ifindex: ::c_int,
}
pub struct inotify_event {
pub wd: ::c_int,
pub mask: u32,
pub cookie: u32,
pub len: u32
}
pub struct sock_extended_err {
pub ee_errno: u32,
pub ee_origin: u8,
pub ee_type: u8,
pub ee_code: u8,
pub ee_pad: u8,
pub ee_info: u32,
pub ee_data: u32,
}
pub struct regex_t {
re_magic: ::c_int,
re_nsub: ::size_t,
re_endp: *const ::c_char,
re_guts: *mut ::c_void,
}
pub struct regmatch_t {
pub rm_so: ::ssize_t,
pub rm_eo: ::ssize_t,
}
pub struct sockaddr_vm {
pub svm_family: ::sa_family_t,
pub svm_reserved1: ::c_ushort,
pub svm_port: ::c_uint,
pub svm_cid: ::c_uint,
pub svm_zero: [u8; 4]
}
// linux/elf.h
pub struct Elf32_Phdr {
pub p_type: Elf32_Word,
pub p_offset: Elf32_Off,
pub p_vaddr: Elf32_Addr,
pub p_paddr: Elf32_Addr,
pub p_filesz: Elf32_Word,
pub p_memsz: Elf32_Word,
pub p_flags: Elf32_Word,
pub p_align: Elf32_Word,
}
pub struct Elf64_Phdr {
pub p_type: Elf64_Word,
pub p_flags: Elf64_Word,
pub p_offset: Elf64_Off,
pub p_vaddr: Elf64_Addr,
pub p_paddr: Elf64_Addr,
pub p_filesz: Elf64_Xword,
pub p_memsz: Elf64_Xword,
pub p_align: Elf64_Xword,
}
// link.h
pub struct dl_phdr_info {
#[cfg(target_pointer_width = "64")]
pub dlpi_addr: Elf64_Addr,
#[cfg(target_pointer_width = "32")]
pub dlpi_addr: Elf32_Addr,
pub dlpi_name: *const ::c_char,
#[cfg(target_pointer_width = "64")]
pub dlpi_phdr: *const Elf64_Phdr,
#[cfg(target_pointer_width = "32")]
pub dlpi_phdr: *const Elf32_Phdr,
#[cfg(target_pointer_width = "64")]
pub dlpi_phnum: Elf64_Half,
#[cfg(target_pointer_width = "32")]
pub dlpi_phnum: Elf32_Half,
// These fields were added in Android R
pub dlpi_adds: ::c_ulonglong,
pub dlpi_subs: ::c_ulonglong,
pub dlpi_tls_modid: ::size_t,
pub dlpi_tls_data: *mut ::c_void,
}
// linux/filter.h
pub struct sock_filter {
pub code: ::__u16,
pub jt: ::__u8,
pub jf: ::__u8,
pub k: ::__u32,
}
pub struct sock_fprog {
pub len: ::c_ushort,
pub filter: *mut sock_filter,
}
// linux/seccomp.h
pub struct seccomp_data {
pub nr: ::c_int,
pub arch: ::__u32,
pub instruction_pointer: ::__u64,
pub args: [::__u64; 6],
}
}
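// Example (added sketch, not part of the original bindings): pinning the
// current thread to CPU 0 with the cpu_set_t struct defined above. The
// private __bits field is reachable only because this is the defining
// module; sched_setaffinity is assumed to be declared elsewhere in this
// crate, and error handling is elided.
unsafe fn _example_pin_to_cpu0() -> ::c_int {
    let mut set: cpu_set_t = ::core::mem::zeroed();
    set.__bits[0] = 1; // bit 0 selects CPU 0
    ::sched_setaffinity(0, ::core::mem::size_of::<cpu_set_t>(), &set)
}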
s_no_extra_traits! {
pub struct sockaddr_nl {
pub nl_family: ::sa_family_t,
nl_pad: ::c_ushort,
pub nl_pid: u32,
pub nl_groups: u32
}
pub struct dirent {
pub d_ino: u64,
pub d_off: i64,
pub d_reclen: ::c_ushort,
pub d_type: ::c_uchar,
pub d_name: [::c_char; 256],
}
pub struct dirent64 {
pub d_ino: u64,
pub d_off: i64,
pub d_reclen: ::c_ushort,
pub d_type: ::c_uchar,
pub d_name: [::c_char; 256],
}
pub struct siginfo_t {
pub si_signo: ::c_int,
pub si_errno: ::c_int,
pub si_code: ::c_int,
pub _pad: [::c_int; 29],
_align: [usize; 0],
}
pub struct lastlog {
ll_time: ::time_t,
ll_line: [::c_char; UT_LINESIZE],
ll_host: [::c_char; UT_HOSTSIZE],
}
pub struct utmp {
pub ut_type: ::c_short,
pub ut_pid: ::pid_t,
pub ut_line: [::c_char; UT_LINESIZE],
pub ut_id: [::c_char; 4],
pub ut_user: [::c_char; UT_NAMESIZE],
pub ut_host: [::c_char; UT_HOSTSIZE],
pub ut_exit: exit_status,
pub ut_session: ::c_long,
pub ut_tv: ::timeval,
pub ut_addr_v6: [i32; 4],
unused: [::c_char; 20],
}
pub struct sockaddr_alg {
pub salg_family: ::sa_family_t,
pub salg_type: [::c_uchar; 14],
pub salg_feat: u32,
pub salg_mask: u32,
pub salg_name: [::c_uchar; 64],
}
/// WARNING: The `PartialEq`, `Eq` and `Hash` implementations of this
/// type are unsound and will be removed in the future.
#[deprecated(
note = "this struct has unsafe trait implementations that will be \
removed in the future",
since = "0.2.80"
)]
pub struct af_alg_iv {
pub ivlen: u32,
pub iv: [::c_uchar; 0],
}
pub struct prop_info {
__name: [::c_char; 32],
__serial: ::c_uint,
__value: [[::c_char; 4]; 23],
}
}
cfg_if! {
if #[cfg(feature = "extra_traits")] {
impl PartialEq for sockaddr_nl {
fn eq(&self, other: &sockaddr_nl) -> bool {
self.nl_family == other.nl_family &&
self.nl_pid == other.nl_pid &&
self.nl_groups == other.nl_groups
}
}
impl Eq for sockaddr_nl {}
impl ::fmt::Debug for sockaddr_nl {
fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
f.debug_struct("sockaddr_nl")
.field("nl_family", &self.nl_family)
.field("nl_pid", &self.nl_pid)
.field("nl_groups", &self.nl_groups)
.finish()
}
}
impl ::hash::Hash for sockaddr_nl {
fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
self.nl_family.hash(state);
self.nl_pid.hash(state);
self.nl_groups.hash(state);
}
}
impl PartialEq for dirent {
fn eq(&self, other: &dirent) -> bool {
self.d_ino == other.d_ino
&& self.d_off == other.d_off
&& self.d_reclen == other.d_reclen
&& self.d_type == other.d_type
&& self
.d_name
.iter()
.zip(other.d_name.iter())
.all(|(a,b)| a == b)
}
}
impl Eq for dirent {}
impl ::fmt::Debug for dirent {
fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
f.debug_struct("dirent")
.field("d_ino", &self.d_ino)
.field("d_off", &self.d_off)
.field("d_reclen", &self.d_reclen)
.field("d_type", &self.d_type)
// FIXME: .field("d_name", &self.d_name)
.finish()
}
}
impl ::hash::Hash for dirent {
fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
self.d_ino.hash(state);
self.d_off.hash(state);
self.d_reclen.hash(state);
self.d_type.hash(state);
self.d_name.hash(state);
}
}
impl PartialEq for dirent64 {
fn eq(&self, other: &dirent64) -> bool {
self.d_ino == other.d_ino
&& self.d_off == other.d_off
&& self.d_reclen == other.d_reclen
&& self.d_type == other.d_type
&& self
.d_name
.iter()
.zip(other.d_name.iter())
.all(|(a,b)| a == b)
}
}
impl Eq for dirent64 {}
impl ::fmt::Debug for dirent64 {
fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
f.debug_struct("dirent64")
.field("d_ino", &self.d_ino)
.field("d_off", &self.d_off)
.field("d_reclen", &self.d_reclen)
.field("d_type", &self.d_type)
// FIXME: .field("d_name", &self.d_name)
.finish()
}
}
impl ::hash::Hash for dirent64 {
fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
self.d_ino.hash(state);
self.d_off.hash(state);
self.d_reclen.hash(state);
self.d_type.hash(state);
self.d_name.hash(state);
}
}
impl PartialEq for siginfo_t {
fn eq(&self, other: &siginfo_t) -> bool {
self.si_signo == other.si_signo
&& self.si_errno == other.si_errno
&& self.si_code == other.si_code
// Ignore _pad
// Ignore _align
}
}
impl Eq for siginfo_t {}
impl ::fmt::Debug for siginfo_t {
fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
f.debug_struct("siginfo_t")
.field("si_signo", &self.si_signo)
.field("si_errno", &self.si_errno)
.field("si_code", &self.si_code)
// Ignore _pad
// Ignore _align
.finish()
}
}
impl ::hash::Hash for siginfo_t {
fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
self.si_signo.hash(state);
self.si_errno.hash(state);
self.si_code.hash(state);
// Ignore _pad
// Ignore _align
}
}
impl PartialEq for lastlog {
fn eq(&self, other: &lastlog) -> bool {
self.ll_time == other.ll_time
&& self
.ll_line
.iter()
.zip(other.ll_line.iter())
.all(|(a,b)| a == b)
&& self
.ll_host
.iter()
.zip(other.ll_host.iter())
.all(|(a,b)| a == b)
}
}
impl Eq for lastlog {}
impl ::fmt::Debug for lastlog {
fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
f.debug_struct("lastlog")
.field("ll_time", &self.ll_time)
.field("ll_line", &self.ll_line)
// FIXME: .field("ll_host", &self.ll_host)
.finish()
}
}
impl ::hash::Hash for lastlog {
fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
self.ll_time.hash(state);
self.ll_line.hash(state);
self.ll_host.hash(state);
}
}
impl PartialEq for utmp {
fn eq(&self, other: &utmp) -> bool {
self.ut_type == other.ut_type
&& self.ut_pid == other.ut_pid
&& self
.ut_line
.iter()
.zip(other.ut_line.iter())
.all(|(a,b)| a == b)
&& self.ut_id == other.ut_id
&& self
.ut_user
.iter()
.zip(other.ut_user.iter())
.all(|(a,b)| a == b)
&& self
.ut_host
.iter()
.zip(other.ut_host.iter())
.all(|(a,b)| a == b)
&& self.ut_exit == other.ut_exit
&& self.ut_session == other.ut_session
&& self.ut_tv == other.ut_tv
&& self.ut_addr_v6 == other.ut_addr_v6
&& self.unused == other.unused
}
}
impl Eq for utmp {}
impl ::fmt::Debug for utmp {
fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
f.debug_struct("utmp")
.field("ut_type", &self.ut_type)
.field("ut_pid", &self.ut_pid)
.field("ut_line", &self.ut_line)
.field("ut_id", &self.ut_id)
.field("ut_user", &self.ut_user)
// FIXME: .field("ut_host", &self.ut_host)
.field("ut_exit", &self.ut_exit)
.field("ut_session", &self.ut_session)
.field("ut_tv", &self.ut_tv)
.field("ut_addr_v6", &self.ut_addr_v6)
.field("unused", &self.unused)
.finish()
}
}
impl ::hash::Hash for utmp {
fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
self.ut_type.hash(state);
self.ut_pid.hash(state);
self.ut_line.hash(state);
self.ut_id.hash(state);
self.ut_user.hash(state);
self.ut_host.hash(state);
self.ut_exit.hash(state);
self.ut_session.hash(state);
self.ut_tv.hash(state);
self.ut_addr_v6.hash(state);
self.unused.hash(state);
}
}
impl PartialEq for sockaddr_alg {
fn eq(&self, other: &sockaddr_alg) -> bool {
self.salg_family == other.salg_family
&& self
.salg_type
.iter()
.zip(other.salg_type.iter())
.all(|(a, b)| a == b)
&& self.salg_feat == other.salg_feat
&& self.salg_mask == other.salg_mask
&& self
.salg_name
.iter()
.zip(other.salg_name.iter())
.all(|(a, b)| a == b)
}
}
impl Eq for sockaddr_alg {}
impl ::fmt::Debug for sockaddr_alg {
fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
f.debug_struct("sockaddr_alg")
.field("salg_family", &self.salg_family)
.field("salg_type", &self.salg_type)
.field("salg_feat", &self.salg_feat)
.field("salg_mask", &self.salg_mask)
.field("salg_name", &&self.salg_name[..])
.finish()
}
}
impl ::hash::Hash for sockaddr_alg {
fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
self.salg_family.hash(state);
self.salg_type.hash(state);
self.salg_feat.hash(state);
self.salg_mask.hash(state);
self.salg_name.hash(state);
}
}
#[allow(deprecated)]
impl af_alg_iv {
fn as_slice(&self) -> &[u8] {
unsafe {
::core::slice::from_raw_parts(
self.iv.as_ptr(),
self.ivlen as usize
)
}
}
}
#[allow(deprecated)]
impl PartialEq for af_alg_iv {
fn eq(&self, other: &af_alg_iv) -> bool {
*self.as_slice() == *other.as_slice()
}
}
#[allow(deprecated)]
impl Eq for af_alg_iv {}
#[allow(deprecated)]
impl ::fmt::Debug for af_alg_iv {
fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
f.debug_struct("af_alg_iv")
.field("ivlen", &self.ivlen)
.finish()
}
}
#[allow(deprecated)]
impl ::hash::Hash for af_alg_iv {
fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
self.as_slice().hash(state);
}
}
impl PartialEq for prop_info {
fn eq(&self, other: &prop_info) -> bool {
self.__name == other.__name &&
self.__serial == other.__serial &&
self.__value == other.__value
}
}
impl Eq for prop_info {}
impl ::fmt::Debug for prop_info {
fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
f.debug_struct("prop_info")
.field("__name", &self.__name)
.field("__serial", &self.__serial)
.field("__value", &self.__value)
.finish()
}
}
}
}
pub const MADV_SOFT_OFFLINE: ::c_int = 101;
pub const MS_NOUSER: ::c_ulong = 0xffffffff80000000;
pub const MS_RMT_MASK: ::c_ulong = 0x02800051;
pub const O_TRUNC: ::c_int = 512;
pub const O_CLOEXEC: ::c_int = 0x80000;
pub const O_PATH: ::c_int = 0o10000000;
pub const O_NOATIME: ::c_int = 0o1000000;
pub const EBFONT: ::c_int = 59;
pub const ENOSTR: ::c_int = 60;
pub const ENODATA: ::c_int = 61;
pub const ETIME: ::c_int = 62;
pub const ENOSR: ::c_int = 63;
pub const ENONET: ::c_int = 64;
pub const ENOPKG: ::c_int = 65;
pub const EREMOTE: ::c_int = 66;
pub const ENOLINK: ::c_int = 67;
pub const EADV: ::c_int = 68;
pub const ESRMNT: ::c_int = 69;
pub const ECOMM: ::c_int = 70;
pub const EPROTO: ::c_int = 71;
pub const EDOTDOT: ::c_int = 73;
pub const EPOLL_CLOEXEC: ::c_int = 0x80000;
// sys/eventfd.h
pub const EFD_SEMAPHORE: ::c_int = 0x1;
pub const EFD_CLOEXEC: ::c_int = O_CLOEXEC;
pub const EFD_NONBLOCK: ::c_int = O_NONBLOCK;
// sys/timerfd.h
pub const TFD_CLOEXEC: ::c_int = O_CLOEXEC;
pub const TFD_NONBLOCK: ::c_int = O_NONBLOCK;
pub const TFD_TIMER_ABSTIME: ::c_int = 1;
pub const TFD_TIMER_CANCEL_ON_SET: ::c_int = 2;
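// Example (added sketch, not part of the original bindings): arming a
// one-shot 1-second timer using the TFD_* flags above and the itimerspec
// struct defined earlier in this file. timerfd_create, timerfd_settime and
// CLOCK_MONOTONIC are assumed to be declared elsewhere in this crate; error
// handling is elided.
unsafe fn _example_one_shot_timerfd() -> ::c_int {
    let fd = ::timerfd_create(::CLOCK_MONOTONIC, TFD_CLOEXEC | TFD_NONBLOCK);
    let spec = itimerspec {
        it_interval: ::timespec { tv_sec: 0, tv_nsec: 0 }, // no repeat
        it_value: ::timespec { tv_sec: 1, tv_nsec: 0 },    // fire once after 1s
    };
    ::timerfd_settime(fd, 0, &spec, ::core::ptr::null_mut());
    fd
}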
pub const USER_PROCESS: ::c_short = 7;
// linux/falloc.h
pub const FALLOC_FL_KEEP_SIZE: ::c_int = 0x01;
pub const FALLOC_FL_PUNCH_HOLE: ::c_int = 0x02;
pub const FALLOC_FL_NO_HIDE_STALE: ::c_int = 0x04;
pub const FALLOC_FL_COLLAPSE_RANGE: ::c_int = 0x08;
pub const FALLOC_FL_ZERO_RANGE: ::c_int = 0x10;
pub const FALLOC_FL_INSERT_RANGE: ::c_int = 0x20;
pub const FALLOC_FL_UNSHARE_RANGE: ::c_int = 0x40;
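// Example (added sketch, not part of the original bindings): deallocating a
// byte range of an open file with the FALLOC_FL_* flags above. Punching a
// hole without changing the file size requires OR-ing in FALLOC_FL_KEEP_SIZE;
// fallocate is assumed to be declared elsewhere in this crate.
unsafe fn _example_punch_hole(fd: ::c_int, offset: ::off_t, len: ::off_t) -> ::c_int {
    ::fallocate(fd, FALLOC_FL_PUNCH_HOLE | FALLOC_FL_KEEP_SIZE, offset, len)
}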
pub const BUFSIZ: ::c_uint = 1024;
pub const FILENAME_MAX: ::c_uint = 4096;
pub const FOPEN_MAX: ::c_uint = 20;
pub const POSIX_FADV_DONTNEED: ::c_int = 4;
pub const POSIX_FADV_NOREUSE: ::c_int = 5;
pub const L_tmpnam: ::c_uint = 4096;
pub const TMP_MAX: ::c_uint = 308915776;
pub const _PC_LINK_MAX: ::c_int = 1;
pub const _PC_MAX_CANON: ::c_int = 2;
pub const _PC_MAX_INPUT: ::c_int = 3;
pub const _PC_NAME_MAX: ::c_int = 4;
pub const _PC_PATH_MAX: ::c_int = 5;
pub const _PC_PIPE_BUF: ::c_int = 6;
pub const _PC_2_SYMLINKS: ::c_int = 7;
pub const _PC_ALLOC_SIZE_MIN: ::c_int = 8;
pub const _PC_REC_INCR_XFER_SIZE: ::c_int = 9;
pub const _PC_REC_MAX_XFER_SIZE: ::c_int = 10;
pub const _PC_REC_MIN_XFER_SIZE: ::c_int = 11;
pub const _PC_REC_XFER_ALIGN: ::c_int = 12;
pub const _PC_SYMLINK_MAX: ::c_int = 13;
pub const _PC_CHOWN_RESTRICTED: ::c_int = 14;
pub const _PC_NO_TRUNC: ::c_int = 15;
pub const _PC_VDISABLE: ::c_int = 16;
pub const _PC_ASYNC_IO: ::c_int = 17;
pub const _PC_PRIO_IO: ::c_int = 18;
pub const _PC_SYNC_IO: ::c_int = 19;
pub const FIONBIO: ::c_int = 0x5421;
pub const _SC_ARG_MAX: ::c_int = 0;
pub const _SC_BC_BASE_MAX: ::c_int = 1;
pub const _SC_BC_DIM_MAX: ::c_int = 2;
pub const _SC_BC_SCALE_MAX: ::c_int = 3;
pub const _SC_BC_STRING_MAX: ::c_int = 4;
pub const _SC_CHILD_MAX: ::c_int = 5;
pub const _SC_CLK_TCK: ::c_int = 6;
pub const _SC_COLL_WEIGHTS_MAX: ::c_int = 7;
pub const _SC_EXPR_NEST_MAX: ::c_int = 8;
pub const _SC_LINE_MAX: ::c_int = 9;
pub const _SC_NGROUPS_MAX: ::c_int = 10;
pub const _SC_OPEN_MAX: ::c_int = 11;
pub const _SC_PASS_MAX: ::c_int = 12;
pub const _SC_2_C_BIND: ::c_int = 13;
pub const _SC_2_C_DEV: ::c_int = 14;
pub const _SC_2_C_VERSION: ::c_int = 15;
pub const _SC_2_CHAR_TERM: ::c_int = 16;
pub const _SC_2_FORT_DEV: ::c_int = 17;
pub const _SC_2_FORT_RUN: ::c_int = 18;
pub const _SC_2_LOCALEDEF: ::c_int = 19;
pub const _SC_2_SW_DEV: ::c_int = 20;
pub const _SC_2_UPE: ::c_int = 21;
pub const _SC_2_VERSION: ::c_int = 22;
pub const _SC_JOB_CONTROL: ::c_int = 23;
pub const _SC_SAVED_IDS: ::c_int = 24;
pub const _SC_VERSION: ::c_int = 25;
pub const _SC_RE_DUP_MAX: ::c_int = 26;
pub const _SC_STREAM_MAX: ::c_int = 27;
pub const _SC_TZNAME_MAX: ::c_int = 28;
pub const _SC_XOPEN_CRYPT: ::c_int = 29;
pub const _SC_XOPEN_ENH_I18N: ::c_int = 30;
pub const _SC_XOPEN_SHM: ::c_int = 31;
pub const _SC_XOPEN_VERSION: ::c_int = 32;
pub const _SC_XOPEN_XCU_VERSION: ::c_int = 33;
pub const _SC_XOPEN_REALTIME: ::c_int = 34;
pub const _SC_XOPEN_REALTIME_THREADS: ::c_int = 35;
pub const _SC_XOPEN_LEGACY: ::c_int = 36;
pub const _SC_ATEXIT_MAX: ::c_int = 37;
pub const _SC_IOV_MAX: ::c_int = 38;
pub const _SC_PAGESIZE: ::c_int = 39;
pub const _SC_PAGE_SIZE: ::c_int = 40;
pub const _SC_XOPEN_UNIX: ::c_int = 41;
pub const _SC_XBS5_ILP32_OFF32: ::c_int = 42;
pub const _SC_XBS5_ILP32_OFFBIG: ::c_int = 43;
pub const _SC_XBS5_LP64_OFF64: ::c_int = 44;
pub const _SC_XBS5_LPBIG_OFFBIG: ::c_int = 45;
pub const _SC_AIO_LISTIO_MAX: ::c_int = 46;
pub const _SC_AIO_MAX: ::c_int = 47;
pub const _SC_AIO_PRIO_DELTA_MAX: ::c_int = 48;
pub const _SC_DELAYTIMER_MAX: ::c_int = 49;
pub const _SC_MQ_OPEN_MAX: ::c_int = 50;
pub const _SC_MQ_PRIO_MAX: ::c_int = 51;
pub const _SC_RTSIG_MAX: ::c_int = 52;
pub const _SC_SEM_NSEMS_MAX: ::c_int = 53;
pub const _SC_SEM_VALUE_MAX: ::c_int = 54;
pub const _SC_SIGQUEUE_MAX: ::c_int = 55;
pub const _SC_TIMER_MAX: ::c_int = 56;
pub const _SC_ASYNCHRONOUS_IO: ::c_int = 57;
pub const _SC_FSYNC: ::c_int = 58;
pub const _SC_MAPPED_FILES: ::c_int = 59;
pub const _SC_MEMLOCK: ::c_int = 60;
pub const _SC_MEMLOCK_RANGE: ::c_int = 61;
pub const _SC_MEMORY_PROTECTION: ::c_int = 62;
pub const _SC_MESSAGE_PASSING: ::c_int = 63;
pub const _SC_PRIORITIZED_IO: ::c_int = 64;
pub const _SC_PRIORITY_SCHEDULING: ::c_int = 65;
pub const _SC_REALTIME_SIGNALS: ::c_int = 66;
pub const _SC_SEMAPHORES: ::c_int = 67;
pub const _SC_SHARED_MEMORY_OBJECTS: ::c_int = 68;
pub const _SC_SYNCHRONIZED_IO: ::c_int = 69;
pub const _SC_TIMERS: ::c_int = 70;
pub const _SC_GETGR_R_SIZE_MAX: ::c_int = 71;
pub const _SC_GETPW_R_SIZE_MAX: ::c_int = 72;
pub const _SC_LOGIN_NAME_MAX: ::c_int = 73;
pub const _SC_THREAD_DESTRUCTOR_ITERATIONS: ::c_int = 74;
pub const _SC_THREAD_KEYS_MAX: ::c_int = 75;
pub const _SC_THREAD_STACK_MIN: ::c_int = 76;
pub const _SC_THREAD_THREADS_MAX: ::c_int = 77;
pub const _SC_TTY_NAME_MAX: ::c_int = 78;
pub const _SC_THREADS: ::c_int = 79;
pub const _SC_THREAD_ATTR_STACKADDR: ::c_int = 80;
pub const _SC_THREAD_ATTR_STACKSIZE: ::c_int = 81;
pub const _SC_THREAD_PRIORITY_SCHEDULING: ::c_int = 82;
pub const _SC_THREAD_PRIO_INHERIT: ::c_int = 83;
pub const _SC_THREAD_PRIO_PROTECT: ::c_int = 84;
pub const _SC_THREAD_SAFE_FUNCTIONS: ::c_int = 85;
pub const _SC_NPROCESSORS_CONF: ::c_int = 96;
pub const _SC_NPROCESSORS_ONLN: ::c_int = 97;
pub const _SC_PHYS_PAGES: ::c_int = 98;
pub const _SC_AVPHYS_PAGES: ::c_int = 99;
pub const _SC_MONOTONIC_CLOCK: ::c_int = 100;
pub const _SC_2_PBS: ::c_int = 101;
pub const _SC_2_PBS_ACCOUNTING: ::c_int = 102;
pub const _SC_2_PBS_CHECKPOINT: ::c_int = 103;
pub const _SC_2_PBS_LOCATE: ::c_int = 104;
pub const _SC_2_PBS_MESSAGE: ::c_int = 105;
pub const _SC_2_PBS_TRACK: ::c_int = 106;
pub const _SC_ADVISORY_INFO: ::c_int = 107;
pub const _SC_BARRIERS: ::c_int = 108;
pub const _SC_CLOCK_SELECTION: ::c_int = 109;
pub const _SC_CPUTIME: ::c_int = 110;
pub const _SC_HOST_NAME_MAX: ::c_int = 111;
pub const _SC_IPV6: ::c_int = 112;
pub const _SC_RAW_SOCKETS: ::c_int = 113;
pub const _SC_READER_WRITER_LOCKS: ::c_int = 114;
pub const _SC_REGEXP: ::c_int = 115;
pub const _SC_SHELL: ::c_int = 116;
pub const _SC_SPAWN: ::c_int = 117;
pub const _SC_SPIN_LOCKS: ::c_int = 118;
pub const _SC_SPORADIC_SERVER: ::c_int = 119;
pub const _SC_SS_REPL_MAX: ::c_int = 120;
pub const _SC_SYMLOOP_MAX: ::c_int = 121;
pub const _SC_THREAD_CPUTIME: ::c_int = 122;
pub const _SC_THREAD_PROCESS_SHARED: ::c_int = 123;
pub const _SC_THREAD_ROBUST_PRIO_INHERIT: ::c_int = 124;
pub const _SC_THREAD_ROBUST_PRIO_PROTECT: ::c_int = 125;
pub const _SC_THREAD_SPORADIC_SERVER: ::c_int = 126;
pub const _SC_TIMEOUTS: ::c_int = 127;
pub const _SC_TRACE: ::c_int = 128;
pub const _SC_TRACE_EVENT_FILTER: ::c_int = 129;
pub const _SC_TRACE_EVENT_NAME_MAX: ::c_int = 130;
pub const _SC_TRACE_INHERIT: ::c_int = 131;
pub const _SC_TRACE_LOG: ::c_int = 132;
pub const _SC_TRACE_NAME_MAX: ::c_int = 133;
pub const _SC_TRACE_SYS_MAX: ::c_int = 134;
pub const _SC_TRACE_USER_EVENT_MAX: ::c_int = 135;
pub const _SC_TYPED_MEMORY_OBJECTS: ::c_int = 136;
pub const _SC_V7_ILP32_OFF32: ::c_int = 137;
pub const _SC_V7_ILP32_OFFBIG: ::c_int = 138;
pub const _SC_V7_LP64_OFF64: ::c_int = 139;
pub const _SC_V7_LPBIG_OFFBIG: ::c_int = 140;
pub const _SC_XOPEN_STREAMS: ::c_int = 141;
pub const _SC_XOPEN_UUCP: ::c_int = 142;
pub const F_LOCK: ::c_int = 1;
pub const F_TEST: ::c_int = 3;
pub const F_TLOCK: ::c_int = 2;
pub const F_ULOCK: ::c_int = 0;
pub const PTHREAD_MUTEX_NORMAL: ::c_int = 0;
pub const PTHREAD_MUTEX_RECURSIVE: ::c_int = 1;
pub const PTHREAD_MUTEX_ERRORCHECK: ::c_int = 2;
pub const PTHREAD_MUTEX_DEFAULT: ::c_int = PTHREAD_MUTEX_NORMAL;
// stdio.h
pub const RENAME_NOREPLACE: ::c_int = 1;
pub const RENAME_EXCHANGE: ::c_int = 2;
pub const RENAME_WHITEOUT: ::c_int = 4;
pub const FIOCLEX: ::c_int = 0x5451;
pub const FIONCLEX: ::c_int = 0x5450;
pub const SIGCHLD: ::c_int = 17;
pub const SIGBUS: ::c_int = 7;
pub const SIGUSR1: ::c_int = 10;
pub const SIGUSR2: ::c_int = 12;
pub const SIGCONT: ::c_int = 18;
pub const SIGSTOP: ::c_int = 19;
pub const SIGTSTP: ::c_int = 20;
pub const SIGURG: ::c_int = 23;
pub const SIGIO: ::c_int = 29;
pub const SIGSYS: ::c_int = 31;
pub const SIGSTKFLT: ::c_int = 16;
#[deprecated(since = "0.2.55", note = "Use SIGSYS instead")]
pub const SIGUNUSED: ::c_int = 31;
pub const SIGTTIN: ::c_int = 21;
pub const SIGTTOU: ::c_int = 22;
pub const SIGXCPU: ::c_int = 24;
pub const SIGXFSZ: ::c_int = 25;
pub const SIGVTALRM: ::c_int = 26;
pub const SIGPROF: ::c_int = 27;
pub const SIGWINCH: ::c_int = 28;
pub const SIGPOLL: ::c_int = 29;
pub const SIGPWR: ::c_int = 30;
pub const SIG_SETMASK: ::c_int = 2;
pub const SIG_BLOCK: ::c_int = 0x000000;
pub const SIG_UNBLOCK: ::c_int = 0x01;
pub const RUSAGE_CHILDREN: ::c_int = -1;
pub const LC_PAPER: ::c_int = 7;
pub const LC_NAME: ::c_int = 8;
pub const LC_ADDRESS: ::c_int = 9;
pub const LC_TELEPHONE: ::c_int = 10;
pub const LC_MEASUREMENT: ::c_int = 11;
pub const LC_IDENTIFICATION: ::c_int = 12;
pub const LC_PAPER_MASK: ::c_int = 1 << LC_PAPER;
pub const LC_NAME_MASK: ::c_int = 1 << LC_NAME;
pub const LC_ADDRESS_MASK: ::c_int = 1 << LC_ADDRESS;
pub const LC_TELEPHONE_MASK: ::c_int = 1 << LC_TELEPHONE;
pub const LC_MEASUREMENT_MASK: ::c_int = 1 << LC_MEASUREMENT;
pub const LC_IDENTIFICATION_MASK: ::c_int = 1 << LC_IDENTIFICATION;
pub const LC_ALL_MASK: ::c_int = ::LC_CTYPE_MASK
| ::LC_NUMERIC_MASK
| ::LC_TIME_MASK
| ::LC_COLLATE_MASK
| ::LC_MONETARY_MASK
| ::LC_MESSAGES_MASK
| LC_PAPER_MASK
| LC_NAME_MASK
| LC_ADDRESS_MASK
| LC_TELEPHONE_MASK
| LC_MEASUREMENT_MASK
| LC_IDENTIFICATION_MASK;
pub const MAP_ANON: ::c_int = 0x0020;
pub const MAP_ANONYMOUS: ::c_int = 0x0020;
pub const MAP_GROWSDOWN: ::c_int = 0x0100;
pub const MAP_DENYWRITE: ::c_int = 0x0800;
pub const MAP_EXECUTABLE: ::c_int = 0x01000;
pub const MAP_LOCKED: ::c_int = 0x02000;
pub const MAP_NORESERVE: ::c_int = 0x04000;
pub const MAP_POPULATE: ::c_int = 0x08000;
pub const MAP_NONBLOCK: ::c_int = 0x010000;
pub const MAP_STACK: ::c_int = 0x020000;
pub const EDEADLK: ::c_int = 35;
pub const ENAMETOOLONG: ::c_int = 36;
pub const ENOLCK: ::c_int = 37;
pub const ENOSYS: ::c_int = 38;
pub const ENOTEMPTY: ::c_int = 39;
pub const ELOOP: ::c_int = 40;
pub const ENOMSG: ::c_int = 42;
pub const EIDRM: ::c_int = 43;
pub const ECHRNG: ::c_int = 44;
pub const EL2NSYNC: ::c_int = 45;
pub const EL3HLT: ::c_int = 46;
pub const EL3RST: ::c_int = 47;
pub const ELNRNG: ::c_int = 48;
pub const EUNATCH: ::c_int = 49;
pub const ENOCSI: ::c_int = 50;
pub const EL2HLT: ::c_int = 51;
pub const EBADE: ::c_int = 52;
pub const EBADR: ::c_int = 53;
pub const EXFULL: ::c_int = 54;
pub const ENOANO: ::c_int = 55;
pub const EBADRQC: ::c_int = 56;
pub const EBADSLT: ::c_int = 57;
pub const EMULTIHOP: ::c_int = 72;
pub const EBADMSG: ::c_int = 74;
pub const EOVERFLOW: ::c_int = 75;
pub const ENOTUNIQ: ::c_int = 76;
pub const EBADFD: ::c_int = 77;
pub const EREMCHG: ::c_int = 78;
pub const ELIBACC: ::c_int = 79;
pub const ELIBBAD: ::c_int = 80;
pub const ELIBSCN: ::c_int = 81;
pub const ELIBMAX: ::c_int = 82;
pub const ELIBEXEC: ::c_int = 83;
pub const EILSEQ: ::c_int = 84;
pub const ERESTART: ::c_int = 85;
pub const ESTRPIPE: ::c_int = 86;
pub const EUSERS: ::c_int = 87;
pub const ENOTSOCK: ::c_int = 88;
pub const EDESTADDRREQ: ::c_int = 89;
pub const EMSGSIZE: ::c_int = 90;
pub const EPROTOTYPE: ::c_int = 91;
pub const ENOPROTOOPT: ::c_int = 92;
pub const EPROTONOSUPPORT: ::c_int = 93;
pub const ESOCKTNOSUPPORT: ::c_int = 94;
pub const EOPNOTSUPP: ::c_int = 95;
pub const ENOTSUP: ::c_int = EOPNOTSUPP;
pub const EPFNOSUPPORT: ::c_int = 96;
pub const EAFNOSUPPORT: ::c_int = 97;
pub const EADDRINUSE: ::c_int = 98;
pub const EADDRNOTAVAIL: ::c_int = 99;
pub const ENETDOWN: ::c_int = 100;
pub const ENETUNREACH: ::c_int = 101;
pub const ENETRESET: ::c_int = 102;
pub const ECONNABORTED: ::c_int = 103;
pub const ECONNRESET: ::c_int = 104;
pub const ENOBUFS: ::c_int = 105;
pub const EISCONN: ::c_int = 106;
pub const ENOTCONN: ::c_int = 107;
pub const ESHUTDOWN: ::c_int = 108;
pub const ETOOMANYREFS: ::c_int = 109;
pub const ETIMEDOUT: ::c_int = 110;
pub const ECONNREFUSED: ::c_int = 111;
pub const EHOSTDOWN: ::c_int = 112;
pub const EHOSTUNREACH: ::c_int = 113;
pub const EALREADY: ::c_int = 114;
pub const EINPROGRESS: ::c_int = 115;
pub const ESTALE: ::c_int = 116;
pub const EUCLEAN: ::c_int = 117;
pub const ENOTNAM: ::c_int = 118;
pub const ENAVAIL: ::c_int = 119;
pub const EISNAM: ::c_int = 120;
pub const EREMOTEIO: ::c_int = 121;
pub const EDQUOT: ::c_int = 122;
pub const ENOMEDIUM: ::c_int = 123;
pub const EMEDIUMTYPE: ::c_int = 124;
pub const ECANCELED: ::c_int = 125;
pub const ENOKEY: ::c_int = 126;
pub const EKEYEXPIRED: ::c_int = 127;
pub const EKEYREVOKED: ::c_int = 128;
pub const EKEYREJECTED: ::c_int = 129;
pub const EOWNERDEAD: ::c_int = 130;
pub const ENOTRECOVERABLE: ::c_int = 131;
pub const SOCK_STREAM: ::c_int = 1;
pub const SOCK_DGRAM: ::c_int = 2;
pub const SOCK_SEQPACKET: ::c_int = 5;
pub const SOCK_DCCP: ::c_int = 6;
pub const SOCK_PACKET: ::c_int = 10;
pub const IPPROTO_MAX: ::c_int = 256;
pub const SOL_SOCKET: ::c_int = 1;
pub const SOL_SCTP: ::c_int = 132;
pub const SOL_IPX: ::c_int = 256;
pub const SOL_AX25: ::c_int = 257;
pub const SOL_ATALK: ::c_int = 258;
pub const SOL_NETROM: ::c_int = 259;
pub const SOL_ROSE: ::c_int = 260;
/* DCCP socket options */
pub const DCCP_SOCKOPT_PACKET_SIZE: ::c_int = 1;
pub const DCCP_SOCKOPT_SERVICE: ::c_int = 2;
pub const DCCP_SOCKOPT_CHANGE_L: ::c_int = 3;
pub const DCCP_SOCKOPT_CHANGE_R: ::c_int = 4;
pub const DCCP_SOCKOPT_GET_CUR_MPS: ::c_int = 5;
pub const DCCP_SOCKOPT_SERVER_TIMEWAIT: ::c_int = 6;
pub const DCCP_SOCKOPT_SEND_CSCOV: ::c_int = 10;
pub const DCCP_SOCKOPT_RECV_CSCOV: ::c_int = 11;
pub const DCCP_SOCKOPT_AVAILABLE_CCIDS: ::c_int = 12;
pub const DCCP_SOCKOPT_CCID: ::c_int = 13;
pub const DCCP_SOCKOPT_TX_CCID: ::c_int = 14;
pub const DCCP_SOCKOPT_RX_CCID: ::c_int = 15;
pub const DCCP_SOCKOPT_QPOLICY_ID: ::c_int = 16;
pub const DCCP_SOCKOPT_QPOLICY_TXQLEN: ::c_int = 17;
pub const DCCP_SOCKOPT_CCID_RX_INFO: ::c_int = 128;
pub const DCCP_SOCKOPT_CCID_TX_INFO: ::c_int = 192;
/// maximum number of services provided on the same listening port
pub const DCCP_SERVICE_LIST_MAX_LEN: ::c_int = 32;
pub const SO_REUSEADDR: ::c_int = 2;
pub const SO_TYPE: ::c_int = 3;
pub const SO_ERROR: ::c_int = 4;
pub const SO_DONTROUTE: ::c_int = 5;
pub const SO_BROADCAST: ::c_int = 6;
pub const SO_SNDBUF: ::c_int = 7;
pub const SO_RCVBUF: ::c_int = 8;
pub const SO_KEEPALIVE: ::c_int = 9;
pub const SO_OOBINLINE: ::c_int = 10;
pub const SO_PRIORITY: ::c_int = 12;
pub const SO_LINGER: ::c_int = 13;
pub const SO_BSDCOMPAT: ::c_int = 14;
pub const SO_REUSEPORT: ::c_int = 15;
pub const SO_PASSCRED: ::c_int = 16;
pub const SO_PEERCRED: ::c_int = 17;
pub const SO_RCVLOWAT: ::c_int = 18;
pub const SO_SNDLOWAT: ::c_int = 19;
pub const SO_RCVTIMEO: ::c_int = 20;
pub const SO_SNDTIMEO: ::c_int = 21;
pub const SO_BINDTODEVICE: ::c_int = 25;
pub const SO_ATTACH_FILTER: ::c_int = 26;
pub const SO_DETACH_FILTER: ::c_int = 27;
pub const SO_GET_FILTER: ::c_int = SO_ATTACH_FILTER;
pub const SO_TIMESTAMP: ::c_int = 29;
pub const SO_ACCEPTCONN: ::c_int = 30;
pub const SO_PEERSEC: ::c_int = 31;
pub const SO_SNDBUFFORCE: ::c_int = 32;
pub const SO_RCVBUFFORCE: ::c_int = 33;
pub const SO_PASSSEC: ::c_int = 34;
pub const SO_MARK: ::c_int = 36;
pub const SO_PROTOCOL: ::c_int = 38;
pub const SO_DOMAIN: ::c_int = 39;
pub const SO_RXQ_OVFL: ::c_int = 40;
pub const SO_PEEK_OFF: ::c_int = 42;
pub const SO_BUSY_POLL: ::c_int = 46;
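// Example (added sketch, not part of the original bindings): enabling
// SO_REUSEADDR on an existing socket using the SOL_SOCKET/SO_* constants
// above; setsockopt is assumed to be declared elsewhere in this crate.
unsafe fn _example_enable_reuseaddr(fd: ::c_int) -> ::c_int {
    let one: ::c_int = 1;
    ::setsockopt(
        fd,
        SOL_SOCKET,
        SO_REUSEADDR,
        &one as *const ::c_int as *const ::c_void,
        ::core::mem::size_of::<::c_int>() as ::socklen_t,
    )
}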
pub const IPTOS_ECN_NOTECT: u8 = 0x00;
pub const O_ACCMODE: ::c_int = 3;
pub const O_APPEND: ::c_int = 1024;
pub const O_CREAT: ::c_int = 64;
pub const O_EXCL: ::c_int = 128;
pub const O_NOCTTY: ::c_int = 256;
pub const O_NONBLOCK: ::c_int = 2048;
pub const O_SYNC: ::c_int = 0x101000;
pub const O_ASYNC: ::c_int = 0x2000;
pub const O_NDELAY: ::c_int = 0x800;
pub const O_DSYNC: ::c_int = 4096;
pub const O_RSYNC: ::c_int = O_SYNC;
pub const NI_MAXHOST: ::size_t = 1025;
pub const NI_MAXSERV: ::size_t = 32;
pub const NI_NOFQDN: ::c_int = 0x00000001;
pub const NI_NUMERICHOST: ::c_int = 0x00000002;
pub const NI_NAMEREQD: ::c_int = 0x00000004;
pub const NI_NUMERICSERV: ::c_int = 0x00000008;
pub const NI_DGRAM: ::c_int = 0x00000010;
pub const NCCS: usize = 19;
pub const TCSBRKP: ::c_int = 0x5425;
pub const TCSANOW: ::c_int = 0;
pub const TCSADRAIN: ::c_int = 0x1;
pub const TCSAFLUSH: ::c_int = 0x2;
pub const VEOF: usize = 4;
pub const VEOL: usize = 11;
pub const VEOL2: usize = 16;
pub const VMIN: usize = 6;
pub const IEXTEN: ::tcflag_t = 0x00008000;
pub const TOSTOP: ::tcflag_t = 0x00000100;
pub const FLUSHO: ::tcflag_t = 0x00001000;
pub const EXTPROC: ::tcflag_t = 0o200000;
pub const ADFS_SUPER_MAGIC: ::c_long = 0x0000adf5;
pub const AFFS_SUPER_MAGIC: ::c_long = 0x0000adff;
pub const CODA_SUPER_MAGIC: ::c_long = 0x73757245;
pub const CRAMFS_MAGIC: ::c_long = 0x28cd3d45;
pub const EFS_SUPER_MAGIC: ::c_long = 0x00414a53;
pub const EXT2_SUPER_MAGIC: ::c_long = 0x0000ef53;
pub const EXT3_SUPER_MAGIC: ::c_long = 0x0000ef53;
pub const EXT4_SUPER_MAGIC: ::c_long = 0x0000ef53;
pub const HPFS_SUPER_MAGIC: ::c_long = 0xf995e849;
pub const HUGETLBFS_MAGIC: ::c_long = 0x958458f6;
pub const ISOFS_SUPER_MAGIC: ::c_long = 0x00009660;
pub const JFFS2_SUPER_MAGIC: ::c_long = 0x000072b6;
pub const MINIX_SUPER_MAGIC: ::c_long = 0x0000137f;
pub const MINIX_SUPER_MAGIC2: ::c_long = 0x0000138f;
pub const MINIX2_SUPER_MAGIC: ::c_long = 0x00002468;
pub const MINIX2_SUPER_MAGIC2: ::c_long = 0x00002478;
pub const MSDOS_SUPER_MAGIC: ::c_long = 0x00004d44;
pub const NCP_SUPER_MAGIC: ::c_long = 0x0000564c;
pub const NFS_SUPER_MAGIC: ::c_long = 0x00006969;
pub const OPENPROM_SUPER_MAGIC: ::c_long = 0x00009fa1;
pub const PROC_SUPER_MAGIC: ::c_long = 0x00009fa0;
pub const QNX4_SUPER_MAGIC: ::c_long = 0x0000002f;
pub const REISERFS_SUPER_MAGIC: ::c_long = 0x52654973;
pub const SMB_SUPER_MAGIC: ::c_long = 0x0000517b;
pub const TMPFS_MAGIC: ::c_long = 0x01021994;
pub const USBDEVICE_SUPER_MAGIC: ::c_long = 0x00009fa2;
pub const MAP_HUGETLB: ::c_int = 0x040000;
pub const PTRACE_TRACEME: ::c_int = 0;
pub const PTRACE_PEEKTEXT: ::c_int = 1;
pub const PTRACE_PEEKDATA: ::c_int = 2;
pub const PTRACE_PEEKUSER: ::c_int = 3;
pub const PTRACE_POKETEXT: ::c_int = 4;
pub const PTRACE_POKEDATA: ::c_int = 5;
pub const PTRACE_POKEUSER: ::c_int = 6;
pub const PTRACE_CONT: ::c_int = 7;
pub const PTRACE_KILL: ::c_int = 8;
pub const PTRACE_SINGLESTEP: ::c_int = 9;
pub const PTRACE_ATTACH: ::c_int = 16;
pub const PTRACE_DETACH: ::c_int = 17;
pub const PTRACE_SYSCALL: ::c_int = 24;
pub const PTRACE_SETOPTIONS: ::c_int = 0x4200;
pub const PTRACE_GETEVENTMSG: ::c_int = 0x4201;
pub const PTRACE_GETSIGINFO: ::c_int = 0x4202;
pub const PTRACE_SETSIGINFO: ::c_int = 0x4203;
pub const PTRACE_GETREGSET: ::c_int = 0x4204;
pub const PTRACE_SETREGSET: ::c_int = 0x4205;
pub const PTRACE_EVENT_STOP: ::c_int = 128;
pub const F_GETLK: ::c_int = 5;
pub const F_GETOWN: ::c_int = 9;
pub const F_SETOWN: ::c_int = 8;
pub const F_SETLK: ::c_int = 6;
pub const F_SETLKW: ::c_int = 7;
pub const F_RDLCK: ::c_int = 0;
pub const F_WRLCK: ::c_int = 1;
pub const F_UNLCK: ::c_int = 2;
pub const F_OFD_GETLK: ::c_int = 36;
pub const F_OFD_SETLK: ::c_int = 37;
pub const F_OFD_SETLKW: ::c_int = 38;
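// Example (added sketch, not part of the original bindings): taking a
// non-blocking open-file-description write lock over a whole file with the
// flock struct defined earlier and the F_OFD_* commands above. l_pid must be
// 0 for OFD locks; fcntl and SEEK_SET are assumed to be declared elsewhere
// in this crate.
unsafe fn _example_ofd_write_lock(fd: ::c_int) -> ::c_int {
    let mut fl = flock {
        l_type: F_WRLCK as ::c_short,
        l_whence: ::SEEK_SET as ::c_short,
        l_start: 0,
        l_len: 0, // 0 means "to end of file"
        l_pid: 0,
    };
    ::fcntl(fd, F_OFD_SETLK, &mut fl as *mut flock)
}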
pub const RLIMIT_CPU: ::c_int = 0;
pub const RLIMIT_FSIZE: ::c_int = 1;
pub const RLIMIT_DATA: ::c_int = 2;
pub const RLIMIT_STACK: ::c_int = 3;
pub const RLIMIT_CORE: ::c_int = 4;
pub const RLIMIT_RSS: ::c_int = 5;
pub const RLIMIT_NPROC: ::c_int = 6;
pub const RLIMIT_NOFILE: ::c_int = 7;
pub const RLIMIT_MEMLOCK: ::c_int = 8;
pub const RLIMIT_AS: ::c_int = 9;
pub const RLIMIT_LOCKS: ::c_int = 10;
pub const RLIMIT_SIGPENDING: ::c_int = 11;
pub const RLIMIT_MSGQUEUE: ::c_int = 12;
pub const RLIMIT_NICE: ::c_int = 13;
pub const RLIMIT_RTPRIO: ::c_int = 14;
pub const RLIM_NLIMITS: ::c_int = 16;
pub const RLIM_INFINITY: ::rlim_t = !0;
pub const TCGETS: ::c_int = 0x5401;
pub const TCSETS: ::c_int = 0x5402;
pub const TCSETSW: ::c_int = 0x5403;
pub const TCSETSF: ::c_int = 0x5404;
pub const TCGETS2: ::c_int = 0x802c542a;
pub const TCSETS2: ::c_int = 0x402c542b;
pub const TCSETSW2: ::c_int = 0x402c542c;
pub const TCSETSF2: ::c_int = 0x402c542d;
pub const TCGETA: ::c_int = 0x5405;
pub const TCSETA: ::c_int = 0x5406;
pub const TCSETAW: ::c_int = 0x5407;
pub const TCSETAF: ::c_int = 0x5408;
pub const TCSBRK: ::c_int = 0x5409;
pub const TCXONC: ::c_int = 0x540A;
pub const TCFLSH: ::c_int = 0x540B;
pub const TIOCGSOFTCAR: ::c_int = 0x5419;
pub const TIOCSSOFTCAR: ::c_int = 0x541A;
pub const TIOCINQ: ::c_int = 0x541B;
pub const TIOCLINUX: ::c_int = 0x541C;
pub const TIOCGSERIAL: ::c_int = 0x541E;
pub const TIOCEXCL: ::c_int = 0x540C;
pub const TIOCNXCL: ::c_int = 0x540D;
pub const TIOCSCTTY: ::c_int = 0x540E;
pub const TIOCGPGRP: ::c_int = 0x540F;
pub const TIOCSPGRP: ::c_int = 0x5410;
pub const TIOCOUTQ: ::c_int = 0x5411;
pub const TIOCSTI: ::c_int = 0x5412;
pub const TIOCGWINSZ: ::c_int = 0x5413;
pub const TIOCSWINSZ: ::c_int = 0x5414;
pub const TIOCMGET: ::c_int = 0x5415;
pub const TIOCMBIS: ::c_int = 0x5416;
pub const TIOCMBIC: ::c_int = 0x5417;
pub const TIOCMSET: ::c_int = 0x5418;
pub const FIONREAD: ::c_int = 0x541B;
pub const TIOCCONS: ::c_int = 0x541D;
pub const TIOCSBRK: ::c_int = 0x5427;
pub const TIOCCBRK: ::c_int = 0x5428;
pub const ST_RDONLY: ::c_ulong = 1;
pub const ST_NOSUID: ::c_ulong = 2;
pub const ST_NODEV: ::c_ulong = 4;
pub const ST_NOEXEC: ::c_ulong = 8;
pub const ST_SYNCHRONOUS: ::c_ulong = 16;
pub const ST_MANDLOCK: ::c_ulong = 64;
pub const ST_NOATIME: ::c_ulong = 1024;
pub const ST_NODIRATIME: ::c_ulong = 2048;
pub const ST_RELATIME: ::c_ulong = 4096;
pub const RTLD_NOLOAD: ::c_int = 0x4;
pub const SEM_FAILED: *mut sem_t = 0 as *mut sem_t;
pub const AI_PASSIVE: ::c_int = 0x00000001;
pub const AI_CANONNAME: ::c_int = 0x00000002;
pub const AI_NUMERICHOST: ::c_int = 0x00000004;
pub const AI_NUMERICSERV: ::c_int = 0x00000008;
pub const AI_MASK: ::c_int =
AI_PASSIVE | AI_CANONNAME | AI_NUMERICHOST | AI_NUMERICSERV | AI_ADDRCONFIG;
pub const AI_ALL: ::c_int = 0x00000100;
pub const AI_V4MAPPED_CFG: ::c_int = 0x00000200;
pub const AI_ADDRCONFIG: ::c_int = 0x00000400;
pub const AI_V4MAPPED: ::c_int = 0x00000800;
pub const AI_DEFAULT: ::c_int = AI_V4MAPPED_CFG | AI_ADDRCONFIG;
pub const LINUX_REBOOT_MAGIC1: ::c_int = 0xfee1dead;
pub const LINUX_REBOOT_MAGIC2: ::c_int = 672274793;
pub const LINUX_REBOOT_MAGIC2A: ::c_int = 85072278;
pub const LINUX_REBOOT_MAGIC2B: ::c_int = 369367448;
pub const LINUX_REBOOT_MAGIC2C: ::c_int = 537993216;
pub const LINUX_REBOOT_CMD_RESTART: ::c_int = 0x01234567;
pub const LINUX_REBOOT_CMD_HALT: ::c_int = 0xCDEF0123;
pub const LINUX_REBOOT_CMD_CAD_ON: ::c_int = 0x89ABCDEF;
pub const LINUX_REBOOT_CMD_CAD_OFF: ::c_int = 0x00000000;
pub const LINUX_REBOOT_CMD_POWER_OFF: ::c_int = 0x4321FEDC;
pub const LINUX_REBOOT_CMD_RESTART2: ::c_int = 0xA1B2C3D4;
pub const LINUX_REBOOT_CMD_SW_SUSPEND: ::c_int = 0xD000FCE2;
pub const LINUX_REBOOT_CMD_KEXEC: ::c_int = 0x45584543;
pub const REG_BASIC: ::c_int = 0;
pub const REG_EXTENDED: ::c_int = 1;
pub const REG_ICASE: ::c_int = 2;
pub const REG_NOSUB: ::c_int = 4;
pub const REG_NEWLINE: ::c_int = 8;
pub const REG_NOSPEC: ::c_int = 16;
pub const REG_PEND: ::c_int = 32;
pub const REG_DUMP: ::c_int = 128;
pub const REG_NOMATCH: ::c_int = 1;
pub const REG_BADPAT: ::c_int = 2;
pub const REG_ECOLLATE: ::c_int = 3;
pub const REG_ECTYPE: ::c_int = 4;
pub const REG_EESCAPE: ::c_int = 5;
pub const REG_ESUBREG: ::c_int = 6;
pub const REG_EBRACK: ::c_int = 7;
pub const REG_EPAREN: ::c_int = 8;
pub const REG_EBRACE: ::c_int = 9;
pub const REG_BADBR: ::c_int = 10;
pub const REG_ERANGE: ::c_int = 11;
pub const REG_ESPACE: ::c_int = 12;
pub const REG_BADRPT: ::c_int = 13;
pub const REG_EMPTY: ::c_int = 14;
pub const REG_ASSERT: ::c_int = 15;
pub const REG_INVARG: ::c_int = 16;
pub const REG_ATOI: ::c_int = 255;
pub const REG_ITOA: ::c_int = 256;
pub const REG_NOTBOL: ::c_int = 1;
pub const REG_NOTEOL: ::c_int = 2;
pub const REG_STARTEND: ::c_int = 4;
pub const REG_TRACE: ::c_int = 256;
pub const REG_LARGE: ::c_int = 512;
pub const REG_BACKR: ::c_int = 1024;
pub const MCL_CURRENT: ::c_int = 0x0001;
pub const MCL_FUTURE: ::c_int = 0x0002;
pub const CBAUD: ::tcflag_t = 0o0010017;
pub const TAB1: ::tcflag_t = 0x00000800;
pub const TAB2: ::tcflag_t = 0x00001000;
pub const TAB3: ::tcflag_t = 0x00001800;
pub const CR1: ::tcflag_t = 0x00000200;
pub const CR2: ::tcflag_t = 0x00000400;
pub const CR3: ::tcflag_t = 0x00000600;
pub const FF1: ::tcflag_t = 0x00008000;
pub const BS1: ::tcflag_t = 0x00002000;
pub const VT1: ::tcflag_t = 0x00004000;
pub const VWERASE: usize = 14;
pub const VREPRINT: usize = 12;
pub const VSUSP: usize = 10;
pub const VSTART: usize = 8;
pub const VSTOP: usize = 9;
pub const VDISCARD: usize = 13;
pub const VTIME: usize = 5;
pub const IXON: ::tcflag_t = 0x00000400;
pub const IXOFF: ::tcflag_t = 0x00001000;
pub const ONLCR: ::tcflag_t = 0x4;
pub const CSIZE: ::tcflag_t = 0x00000030;
pub const CS6: ::tcflag_t = 0x00000010;
pub const CS7: ::tcflag_t = 0x00000020;
pub const CS8: ::tcflag_t = 0x00000030;
pub const CSTOPB: ::tcflag_t = 0x00000040;
pub const CREAD: ::tcflag_t = 0x00000080;
pub const PARENB: ::tcflag_t = 0x00000100;
pub const PARODD: ::tcflag_t = 0x00000200;
pub const HUPCL: ::tcflag_t = 0x00000400;
pub const CLOCAL: ::tcflag_t = 0x00000800;
pub const ECHOKE: ::tcflag_t = 0x00000800;
pub const ECHOE: ::tcflag_t = 0x00000010;
pub const ECHOK: ::tcflag_t = 0x00000020;
pub const ECHONL: ::tcflag_t = 0x00000040;
pub const ECHOPRT: ::tcflag_t = 0x00000400;
pub const ECHOCTL: ::tcflag_t = 0x00000200;
pub const ISIG: ::tcflag_t = 0x00000001;
pub const ICANON: ::tcflag_t = 0x00000002;
pub const PENDIN: ::tcflag_t = 0x00004000;
pub const NOFLSH: ::tcflag_t = 0x00000080;
pub const VSWTC: usize = 7;
pub const OLCUC: ::tcflag_t = 0o000002;
pub const NLDLY: ::tcflag_t = 0o000400;
pub const CRDLY: ::tcflag_t = 0o003000;
pub const TABDLY: ::tcflag_t = 0o014000;
pub const BSDLY: ::tcflag_t = 0o020000;
pub const FFDLY: ::tcflag_t = 0o100000;
pub const VTDLY: ::tcflag_t = 0o040000;
pub const XTABS: ::tcflag_t = 0o014000;
pub const B0: ::speed_t = 0o000000;
pub const B50: ::speed_t = 0o000001;
pub const B75: ::speed_t = 0o000002;
pub const B110: ::speed_t = 0o000003;
pub const B134: ::speed_t = 0o000004;
pub const B150: ::speed_t = 0o000005;
pub const B200: ::speed_t = 0o000006;
pub const B300: ::speed_t = 0o000007;
pub const B600: ::speed_t = 0o000010;
pub const B1200: ::speed_t = 0o000011;
pub const B1800: ::speed_t = 0o000012;
pub const B2400: ::speed_t = 0o000013;
pub const B4800: ::speed_t = 0o000014;
pub const B9600: ::speed_t = 0o000015;
pub const B19200: ::speed_t = 0o000016;
pub const B38400: ::speed_t = 0o000017;
pub const EXTA: ::speed_t = B19200;
pub const EXTB: ::speed_t = B38400;
pub const BOTHER: ::speed_t = 0o010000;
pub const B57600: ::speed_t = 0o010001;
pub const B115200: ::speed_t = 0o010002;
pub const B230400: ::speed_t = 0o010003;
pub const B460800: ::speed_t = 0o010004;
pub const B500000: ::speed_t = 0o010005;
pub const B576000: ::speed_t = 0o010006;
pub const B921600: ::speed_t = 0o010007;
pub const B1000000: ::speed_t = 0o010010;
pub const B1152000: ::speed_t = 0o010011;
pub const B1500000: ::speed_t = 0o010012;
pub const B2000000: ::speed_t = 0o010013;
pub const B2500000: ::speed_t = 0o010014;
pub const B3000000: ::speed_t = 0o010015;
pub const B3500000: ::speed_t = 0o010016;
pub const B4000000: ::speed_t = 0o010017;
pub const IBSHIFT: ::tcflag_t = 16;
pub const EAI_AGAIN: ::c_int = 2;
pub const EAI_BADFLAGS: ::c_int = 3;
pub const EAI_FAIL: ::c_int = 4;
pub const EAI_FAMILY: ::c_int = 5;
pub const EAI_MEMORY: ::c_int = 6;
pub const EAI_NODATA: ::c_int = 7;
pub const EAI_NONAME: ::c_int = 8;
pub const EAI_SERVICE: ::c_int = 9;
pub const EAI_SOCKTYPE: ::c_int = 10;
pub const EAI_SYSTEM: ::c_int = 11;
pub const EAI_OVERFLOW: ::c_int = 14;
pub const NETLINK_ROUTE: ::c_int = 0;
pub const NETLINK_UNUSED: ::c_int = 1;
pub const NETLINK_USERSOCK: ::c_int = 2;
pub const NETLINK_FIREWALL: ::c_int = 3;
pub const NETLINK_SOCK_DIAG: ::c_int = 4;
pub const NETLINK_NFLOG: ::c_int = 5;
pub const NETLINK_XFRM: ::c_int = 6;
pub const NETLINK_SELINUX: ::c_int = 7;
pub const NETLINK_ISCSI: ::c_int = 8;
pub const NETLINK_AUDIT: ::c_int = 9;
pub const NETLINK_FIB_LOOKUP: ::c_int = 10;
pub const NETLINK_CONNECTOR: ::c_int = 11;
pub const NETLINK_NETFILTER: ::c_int = 12;
pub const NETLINK_IP6_FW: ::c_int = 13;
pub const NETLINK_DNRTMSG: ::c_int = 14;
pub const NETLINK_KOBJECT_UEVENT: ::c_int = 15;
pub const NETLINK_GENERIC: ::c_int = 16;
pub const NETLINK_SCSITRANSPORT: ::c_int = 18;
pub const NETLINK_ECRYPTFS: ::c_int = 19;
pub const NETLINK_RDMA: ::c_int = 20;
pub const NETLINK_CRYPTO: ::c_int = 21;
pub const NETLINK_INET_DIAG: ::c_int = NETLINK_SOCK_DIAG;
pub const MAX_LINKS: ::c_int = 32;
pub const NLM_F_REQUEST: ::c_int = 1;
pub const NLM_F_MULTI: ::c_int = 2;
pub const NLM_F_ACK: ::c_int = 4;
pub const NLM_F_ECHO: ::c_int = 8;
pub const NLM_F_DUMP_INTR: ::c_int = 16;
pub const NLM_F_ROOT: ::c_int = 0x100;
pub const NLM_F_MATCH: ::c_int = 0x200;
pub const NLM_F_ATOMIC: ::c_int = 0x400;
pub const NLM_F_DUMP: ::c_int = NLM_F_ROOT | NLM_F_MATCH;
pub const NLM_F_REPLACE: ::c_int = 0x100;
pub const NLM_F_EXCL: ::c_int = 0x200;
pub const NLM_F_CREATE: ::c_int = 0x400;
pub const NLM_F_APPEND: ::c_int = 0x800;
pub const NLMSG_NOOP: ::c_int = 0x1;
pub const NLMSG_ERROR: ::c_int = 0x2;
pub const NLMSG_DONE: ::c_int = 0x3;
pub const NLMSG_OVERRUN: ::c_int = 0x4;
pub const NLMSG_MIN_TYPE: ::c_int = 0x10;
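// Example (added sketch, not part of the original bindings): opening and
// binding a NETLINK_ROUTE socket with the sockaddr_nl struct defined earlier
// and the NETLINK_* constants above. AF_NETLINK, SOCK_RAW, socket and bind
// are assumed to be declared elsewhere in this crate; error handling is
// elided.
unsafe fn _example_open_rtnetlink() -> ::c_int {
    let fd = ::socket(::AF_NETLINK, ::SOCK_RAW, NETLINK_ROUTE);
    let mut addr: sockaddr_nl = ::core::mem::zeroed();
    addr.nl_family = ::AF_NETLINK as ::sa_family_t;
    ::bind(
        fd,
        &addr as *const sockaddr_nl as *const ::sockaddr,
        ::core::mem::size_of::<sockaddr_nl>() as ::socklen_t,
    );
    fd
}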
// linux/netfilter/nfnetlink.h
pub const NFNLGRP_NONE: ::c_int = 0;
pub const NFNLGRP_CONNTRACK_NEW: ::c_int = 1;
pub const NFNLGRP_CONNTRACK_UPDATE: ::c_int = 2;
pub const NFNLGRP_CONNTRACK_DESTROY: ::c_int = 3;
pub const NFNLGRP_CONNTRACK_EXP_NEW: ::c_int = 4;
pub const NFNLGRP_CONNTRACK_EXP_UPDATE: ::c_int = 5;
pub const NFNLGRP_CONNTRACK_EXP_DESTROY: ::c_int = 6;
pub const NFNLGRP_NFTABLES: ::c_int = 7;
pub const NFNLGRP_ACCT_QUOTA: ::c_int = 8;
pub const NFNETLINK_V0: ::c_int = 0;
pub const NFNL_SUBSYS_NONE: ::c_int = 0;
pub const NFNL_SUBSYS_CTNETLINK: ::c_int = 1;
pub const NFNL_SUBSYS_CTNETLINK_EXP: ::c_int = 2;
pub const NFNL_SUBSYS_QUEUE: ::c_int = 3;
pub const NFNL_SUBSYS_ULOG: ::c_int = 4;
pub const NFNL_SUBSYS_OSF: ::c_int = 5;
pub const NFNL_SUBSYS_IPSET: ::c_int = 6;
pub const NFNL_SUBSYS_ACCT: ::c_int = 7;
pub const NFNL_SUBSYS_CTNETLINK_TIMEOUT: ::c_int = 8;
pub const NFNL_SUBSYS_CTHELPER: ::c_int = 9;
pub const NFNL_SUBSYS_NFTABLES: ::c_int = 10;
pub const NFNL_SUBSYS_NFT_COMPAT: ::c_int = 11;
pub const NFNL_SUBSYS_COUNT: ::c_int = 12;
pub const NFNL_MSG_BATCH_BEGIN: ::c_int = NLMSG_MIN_TYPE;
pub const NFNL_MSG_BATCH_END: ::c_int = NLMSG_MIN_TYPE + 1;
// linux/netfilter/nfnetlink_log.h
pub const NFULNL_MSG_PACKET: ::c_int = 0;
pub const NFULNL_MSG_CONFIG: ::c_int = 1;
pub const NFULA_UNSPEC: ::c_int = 0;
pub const NFULA_PACKET_HDR: ::c_int = 1;
pub const NFULA_MARK: ::c_int = 2;
pub const NFULA_TIMESTAMP: ::c_int = 3;
pub const NFULA_IFINDEX_INDEV: ::c_int = 4;
pub const NFULA_IFINDEX_OUTDEV: ::c_int = 5;
pub const NFULA_IFINDEX_PHYSINDEV: ::c_int = 6;
pub const NFULA_IFINDEX_PHYSOUTDEV: ::c_int = 7;
pub const NFULA_HWADDR: ::c_int = 8;
pub const NFULA_PAYLOAD: ::c_int = 9;
pub const NFULA_PREFIX: ::c_int = 10;
pub const NFULA_UID: ::c_int = 11;
pub const NFULA_SEQ: ::c_int = 12;
pub const NFULA_SEQ_GLOBAL: ::c_int = 13;
pub const NFULA_GID: ::c_int = 14;
pub const NFULA_HWTYPE: ::c_int = 15;
pub const NFULA_HWHEADER: ::c_int = 16;
pub const NFULA_HWLEN: ::c_int = 17;
pub const NFULA_CT: ::c_int = 18;
pub const NFULA_CT_INFO: ::c_int = 19;
pub const NFULNL_CFG_CMD_NONE: ::c_int = 0;
pub const NFULNL_CFG_CMD_BIND: ::c_int = 1;
pub const NFULNL_CFG_CMD_UNBIND: ::c_int = 2;
pub const NFULNL_CFG_CMD_PF_BIND: ::c_int = 3;
pub const NFULNL_CFG_CMD_PF_UNBIND: ::c_int = 4;
pub const NFULA_CFG_UNSPEC: ::c_int = 0;
pub const NFULA_CFG_CMD: ::c_int = 1;
pub const NFULA_CFG_MODE: ::c_int = 2;
pub const NFULA_CFG_NLBUFSIZ: ::c_int = 3;
pub const NFULA_CFG_TIMEOUT: ::c_int = 4;
pub const NFULA_CFG_QTHRESH: ::c_int = 5;
pub const NFULA_CFG_FLAGS: ::c_int = 6;
pub const NFULNL_COPY_NONE: ::c_int = 0x00;
pub const NFULNL_COPY_META: ::c_int = 0x01;
pub const NFULNL_COPY_PACKET: ::c_int = 0x02;
pub const NFULNL_CFG_F_SEQ: ::c_int = 0x0001;
pub const NFULNL_CFG_F_SEQ_GLOBAL: ::c_int = 0x0002;
pub const NFULNL_CFG_F_CONNTRACK: ::c_int = 0x0004;
// linux/netfilter/nfnetlink_queue.h
pub const NFQNL_MSG_PACKET: ::c_int = 0;
pub const NFQNL_MSG_VERDICT: ::c_int = 1;
pub const NFQNL_MSG_CONFIG: ::c_int = 2;
pub const NFQNL_MSG_VERDICT_BATCH: ::c_int = 3;
pub const NFQA_UNSPEC: ::c_int = 0;
pub const NFQA_PACKET_HDR: ::c_int = 1;
pub const NFQA_VERDICT_HDR: ::c_int = 2;
pub const NFQA_MARK: ::c_int = 3;
pub const NFQA_TIMESTAMP: ::c_int = 4;
pub const NFQA_IFINDEX_INDEV: ::c_int = 5;
pub const NFQA_IFINDEX_OUTDEV: ::c_int = 6;
pub const NFQA_IFINDEX_PHYSINDEV: ::c_int = 7;
pub const NFQA_IFINDEX_PHYSOUTDEV: ::c_int = 8;
pub const NFQA_HWADDR: ::c_int = 9;
pub const NFQA_PAYLOAD: ::c_int = 10;
pub const NFQA_CT: ::c_int = 11;
pub const NFQA_CT_INFO: ::c_int = 12;
pub const NFQA_CAP_LEN: ::c_int = 13;
pub const NFQA_SKB_INFO: ::c_int = 14;
pub const NFQA_EXP: ::c_int = 15;
pub const NFQA_UID: ::c_int = 16;
pub const NFQA_GID: ::c_int = 17;
pub const NFQA_SECCTX: ::c_int = 18;
/*
FIXME: These are not yet available in musl sanitized kernel headers and
make the tests fail. Enable them once musl has them.
See https://github.com/rust-lang/libc/pull/1628 for more details.
pub const NFQA_VLAN: ::c_int = 19;
pub const NFQA_L2HDR: ::c_int = 20;
pub const NFQA_VLAN_UNSPEC: ::c_int = 0;
pub const NFQA_VLAN_PROTO: ::c_int = 1;
pub const NFQA_VLAN_TCI: ::c_int = 2;
*/
pub const NFQNL_CFG_CMD_NONE: ::c_int = 0;
pub const NFQNL_CFG_CMD_BIND: ::c_int = 1;
pub const NFQNL_CFG_CMD_UNBIND: ::c_int = 2;
pub const NFQNL_CFG_CMD_PF_BIND: ::c_int = 3;
pub const NFQNL_CFG_CMD_PF_UNBIND: ::c_int = 4;
pub const NFQNL_COPY_NONE: ::c_int = 0;
pub const NFQNL_COPY_META: ::c_int = 1;
pub const NFQNL_COPY_PACKET: ::c_int = 2;
pub const NFQA_CFG_UNSPEC: ::c_int = 0;
pub const NFQA_CFG_CMD: ::c_int = 1;
pub const NFQA_CFG_PARAMS: ::c_int = 2;
pub const NFQA_CFG_QUEUE_MAXLEN: ::c_int = 3;
pub const NFQA_CFG_MASK: ::c_int = 4;
pub const NFQA_CFG_FLAGS: ::c_int = 5;
pub const NFQA_CFG_F_FAIL_OPEN: ::c_int = 0x0001;
pub const NFQA_CFG_F_CONNTRACK: ::c_int = 0x0002;
pub const NFQA_CFG_F_GSO: ::c_int = 0x0004;
pub const NFQA_CFG_F_UID_GID: ::c_int = 0x0008;
pub const NFQA_CFG_F_SECCTX: ::c_int = 0x0010;
pub const NFQA_CFG_F_MAX: ::c_int = 0x0020;
pub const NFQA_SKB_CSUMNOTREADY: ::c_int = 0x0001;
pub const NFQA_SKB_GSO: ::c_int = 0x0002;
pub const NFQA_SKB_CSUM_NOTVERIFIED: ::c_int = 0x0004;
pub const GENL_NAMSIZ: ::c_int = 16;
pub const GENL_MIN_ID: ::c_int = NLMSG_MIN_TYPE;
pub const GENL_MAX_ID: ::c_int = 1023;
pub const GENL_ADMIN_PERM: ::c_int = 0x01;
pub const GENL_CMD_CAP_DO: ::c_int = 0x02;
pub const GENL_CMD_CAP_DUMP: ::c_int = 0x04;
pub const GENL_CMD_CAP_HASPOL: ::c_int = 0x08;
pub const GENL_UNS_ADMIN_PERM: ::c_int = 0x10;
pub const GENL_ID_CTRL: ::c_int = NLMSG_MIN_TYPE;
pub const GENL_ID_VFS_DQUOT: ::c_int = NLMSG_MIN_TYPE + 1;
pub const GENL_ID_PMCRAID: ::c_int = NLMSG_MIN_TYPE + 2;
pub const CTRL_CMD_UNSPEC: ::c_int = 0;
pub const CTRL_CMD_NEWFAMILY: ::c_int = 1;
pub const CTRL_CMD_DELFAMILY: ::c_int = 2;
pub const CTRL_CMD_GETFAMILY: ::c_int = 3;
pub const CTRL_CMD_NEWOPS: ::c_int = 4;
pub const CTRL_CMD_DELOPS: ::c_int = 5;
pub const CTRL_CMD_GETOPS: ::c_int = 6;
pub const CTRL_CMD_NEWMCAST_GRP: ::c_int = 7;
pub const CTRL_CMD_DELMCAST_GRP: ::c_int = 8;
pub const CTRL_CMD_GETMCAST_GRP: ::c_int = 9;
pub const CTRL_ATTR_UNSPEC: ::c_int = 0;
pub const CTRL_ATTR_FAMILY_ID: ::c_int = 1;
pub const CTRL_ATTR_FAMILY_NAME: ::c_int = 2;
pub const CTRL_ATTR_VERSION: ::c_int = 3;
pub const CTRL_ATTR_HDRSIZE: ::c_int = 4;
pub const CTRL_ATTR_MAXATTR: ::c_int = 5;
pub const CTRL_ATTR_OPS: ::c_int = 6;
pub const CTRL_ATTR_MCAST_GROUPS: ::c_int = 7;
pub const CTRL_ATTR_OP_UNSPEC: ::c_int = 0;
pub const CTRL_ATTR_OP_ID: ::c_int = 1;
pub const CTRL_ATTR_OP_FLAGS: ::c_int = 2;
pub const CTRL_ATTR_MCAST_GRP_UNSPEC: ::c_int = 0;
pub const CTRL_ATTR_MCAST_GRP_NAME: ::c_int = 1;
pub const CTRL_ATTR_MCAST_GRP_ID: ::c_int = 2;
pub const NETLINK_ADD_MEMBERSHIP: ::c_int = 1;
pub const NETLINK_DROP_MEMBERSHIP: ::c_int = 2;
pub const NETLINK_PKTINFO: ::c_int = 3;
pub const NETLINK_BROADCAST_ERROR: ::c_int = 4;
pub const NETLINK_NO_ENOBUFS: ::c_int = 5;
pub const NETLINK_RX_RING: ::c_int = 6;
pub const NETLINK_TX_RING: ::c_int = 7;
pub const NETLINK_LISTEN_ALL_NSID: ::c_int = 8;
pub const NETLINK_LIST_MEMBERSHIPS: ::c_int = 9;
pub const NETLINK_CAP_ACK: ::c_int = 10;
pub const NETLINK_EXT_ACK: ::c_int = 11;
pub const NETLINK_GET_STRICT_CHK: ::c_int = 12;
pub const GRND_NONBLOCK: ::c_uint = 0x0001;
pub const GRND_RANDOM: ::c_uint = 0x0002;
pub const SECCOMP_MODE_DISABLED: ::c_uint = 0;
pub const SECCOMP_MODE_STRICT: ::c_uint = 1;
pub const SECCOMP_MODE_FILTER: ::c_uint = 2;
pub const SECCOMP_FILTER_FLAG_TSYNC: ::c_ulong = 1;
pub const SECCOMP_FILTER_FLAG_LOG: ::c_ulong = 2;
pub const SECCOMP_FILTER_FLAG_SPEC_ALLOW: ::c_ulong = 4;
pub const SECCOMP_FILTER_FLAG_NEW_LISTENER: ::c_ulong = 8;
pub const SECCOMP_RET_ACTION_FULL: ::c_uint = 0xffff0000;
pub const SECCOMP_RET_ACTION: ::c_uint = 0x7fff0000;
pub const SECCOMP_RET_DATA: ::c_uint = 0x0000ffff;
pub const SECCOMP_RET_KILL_PROCESS: ::c_uint = 0x80000000;
pub const SECCOMP_RET_KILL_THREAD: ::c_uint = 0x00000000;
pub const SECCOMP_RET_KILL: ::c_uint = SECCOMP_RET_KILL_THREAD;
pub const SECCOMP_RET_TRAP: ::c_uint = 0x00030000;
pub const SECCOMP_RET_ERRNO: ::c_uint = 0x00050000;
pub const SECCOMP_RET_USER_NOTIF: ::c_uint = 0x7fc00000;
pub const SECCOMP_RET_TRACE: ::c_uint = 0x7ff00000;
pub const SECCOMP_RET_LOG: ::c_uint = 0x7ffc0000;
pub const SECCOMP_RET_ALLOW: ::c_uint = 0x7fff0000;
pub const NLA_F_NESTED: ::c_int = 1 << 15;
pub const NLA_F_NET_BYTEORDER: ::c_int = 1 << 14;
pub const NLA_TYPE_MASK: ::c_int = !(NLA_F_NESTED | NLA_F_NET_BYTEORDER);
pub const NLA_ALIGNTO: ::c_int = 4;
pub const SIGEV_THREAD_ID: ::c_int = 4;
pub const CIBAUD: ::tcflag_t = 0o02003600000;
pub const CBAUDEX: ::tcflag_t = 0o010000;
pub const TIOCM_LE: ::c_int = 0x001;
pub const TIOCM_DTR: ::c_int = 0x002;
pub const TIOCM_RTS: ::c_int = 0x004;
pub const TIOCM_ST: ::c_int = 0x008;
pub const TIOCM_SR: ::c_int = 0x010;
pub const TIOCM_CTS: ::c_int = 0x020;
pub const TIOCM_CAR: ::c_int = 0x040;
pub const TIOCM_RNG: ::c_int = 0x080;
pub const TIOCM_DSR: ::c_int = 0x100;
pub const TIOCM_CD: ::c_int = TIOCM_CAR;
pub const TIOCM_RI: ::c_int = TIOCM_RNG;
pub const POLLWRNORM: ::c_short = 0x100;
pub const POLLWRBAND: ::c_short = 0x200;
pub const SFD_CLOEXEC: ::c_int = O_CLOEXEC;
pub const SFD_NONBLOCK: ::c_int = O_NONBLOCK;
pub const SOCK_NONBLOCK: ::c_int = O_NONBLOCK;
pub const SO_ORIGINAL_DST: ::c_int = 80;
pub const IP_RECVFRAGSIZE: ::c_int = 25;
pub const IPV6_FLOWINFO: ::c_int = 11;
pub const IPV6_MULTICAST_ALL: ::c_int = 29;
pub const IPV6_ROUTER_ALERT_ISOLATE: ::c_int = 30;
pub const IPV6_FLOWLABEL_MGR: ::c_int = 32;
pub const IPV6_FLOWINFO_SEND: ::c_int = 33;
pub const IPV6_RECVFRAGSIZE: ::c_int = 77;
pub const IPV6_FREEBIND: ::c_int = 78;
pub const IPV6_FLOWINFO_FLOWLABEL: ::c_int = 0x000fffff;
pub const IPV6_FLOWINFO_PRIORITY: ::c_int = 0x0ff00000;
pub const IUTF8: ::tcflag_t = 0x00004000;
pub const CMSPAR: ::tcflag_t = 0o10000000000;
pub const O_TMPFILE: ::c_int = 0o20000000 | O_DIRECTORY;
pub const MFD_CLOEXEC: ::c_uint = 0x0001;
pub const MFD_ALLOW_SEALING: ::c_uint = 0x0002;
pub const MFD_HUGETLB: ::c_uint = 0x0004;
// These are used in the p_type field of Elf32_Phdr and Elf64_Phdr, which has
// the type Elf32_Word and Elf64_Word respectively. Luckily, both of those are
// u32, so we can use that type here to avoid having to cast; a usage sketch
// follows the PT_* list below.
pub const PT_NULL: u32 = 0;
pub const PT_LOAD: u32 = 1;
pub const PT_DYNAMIC: u32 = 2;
pub const PT_INTERP: u32 = 3;
pub const PT_NOTE: u32 = 4;
pub const PT_SHLIB: u32 = 5;
pub const PT_PHDR: u32 = 6;
pub const PT_TLS: u32 = 7;
pub const PT_LOOS: u32 = 0x60000000;
pub const PT_GNU_EH_FRAME: u32 = 0x6474e550;
pub const PT_GNU_STACK: u32 = 0x6474e551;
pub const PT_GNU_RELRO: u32 = 0x6474e552;
pub const PT_HIOS: u32 = 0x6fffffff;
pub const PT_LOPROC: u32 = 0x70000000;
pub const PT_HIPROC: u32 = 0x7fffffff;
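// A minimal illustrative sketch (not part of the original source) of matching
// these p_type values while walking program headers with dl_iterate_phdr,
// which is declared further down in this file; the callback name and the
// counting logic are assumptions.
//
//     unsafe extern "C" fn count_load_segments(
//         info: *mut dl_phdr_info,
//         _size: usize,
//         data: *mut ::c_void,
//     ) -> ::c_int {
//         let phdrs = core::slice::from_raw_parts((*info).dlpi_phdr,
//                                                 (*info).dlpi_phnum as usize);
//         for phdr in phdrs {
//             if phdr.p_type == PT_LOAD {
//                 *(data as *mut usize) += 1; // one more loadable segment
//             }
//         }
//         0 // a non-zero return would stop the iteration early
//     }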
// linux/netfilter.h
pub const NF_DROP: ::c_int = 0;
pub const NF_ACCEPT: ::c_int = 1;
pub const NF_STOLEN: ::c_int = 2;
pub const NF_QUEUE: ::c_int = 3;
pub const NF_REPEAT: ::c_int = 4;
pub const NF_STOP: ::c_int = 5;
pub const NF_MAX_VERDICT: ::c_int = NF_STOP;
pub const NF_VERDICT_MASK: ::c_int = 0x000000ff;
pub const NF_VERDICT_FLAG_QUEUE_BYPASS: ::c_int = 0x00008000;
pub const NF_VERDICT_QMASK: ::c_int = 0xffff0000;
pub const NF_VERDICT_QBITS: ::c_int = 16;
pub const NF_VERDICT_BITS: ::c_int = 16;
pub const NF_INET_PRE_ROUTING: ::c_int = 0;
pub const NF_INET_LOCAL_IN: ::c_int = 1;
pub const NF_INET_FORWARD: ::c_int = 2;
pub const NF_INET_LOCAL_OUT: ::c_int = 3;
pub const NF_INET_POST_ROUTING: ::c_int = 4;
pub const NF_INET_NUMHOOKS: ::c_int = 5;
pub const NF_NETDEV_INGRESS: ::c_int = 0;
pub const NF_NETDEV_NUMHOOKS: ::c_int = 1;
pub const NFPROTO_UNSPEC: ::c_int = 0;
pub const NFPROTO_INET: ::c_int = 1;
pub const NFPROTO_IPV4: ::c_int = 2;
pub const NFPROTO_ARP: ::c_int = 3;
pub const NFPROTO_NETDEV: ::c_int = 5;
pub const NFPROTO_BRIDGE: ::c_int = 7;
pub const NFPROTO_IPV6: ::c_int = 10;
pub const NFPROTO_DECNET: ::c_int = 12;
pub const NFPROTO_NUMPROTO: ::c_int = 13;
// linux/netfilter_ipv4.h
pub const NF_IP_PRE_ROUTING: ::c_int = 0;
pub const NF_IP_LOCAL_IN: ::c_int = 1;
pub const NF_IP_FORWARD: ::c_int = 2;
pub const NF_IP_LOCAL_OUT: ::c_int = 3;
pub const NF_IP_POST_ROUTING: ::c_int = 4;
pub const NF_IP_NUMHOOKS: ::c_int = 5;
pub const NF_IP_PRI_FIRST: ::c_int = ::INT_MIN;
pub const NF_IP_PRI_CONNTRACK_DEFRAG: ::c_int = -400;
pub const NF_IP_PRI_RAW: ::c_int = -300;
pub const NF_IP_PRI_SELINUX_FIRST: ::c_int = -225;
pub const NF_IP_PRI_CONNTRACK: ::c_int = -200;
pub const NF_IP_PRI_MANGLE: ::c_int = -150;
pub const NF_IP_PRI_NAT_DST: ::c_int = -100;
pub const NF_IP_PRI_FILTER: ::c_int = 0;
pub const NF_IP_PRI_SECURITY: ::c_int = 50;
pub const NF_IP_PRI_NAT_SRC: ::c_int = 100;
pub const NF_IP_PRI_SELINUX_LAST: ::c_int = 225;
pub const NF_IP_PRI_CONNTRACK_HELPER: ::c_int = 300;
pub const NF_IP_PRI_CONNTRACK_CONFIRM: ::c_int = ::INT_MAX;
pub const NF_IP_PRI_LAST: ::c_int = ::INT_MAX;
// linux/netfilter_ipv6.h
pub const NF_IP6_PRE_ROUTING: ::c_int = 0;
pub const NF_IP6_LOCAL_IN: ::c_int = 1;
pub const NF_IP6_FORWARD: ::c_int = 2;
pub const NF_IP6_LOCAL_OUT: ::c_int = 3;
pub const NF_IP6_POST_ROUTING: ::c_int = 4;
pub const NF_IP6_NUMHOOKS: ::c_int = 5;
pub const NF_IP6_PRI_FIRST: ::c_int = ::INT_MIN;
pub const NF_IP6_PRI_CONNTRACK_DEFRAG: ::c_int = -400;
pub const NF_IP6_PRI_RAW: ::c_int = -300;
pub const NF_IP6_PRI_SELINUX_FIRST: ::c_int = -225;
pub const NF_IP6_PRI_CONNTRACK: ::c_int = -200;
pub const NF_IP6_PRI_MANGLE: ::c_int = -150;
pub const NF_IP6_PRI_NAT_DST: ::c_int = -100;
pub const NF_IP6_PRI_FILTER: ::c_int = 0;
pub const NF_IP6_PRI_SECURITY: ::c_int = 50;
pub const NF_IP6_PRI_NAT_SRC: ::c_int = 100;
pub const NF_IP6_PRI_SELINUX_LAST: ::c_int = 225;
pub const NF_IP6_PRI_CONNTRACK_HELPER: ::c_int = 300;
pub const NF_IP6_PRI_LAST: ::c_int = ::INT_MAX;
// linux/netfilter_ipv6/ip6_tables.h
pub const IP6T_SO_ORIGINAL_DST: ::c_int = 80;
// linux/netfilter/nf_tables.h
pub const NFT_TABLE_MAXNAMELEN: ::c_int = 256;
pub const NFT_CHAIN_MAXNAMELEN: ::c_int = 256;
pub const NFT_SET_MAXNAMELEN: ::c_int = 256;
pub const NFT_OBJ_MAXNAMELEN: ::c_int = 256;
pub const NFT_USERDATA_MAXLEN: ::c_int = 256;
pub const NFT_REG_VERDICT: ::c_int = 0;
pub const NFT_REG_1: ::c_int = 1;
pub const NFT_REG_2: ::c_int = 2;
pub const NFT_REG_3: ::c_int = 3;
pub const NFT_REG_4: ::c_int = 4;
pub const __NFT_REG_MAX: ::c_int = 5;
pub const NFT_REG32_00: ::c_int = 8;
pub const NFT_REG32_01: ::c_int = 9;
pub const NFT_REG32_02: ::c_int = 10;
pub const NFT_REG32_03: ::c_int = 11;
pub const NFT_REG32_04: ::c_int = 12;
pub const NFT_REG32_05: ::c_int = 13;
pub const NFT_REG32_06: ::c_int = 14;
pub const NFT_REG32_07: ::c_int = 15;
pub const NFT_REG32_08: ::c_int = 16;
pub const NFT_REG32_09: ::c_int = 17;
pub const NFT_REG32_10: ::c_int = 18;
pub const NFT_REG32_11: ::c_int = 19;
pub const NFT_REG32_12: ::c_int = 20;
pub const NFT_REG32_13: ::c_int = 21;
pub const NFT_REG32_14: ::c_int = 22;
pub const NFT_REG32_15: ::c_int = 23;
pub const NFT_REG_SIZE: ::c_int = 16;
pub const NFT_REG32_SIZE: ::c_int = 4;
pub const NFT_CONTINUE: ::c_int = -1;
pub const NFT_BREAK: ::c_int = -2;
pub const NFT_JUMP: ::c_int = -3;
pub const NFT_GOTO: ::c_int = -4;
pub const NFT_RETURN: ::c_int = -5;
pub const NFT_MSG_NEWTABLE: ::c_int = 0;
pub const NFT_MSG_GETTABLE: ::c_int = 1;
pub const NFT_MSG_DELTABLE: ::c_int = 2;
pub const NFT_MSG_NEWCHAIN: ::c_int = 3;
pub const NFT_MSG_GETCHAIN: ::c_int = 4;
pub const NFT_MSG_DELCHAIN: ::c_int = 5;
pub const NFT_MSG_NEWRULE: ::c_int = 6;
pub const NFT_MSG_GETRULE: ::c_int = 7;
pub const NFT_MSG_DELRULE: ::c_int = 8;
pub const NFT_MSG_NEWSET: ::c_int = 9;
pub const NFT_MSG_GETSET: ::c_int = 10;
pub const NFT_MSG_DELSET: ::c_int = 11;
pub const NFT_MSG_NEWSETELEM: ::c_int = 12;
pub const NFT_MSG_GETSETELEM: ::c_int = 13;
pub const NFT_MSG_DELSETELEM: ::c_int = 14;
pub const NFT_MSG_NEWGEN: ::c_int = 15;
pub const NFT_MSG_GETGEN: ::c_int = 16;
pub const NFT_MSG_TRACE: ::c_int = 17;
pub const NFT_MSG_NEWOBJ: ::c_int = 18;
pub const NFT_MSG_GETOBJ: ::c_int = 19;
pub const NFT_MSG_DELOBJ: ::c_int = 20;
pub const NFT_MSG_GETOBJ_RESET: ::c_int = 21;
pub const NFT_MSG_MAX: ::c_int = 25;
pub const NFT_SET_ANONYMOUS: ::c_int = 0x1;
pub const NFT_SET_CONSTANT: ::c_int = 0x2;
pub const NFT_SET_INTERVAL: ::c_int = 0x4;
pub const NFT_SET_MAP: ::c_int = 0x8;
pub const NFT_SET_TIMEOUT: ::c_int = 0x10;
pub const NFT_SET_EVAL: ::c_int = 0x20;
pub const NFT_SET_POL_PERFORMANCE: ::c_int = 0;
pub const NFT_SET_POL_MEMORY: ::c_int = 1;
pub const NFT_SET_ELEM_INTERVAL_END: ::c_int = 0x1;
pub const NFT_DATA_VALUE: ::c_uint = 0;
pub const NFT_DATA_VERDICT: ::c_uint = 0xffffff00;
pub const NFT_DATA_RESERVED_MASK: ::c_uint = 0xffffff00;
pub const NFT_DATA_VALUE_MAXLEN: ::c_int = 64;
pub const NFT_BYTEORDER_NTOH: ::c_int = 0;
pub const NFT_BYTEORDER_HTON: ::c_int = 1;
pub const NFT_CMP_EQ: ::c_int = 0;
pub const NFT_CMP_NEQ: ::c_int = 1;
pub const NFT_CMP_LT: ::c_int = 2;
pub const NFT_CMP_LTE: ::c_int = 3;
pub const NFT_CMP_GT: ::c_int = 4;
pub const NFT_CMP_GTE: ::c_int = 5;
pub const NFT_RANGE_EQ: ::c_int = 0;
pub const NFT_RANGE_NEQ: ::c_int = 1;
pub const NFT_LOOKUP_F_INV: ::c_int = 1 << 0;
pub const NFT_DYNSET_OP_ADD: ::c_int = 0;
pub const NFT_DYNSET_OP_UPDATE: ::c_int = 1;
pub const NFT_DYNSET_F_INV: ::c_int = 1 << 0;
pub const NFT_PAYLOAD_LL_HEADER: ::c_int = 0;
pub const NFT_PAYLOAD_NETWORK_HEADER: ::c_int = 1;
pub const NFT_PAYLOAD_TRANSPORT_HEADER: ::c_int = 2;
pub const NFT_PAYLOAD_CSUM_NONE: ::c_int = 0;
pub const NFT_PAYLOAD_CSUM_INET: ::c_int = 1;
pub const NFT_META_LEN: ::c_int = 0;
pub const NFT_META_PROTOCOL: ::c_int = 1;
pub const NFT_META_PRIORITY: ::c_int = 2;
pub const NFT_META_MARK: ::c_int = 3;
pub const NFT_META_IIF: ::c_int = 4;
pub const NFT_META_OIF: ::c_int = 5;
pub const NFT_META_IIFNAME: ::c_int = 6;
pub const NFT_META_OIFNAME: ::c_int = 7;
pub const NFT_META_IIFTYPE: ::c_int = 8;
pub const NFT_META_OIFTYPE: ::c_int = 9;
pub const NFT_META_SKUID: ::c_int = 10;
pub const NFT_META_SKGID: ::c_int = 11;
pub const NFT_META_NFTRACE: ::c_int = 12;
pub const NFT_META_RTCLASSID: ::c_int = 13;
pub const NFT_META_SECMARK: ::c_int = 14;
pub const NFT_META_NFPROTO: ::c_int = 15;
pub const NFT_META_L4PROTO: ::c_int = 16;
pub const NFT_META_BRI_IIFNAME: ::c_int = 17;
pub const NFT_META_BRI_OIFNAME: ::c_int = 18;
pub const NFT_META_PKTTYPE: ::c_int = 19;
pub const NFT_META_CPU: ::c_int = 20;
pub const NFT_META_IIFGROUP: ::c_int = 21;
pub const NFT_META_OIFGROUP: ::c_int = 22;
pub const NFT_META_CGROUP: ::c_int = 23;
pub const NFT_META_PRANDOM: ::c_int = 24;
pub const NFT_CT_STATE: ::c_int = 0;
pub const NFT_CT_DIRECTION: ::c_int = 1;
pub const NFT_CT_STATUS: ::c_int = 2;
pub const NFT_CT_MARK: ::c_int = 3;
pub const NFT_CT_SECMARK: ::c_int = 4;
pub const NFT_CT_EXPIRATION: ::c_int = 5;
pub const NFT_CT_HELPER: ::c_int = 6;
pub const NFT_CT_L3PROTOCOL: ::c_int = 7;
pub const NFT_CT_SRC: ::c_int = 8;
pub const NFT_CT_DST: ::c_int = 9;
pub const NFT_CT_PROTOCOL: ::c_int = 10;
pub const NFT_CT_PROTO_SRC: ::c_int = 11;
pub const NFT_CT_PROTO_DST: ::c_int = 12;
pub const NFT_CT_LABELS: ::c_int = 13;
pub const NFT_CT_PKTS: ::c_int = 14;
pub const NFT_CT_BYTES: ::c_int = 15;
pub const NFT_LIMIT_PKTS: ::c_int = 0;
pub const NFT_LIMIT_PKT_BYTES: ::c_int = 1;
pub const NFT_LIMIT_F_INV: ::c_int = 1 << 0;
pub const NFT_QUEUE_FLAG_BYPASS: ::c_int = 0x01;
pub const NFT_QUEUE_FLAG_CPU_FANOUT: ::c_int = 0x02;
pub const NFT_QUEUE_FLAG_MASK: ::c_int = 0x03;
pub const NFT_QUOTA_F_INV: ::c_int = 1 << 0;
pub const NFT_REJECT_ICMP_UNREACH: ::c_int = 0;
pub const NFT_REJECT_TCP_RST: ::c_int = 1;
pub const NFT_REJECT_ICMPX_UNREACH: ::c_int = 2;
pub const NFT_REJECT_ICMPX_NO_ROUTE: ::c_int = 0;
pub const NFT_REJECT_ICMPX_PORT_UNREACH: ::c_int = 1;
pub const NFT_REJECT_ICMPX_HOST_UNREACH: ::c_int = 2;
pub const NFT_REJECT_ICMPX_ADMIN_PROHIBITED: ::c_int = 3;
pub const NFT_NAT_SNAT: ::c_int = 0;
pub const NFT_NAT_DNAT: ::c_int = 1;
pub const NFT_TRACETYPE_UNSPEC: ::c_int = 0;
pub const NFT_TRACETYPE_POLICY: ::c_int = 1;
pub const NFT_TRACETYPE_RETURN: ::c_int = 2;
pub const NFT_TRACETYPE_RULE: ::c_int = 3;
pub const NFT_NG_INCREMENTAL: ::c_int = 0;
pub const NFT_NG_RANDOM: ::c_int = 1;
pub const IFF_TUN: ::c_int = 0x0001;
pub const IFF_TAP: ::c_int = 0x0002;
pub const IFF_NO_PI: ::c_int = 0x1000;
// start android/platform/bionic/libc/kernel/uapi/linux/if_ether.h
// from https://android.googlesource.com/
// platform/bionic/+/master/libc/kernel/uapi/linux/if_ether.h
pub const ETH_ALEN: ::c_int = 6;
pub const ETH_HLEN: ::c_int = 14;
pub const ETH_ZLEN: ::c_int = 60;
pub const ETH_DATA_LEN: ::c_int = 1500;
pub const ETH_FRAME_LEN: ::c_int = 1514;
pub const ETH_FCS_LEN: ::c_int = 4;
pub const ETH_MIN_MTU: ::c_int = 68;
pub const ETH_MAX_MTU: ::c_int = 0xFFFF;
pub const ETH_P_LOOP: ::c_int = 0x0060;
pub const ETH_P_PUP: ::c_int = 0x0200;
pub const ETH_P_PUPAT: ::c_int = 0x0201;
pub const ETH_P_TSN: ::c_int = 0x22F0;
pub const ETH_P_IP: ::c_int = 0x0800;
pub const ETH_P_X25: ::c_int = 0x0805;
pub const ETH_P_ARP: ::c_int = 0x0806;
pub const ETH_P_BPQ: ::c_int = 0x08FF;
pub const ETH_P_IEEEPUP: ::c_int = 0x0a00;
pub const ETH_P_IEEEPUPAT: ::c_int = 0x0a01;
pub const ETH_P_BATMAN: ::c_int = 0x4305;
pub const ETH_P_DEC: ::c_int = 0x6000;
pub const ETH_P_DNA_DL: ::c_int = 0x6001;
pub const ETH_P_DNA_RC: ::c_int = 0x6002;
pub const ETH_P_DNA_RT: ::c_int = 0x6003;
pub const ETH_P_LAT: ::c_int = 0x6004;
pub const ETH_P_DIAG: ::c_int = 0x6005;
pub const ETH_P_CUST: ::c_int = 0x6006;
pub const ETH_P_SCA: ::c_int = 0x6007;
pub const ETH_P_TEB: ::c_int = 0x6558;
pub const ETH_P_RARP: ::c_int = 0x8035;
pub const ETH_P_ATALK: ::c_int = 0x809B;
pub const ETH_P_AARP: ::c_int = 0x80F3;
pub const ETH_P_8021Q: ::c_int = 0x8100;
/* see rust-lang/libc#924 pub const ETH_P_ERSPAN: ::c_int = 0x88BE;*/
pub const ETH_P_IPX: ::c_int = 0x8137;
pub const ETH_P_IPV6: ::c_int = 0x86DD;
pub const ETH_P_PAUSE: ::c_int = 0x8808;
pub const ETH_P_SLOW: ::c_int = 0x8809;
pub const ETH_P_WCCP: ::c_int = 0x883E;
pub const ETH_P_MPLS_UC: ::c_int = 0x8847;
pub const ETH_P_MPLS_MC: ::c_int = 0x8848;
pub const ETH_P_ATMMPOA: ::c_int = 0x884c;
pub const ETH_P_PPP_DISC: ::c_int = 0x8863;
pub const ETH_P_PPP_SES: ::c_int = 0x8864;
pub const ETH_P_LINK_CTL: ::c_int = 0x886c;
pub const ETH_P_ATMFATE: ::c_int = 0x8884;
pub const ETH_P_PAE: ::c_int = 0x888E;
pub const ETH_P_AOE: ::c_int = 0x88A2;
pub const ETH_P_8021AD: ::c_int = 0x88A8;
pub const ETH_P_802_EX1: ::c_int = 0x88B5;
pub const ETH_P_TIPC: ::c_int = 0x88CA;
pub const ETH_P_MACSEC: ::c_int = 0x88E5;
pub const ETH_P_8021AH: ::c_int = 0x88E7;
pub const ETH_P_MVRP: ::c_int = 0x88F5;
pub const ETH_P_1588: ::c_int = 0x88F7;
pub const ETH_P_NCSI: ::c_int = 0x88F8;
pub const ETH_P_PRP: ::c_int = 0x88FB;
pub const ETH_P_FCOE: ::c_int = 0x8906;
/* see rust-lang/libc#924 pub const ETH_P_IBOE: ::c_int = 0x8915;*/
pub const ETH_P_TDLS: ::c_int = 0x890D;
pub const ETH_P_FIP: ::c_int = 0x8914;
pub const ETH_P_80221: ::c_int = 0x8917;
pub const ETH_P_HSR: ::c_int = 0x892F;
/* see rust-lang/libc#924 pub const ETH_P_NSH: ::c_int = 0x894F;*/
pub const ETH_P_LOOPBACK: ::c_int = 0x9000;
pub const ETH_P_QINQ1: ::c_int = 0x9100;
pub const ETH_P_QINQ2: ::c_int = 0x9200;
pub const ETH_P_QINQ3: ::c_int = 0x9300;
pub const ETH_P_EDSA: ::c_int = 0xDADA;
/* see rust-lang/libc#924 pub const ETH_P_IFE: ::c_int = 0xED3E;*/
pub const ETH_P_AF_IUCV: ::c_int = 0xFBFB;
pub const ETH_P_802_3_MIN: ::c_int = 0x0600;
pub const ETH_P_802_3: ::c_int = 0x0001;
pub const ETH_P_AX25: ::c_int = 0x0002;
pub const ETH_P_ALL: ::c_int = 0x0003;
pub const ETH_P_802_2: ::c_int = 0x0004;
pub const ETH_P_SNAP: ::c_int = 0x0005;
pub const ETH_P_DDCMP: ::c_int = 0x0006;
pub const ETH_P_WAN_PPP: ::c_int = 0x0007;
pub const ETH_P_PPP_MP: ::c_int = 0x0008;
pub const ETH_P_LOCALTALK: ::c_int = 0x0009;
pub const ETH_P_CAN: ::c_int = 0x000C;
pub const ETH_P_CANFD: ::c_int = 0x000D;
pub const ETH_P_PPPTALK: ::c_int = 0x0010;
pub const ETH_P_TR_802_2: ::c_int = 0x0011;
pub const ETH_P_MOBITEX: ::c_int = 0x0015;
pub const ETH_P_CONTROL: ::c_int = 0x0016;
pub const ETH_P_IRDA: ::c_int = 0x0017;
pub const ETH_P_ECONET: ::c_int = 0x0018;
pub const ETH_P_HDLC: ::c_int = 0x0019;
pub const ETH_P_ARCNET: ::c_int = 0x001A;
pub const ETH_P_DSA: ::c_int = 0x001B;
pub const ETH_P_TRAILER: ::c_int = 0x001C;
pub const ETH_P_PHONET: ::c_int = 0x00F5;
pub const ETH_P_IEEE802154: ::c_int = 0x00F6;
pub const ETH_P_CAIF: ::c_int = 0x00F7;
pub const ETH_P_XDSA: ::c_int = 0x00F8;
/* see rust-lang/libc#924 pub const ETH_P_MAP: ::c_int = 0x00F9;*/
// end android/platform/bionic/libc/kernel/uapi/linux/if_ether.h
pub const SIOCADDRT: ::c_ulong = 0x0000890B;
pub const SIOCDELRT: ::c_ulong = 0x0000890C;
pub const SIOCGIFNAME: ::c_ulong = 0x00008910;
pub const SIOCSIFLINK: ::c_ulong = 0x00008911;
pub const SIOCGIFCONF: ::c_ulong = 0x00008912;
pub const SIOCGIFFLAGS: ::c_ulong = 0x00008913;
pub const SIOCSIFFLAGS: ::c_ulong = 0x00008914;
pub const SIOCGIFADDR: ::c_ulong = 0x00008915;
pub const SIOCSIFADDR: ::c_ulong = 0x00008916;
pub const SIOCGIFDSTADDR: ::c_ulong = 0x00008917;
pub const SIOCSIFDSTADDR: ::c_ulong = 0x00008918;
pub const SIOCGIFBRDADDR: ::c_ulong = 0x00008919;
pub const SIOCSIFBRDADDR: ::c_ulong = 0x0000891A;
pub const SIOCGIFNETMASK: ::c_ulong = 0x0000891B;
pub const SIOCSIFNETMASK: ::c_ulong = 0x0000891C;
pub const SIOCGIFMETRIC: ::c_ulong = 0x0000891D;
pub const SIOCSIFMETRIC: ::c_ulong = 0x0000891E;
pub const SIOCGIFMEM: ::c_ulong = 0x0000891F;
pub const SIOCSIFMEM: ::c_ulong = 0x00008920;
pub const SIOCGIFMTU: ::c_ulong = 0x00008921;
pub const SIOCSIFMTU: ::c_ulong = 0x00008922;
pub const SIOCSIFHWADDR: ::c_ulong = 0x00008924;
pub const SIOCGIFENCAP: ::c_ulong = 0x00008925;
pub const SIOCSIFENCAP: ::c_ulong = 0x00008926;
pub const SIOCGIFHWADDR: ::c_ulong = 0x00008927;
pub const SIOCGIFSLAVE: ::c_ulong = 0x00008929;
pub const SIOCSIFSLAVE: ::c_ulong = 0x00008930;
pub const SIOCADDMULTI: ::c_ulong = 0x00008931;
pub const SIOCDELMULTI: ::c_ulong = 0x00008932;
pub const SIOCDARP: ::c_ulong = 0x00008953;
pub const SIOCGARP: ::c_ulong = 0x00008954;
pub const SIOCSARP: ::c_ulong = 0x00008955;
pub const SIOCDRARP: ::c_ulong = 0x00008960;
pub const SIOCGRARP: ::c_ulong = 0x00008961;
pub const SIOCSRARP: ::c_ulong = 0x00008962;
pub const SIOCGIFMAP: ::c_ulong = 0x00008970;
pub const SIOCSIFMAP: ::c_ulong = 0x00008971;
// linux/module.h
pub const MODULE_INIT_IGNORE_MODVERSIONS: ::c_uint = 0x0001;
pub const MODULE_INIT_IGNORE_VERMAGIC: ::c_uint = 0x0002;
#[deprecated(
since = "0.2.55",
note = "ENOATTR is not available on Android; use ENODATA instead"
)]
pub const ENOATTR: ::c_int = ::ENODATA;
// linux/if_alg.h
pub const ALG_SET_KEY: ::c_int = 1;
pub const ALG_SET_IV: ::c_int = 2;
pub const ALG_SET_OP: ::c_int = 3;
pub const ALG_SET_AEAD_ASSOCLEN: ::c_int = 4;
pub const ALG_SET_AEAD_AUTHSIZE: ::c_int = 5;
pub const ALG_OP_DECRYPT: ::c_int = 0;
pub const ALG_OP_ENCRYPT: ::c_int = 1;
// uapi/linux/vm_sockets.h
pub const VMADDR_CID_ANY: ::c_uint = 0xFFFFFFFF;
pub const VMADDR_CID_HYPERVISOR: ::c_uint = 0;
pub const VMADDR_CID_LOCAL: ::c_uint = 1;
pub const VMADDR_CID_HOST: ::c_uint = 2;
pub const VMADDR_PORT_ANY: ::c_uint = 0xFFFFFFFF;
// uapi/linux/inotify.h
pub const IN_ACCESS: u32 = 0x0000_0001;
pub const IN_MODIFY: u32 = 0x0000_0002;
pub const IN_ATTRIB: u32 = 0x0000_0004;
pub const IN_CLOSE_WRITE: u32 = 0x0000_0008;
pub const IN_CLOSE_NOWRITE: u32 = 0x0000_0010;
pub const IN_CLOSE: u32 = IN_CLOSE_WRITE | IN_CLOSE_NOWRITE;
pub const IN_OPEN: u32 = 0x0000_0020;
pub const IN_MOVED_FROM: u32 = 0x0000_0040;
pub const IN_MOVED_TO: u32 = 0x0000_0080;
pub const IN_MOVE: u32 = IN_MOVED_FROM | IN_MOVED_TO;
pub const IN_CREATE: u32 = 0x0000_0100;
pub const IN_DELETE: u32 = 0x0000_0200;
pub const IN_DELETE_SELF: u32 = 0x0000_0400;
pub const IN_MOVE_SELF: u32 = 0x0000_0800;
pub const IN_UNMOUNT: u32 = 0x0000_2000;
pub const IN_Q_OVERFLOW: u32 = 0x0000_4000;
pub const IN_IGNORED: u32 = 0x0000_8000;
pub const IN_ONLYDIR: u32 = 0x0100_0000;
pub const IN_DONT_FOLLOW: u32 = 0x0200_0000;
// pub const IN_EXCL_UNLINK: u32 = 0x0400_0000;
// pub const IN_MASK_CREATE: u32 = 0x1000_0000;
// pub const IN_MASK_ADD: u32 = 0x2000_0000;
pub const IN_ISDIR: u32 = 0x4000_0000;
pub const IN_ONESHOT: u32 = 0x8000_0000;
pub const IN_ALL_EVENTS: u32 = IN_ACCESS
| IN_MODIFY
| IN_ATTRIB
| IN_CLOSE_WRITE
| IN_CLOSE_NOWRITE
| IN_OPEN
| IN_MOVED_FROM
| IN_MOVED_TO
| IN_DELETE
| IN_CREATE
| IN_DELETE_SELF
| IN_MOVE_SELF;
pub const IN_CLOEXEC: ::c_int = O_CLOEXEC;
pub const IN_NONBLOCK: ::c_int = O_NONBLOCK;
pub const FUTEX_WAIT: ::c_int = 0;
pub const FUTEX_WAKE: ::c_int = 1;
pub const FUTEX_FD: ::c_int = 2;
pub const FUTEX_REQUEUE: ::c_int = 3;
pub const FUTEX_CMP_REQUEUE: ::c_int = 4;
pub const FUTEX_WAKE_OP: ::c_int = 5;
pub const FUTEX_LOCK_PI: ::c_int = 6;
pub const FUTEX_UNLOCK_PI: ::c_int = 7;
pub const FUTEX_TRYLOCK_PI: ::c_int = 8;
pub const FUTEX_WAIT_BITSET: ::c_int = 9;
pub const FUTEX_WAKE_BITSET: ::c_int = 10;
pub const FUTEX_WAIT_REQUEUE_PI: ::c_int = 11;
pub const FUTEX_CMP_REQUEUE_PI: ::c_int = 12;
pub const FUTEX_PRIVATE_FLAG: ::c_int = 128;
pub const FUTEX_CLOCK_REALTIME: ::c_int = 256;
pub const FUTEX_CMD_MASK: ::c_int = !(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME);
// linux/errqueue.h
pub const SO_EE_ORIGIN_NONE: u8 = 0;
pub const SO_EE_ORIGIN_LOCAL: u8 = 1;
pub const SO_EE_ORIGIN_ICMP: u8 = 2;
pub const SO_EE_ORIGIN_ICMP6: u8 = 3;
pub const SO_EE_ORIGIN_TXSTATUS: u8 = 4;
pub const SO_EE_ORIGIN_TIMESTAMPING: u8 = SO_EE_ORIGIN_TXSTATUS;
// errno.h
pub const EPERM: ::c_int = 1;
pub const ENOENT: ::c_int = 2;
pub const ESRCH: ::c_int = 3;
pub const EINTR: ::c_int = 4;
pub const EIO: ::c_int = 5;
pub const ENXIO: ::c_int = 6;
pub const E2BIG: ::c_int = 7;
pub const ENOEXEC: ::c_int = 8;
pub const EBADF: ::c_int = 9;
pub const ECHILD: ::c_int = 10;
pub const EAGAIN: ::c_int = 11;
pub const ENOMEM: ::c_int = 12;
pub const EACCES: ::c_int = 13;
pub const EFAULT: ::c_int = 14;
pub const ENOTBLK: ::c_int = 15;
pub const EBUSY: ::c_int = 16;
pub const EEXIST: ::c_int = 17;
pub const EXDEV: ::c_int = 18;
pub const ENODEV: ::c_int = 19;
pub const ENOTDIR: ::c_int = 20;
pub const EISDIR: ::c_int = 21;
pub const EINVAL: ::c_int = 22;
pub const ENFILE: ::c_int = 23;
pub const EMFILE: ::c_int = 24;
pub const ENOTTY: ::c_int = 25;
pub const ETXTBSY: ::c_int = 26;
pub const EFBIG: ::c_int = 27;
pub const ENOSPC: ::c_int = 28;
pub const ESPIPE: ::c_int = 29;
pub const EROFS: ::c_int = 30;
pub const EMLINK: ::c_int = 31;
pub const EPIPE: ::c_int = 32;
pub const EDOM: ::c_int = 33;
pub const ERANGE: ::c_int = 34;
pub const EWOULDBLOCK: ::c_int = EAGAIN;
pub const PRIO_PROCESS: ::c_int = 0;
pub const PRIO_PGRP: ::c_int = 1;
pub const PRIO_USER: ::c_int = 2;
// linux/sched.h
pub const SCHED_NORMAL: ::c_int = 0;
pub const SCHED_FIFO: ::c_int = 1;
pub const SCHED_RR: ::c_int = 2;
pub const SCHED_BATCH: ::c_int = 3;
pub const SCHED_IDLE: ::c_int = 5;
pub const SCHED_DEADLINE: ::c_int = 6;
pub const SCHED_RESET_ON_FORK: ::c_int = 0x40000000;
pub const CLONE_PIDFD: ::c_int = 0x1000;
// bits/seek_constants.h
pub const SEEK_DATA: ::c_int = 3;
pub const SEEK_HOLE: ::c_int = 4;
// sys/socket.h
pub const AF_NFC: ::c_int = 39;
pub const AF_VSOCK: ::c_int = 40;
pub const PF_NFC: ::c_int = AF_NFC;
pub const PF_VSOCK: ::c_int = AF_VSOCK;
// sys/system_properties.h
pub const PROP_VALUE_MAX: ::c_int = 92;
pub const PROP_NAME_MAX: ::c_int = 32;
// sys/prctl.h
pub const PR_SET_VMA: ::c_int = 0x53564d41;
pub const PR_SET_VMA_ANON_NAME: ::c_int = 0;
f! {
pub fn CMSG_NXTHDR(mhdr: *const msghdr,
cmsg: *const cmsghdr) -> *mut cmsghdr {
let next = (cmsg as usize
+ super::CMSG_ALIGN((*cmsg).cmsg_len as usize))
as *mut cmsghdr;
let max = (*mhdr).msg_control as usize
+ (*mhdr).msg_controllen as usize;
if (next.offset(1)) as usize > max {
0 as *mut cmsghdr
} else {
next as *mut cmsghdr
}
}
pub fn CPU_ALLOC_SIZE(count: ::c_int) -> ::size_t {
let _dummy: cpu_set_t = ::mem::zeroed();
let size_in_bits = 8 * ::mem::size_of_val(&_dummy.__bits[0]);
((count as ::size_t + size_in_bits - 1) / 8) as ::size_t
}
pub fn CPU_ZERO(cpuset: &mut cpu_set_t) -> () {
for slot in cpuset.__bits.iter_mut() {
*slot = 0;
}
}
pub fn CPU_SET(cpu: usize, cpuset: &mut cpu_set_t) -> () {
let size_in_bits
= 8 * ::mem::size_of_val(&cpuset.__bits[0]); // 32, 64 etc
let (idx, offset) = (cpu / size_in_bits, cpu % size_in_bits);
cpuset.__bits[idx] |= 1 << offset;
()
}
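    // Illustrative: with 64-bit mask words, CPU_SET(70, set) computes
    // idx == 1 and offset == 6 (70 / 64, 70 % 64), so it sets bit 6 of
    // set.__bits[1].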
pub fn CPU_CLR(cpu: usize, cpuset: &mut cpu_set_t) -> () {
let size_in_bits
= 8 * ::mem::size_of_val(&cpuset.__bits[0]); // 32, 64 etc
let (idx, offset) = (cpu / size_in_bits, cpu % size_in_bits);
cpuset.__bits[idx] &= !(1 << offset);
()
}
pub fn CPU_ISSET(cpu: usize, cpuset: &cpu_set_t) -> bool {
let size_in_bits = 8 * ::mem::size_of_val(&cpuset.__bits[0]);
let (idx, offset) = (cpu / size_in_bits, cpu % size_in_bits);
0 != (cpuset.__bits[idx] & (1 << offset))
}
pub fn CPU_COUNT_S(size: usize, cpuset: &cpu_set_t) -> ::c_int {
let mut s: u32 = 0;
let size_of_mask = ::mem::size_of_val(&cpuset.__bits[0]);
for i in cpuset.__bits[..(size / size_of_mask)].iter() {
s += i.count_ones();
};
s as ::c_int
}
pub fn CPU_COUNT(cpuset: &cpu_set_t) -> ::c_int {
CPU_COUNT_S(::mem::size_of::<cpu_set_t>(), cpuset)
}
pub fn CPU_EQUAL(set1: &cpu_set_t, set2: &cpu_set_t) -> bool {
set1.__bits == set2.__bits
}
pub fn major(dev: ::dev_t) -> ::c_int {
((dev >> 8) & 0xfff) as ::c_int
}
pub fn minor(dev: ::dev_t) -> ::c_int {
((dev & 0xff) | ((dev >> 12) & 0xfff00)) as ::c_int
}
pub fn makedev(ma: ::c_int, mi: ::c_int) -> ::dev_t {
let ma = ma as ::dev_t;
let mi = mi as ::dev_t;
((ma & 0xfff) << 8) | (mi & 0xff) | ((mi & 0xfff00) << 12)
}
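    // Illustrative round-trip: makedev(8, 1) yields 0x801, and major(0x801)
    // and minor(0x801) recover 8 and 1 respectively.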
pub fn NLA_ALIGN(len: ::c_int) -> ::c_int {
return ((len) + NLA_ALIGNTO - 1) & !(NLA_ALIGNTO - 1)
}
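    // Worked example: with NLA_ALIGNTO == 4, NLA_ALIGN(5) == (5 + 3) & !3 == 8,
    // i.e. attribute lengths are rounded up to a 4-byte boundary.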
pub fn SO_EE_OFFENDER(ee: *const ::sock_extended_err) -> *mut ::sockaddr {
ee.offset(1) as *mut ::sockaddr
}
}
extern "C" {
pub fn getrlimit64(resource: ::c_int, rlim: *mut rlimit64) -> ::c_int;
pub fn setrlimit64(resource: ::c_int, rlim: *const rlimit64) -> ::c_int;
pub fn getrlimit(resource: ::c_int, rlim: *mut ::rlimit) -> ::c_int;
pub fn setrlimit(resource: ::c_int, rlim: *const ::rlimit) -> ::c_int;
pub fn prlimit(
pid: ::pid_t,
resource: ::c_int,
new_limit: *const ::rlimit,
old_limit: *mut ::rlimit,
) -> ::c_int;
pub fn prlimit64(
pid: ::pid_t,
resource: ::c_int,
new_limit: *const ::rlimit64,
old_limit: *mut ::rlimit64,
) -> ::c_int;
pub fn strerror_r(errnum: ::c_int, buf: *mut c_char, buflen: ::size_t) -> ::c_int;
pub fn gettimeofday(tp: *mut ::timeval, tz: *mut ::timezone) -> ::c_int;
pub fn madvise(addr: *mut ::c_void, len: ::size_t, advice: ::c_int) -> ::c_int;
pub fn ioctl(fd: ::c_int, request: ::c_int, ...) -> ::c_int;
pub fn msync(addr: *mut ::c_void, len: ::size_t, flags: ::c_int) -> ::c_int;
pub fn mprotect(addr: *mut ::c_void, len: ::size_t, prot: ::c_int) -> ::c_int;
pub fn recvfrom(
socket: ::c_int,
buf: *mut ::c_void,
len: ::size_t,
flags: ::c_int,
addr: *mut ::sockaddr,
addrlen: *mut ::socklen_t,
) -> ::ssize_t;
pub fn getnameinfo(
sa: *const ::sockaddr,
salen: ::socklen_t,
host: *mut ::c_char,
hostlen: ::size_t,
serv: *mut ::c_char,
servlen: ::size_t,
flags: ::c_int,
) -> ::c_int;
pub fn preadv(fd: ::c_int, iov: *const ::iovec, count: ::c_int, offset: ::off_t) -> ::ssize_t;
pub fn pwritev(fd: ::c_int, iov: *const ::iovec, count: ::c_int, offset: ::off_t) -> ::ssize_t;
pub fn process_vm_readv(
pid: ::pid_t,
local_iov: *const ::iovec,
local_iov_count: ::c_ulong,
remote_iov: *const ::iovec,
remote_iov_count: ::c_ulong,
flags: ::c_ulong,
) -> ::ssize_t;
pub fn process_vm_writev(
pid: ::pid_t,
local_iov: *const ::iovec,
local_iov_count: ::c_ulong,
remote_iov: *const ::iovec,
remote_iov_count: ::c_ulong,
flags: ::c_ulong,
) -> ::ssize_t;
pub fn ptrace(request: ::c_int, ...) -> ::c_long;
pub fn getpriority(which: ::c_int, who: ::id_t) -> ::c_int;
pub fn setpriority(which: ::c_int, who: ::id_t, prio: ::c_int) -> ::c_int;
pub fn __sched_cpualloc(count: ::size_t) -> *mut ::cpu_set_t;
pub fn __sched_cpufree(set: *mut ::cpu_set_t);
pub fn __sched_cpucount(setsize: ::size_t, set: *const cpu_set_t) -> ::c_int;
pub fn sched_getcpu() -> ::c_int;
pub fn mallinfo() -> ::mallinfo;
// available from API 23
pub fn malloc_info(options: ::c_int, stream: *mut ::FILE) -> ::c_int;
pub fn malloc_usable_size(ptr: *const ::c_void) -> ::size_t;
pub fn utmpname(name: *const ::c_char) -> ::c_int;
pub fn setutent();
pub fn getutent() -> *mut utmp;
pub fn seekdir(dirp: *mut ::DIR, loc: ::c_long);
pub fn telldir(dirp: *mut ::DIR) -> ::c_long;
pub fn fallocate(fd: ::c_int, mode: ::c_int, offset: ::off_t, len: ::off_t) -> ::c_int;
pub fn fallocate64(fd: ::c_int, mode: ::c_int, offset: ::off64_t, len: ::off64_t) -> ::c_int;
pub fn posix_fallocate(fd: ::c_int, offset: ::off_t, len: ::off_t) -> ::c_int;
pub fn posix_fallocate64(fd: ::c_int, offset: ::off64_t, len: ::off64_t) -> ::c_int;
pub fn getxattr(
path: *const c_char,
name: *const c_char,
value: *mut ::c_void,
size: ::size_t,
) -> ::ssize_t;
pub fn lgetxattr(
path: *const c_char,
name: *const c_char,
value: *mut ::c_void,
size: ::size_t,
) -> ::ssize_t;
pub fn fgetxattr(
filedes: ::c_int,
name: *const c_char,
value: *mut ::c_void,
size: ::size_t,
) -> ::ssize_t;
pub fn setxattr(
path: *const c_char,
name: *const c_char,
value: *const ::c_void,
size: ::size_t,
flags: ::c_int,
) -> ::c_int;
pub fn lsetxattr(
path: *const c_char,
name: *const c_char,
value: *const ::c_void,
size: ::size_t,
flags: ::c_int,
) -> ::c_int;
pub fn fsetxattr(
filedes: ::c_int,
name: *const c_char,
value: *const ::c_void,
size: ::size_t,
flags: ::c_int,
) -> ::c_int;
pub fn listxattr(path: *const c_char, list: *mut c_char, size: ::size_t) -> ::ssize_t;
pub fn llistxattr(path: *const c_char, list: *mut c_char, size: ::size_t) -> ::ssize_t;
pub fn flistxattr(filedes: ::c_int, list: *mut c_char, size: ::size_t) -> ::ssize_t;
pub fn removexattr(path: *const c_char, name: *const c_char) -> ::c_int;
pub fn lremovexattr(path: *const c_char, name: *const c_char) -> ::c_int;
pub fn fremovexattr(filedes: ::c_int, name: *const c_char) -> ::c_int;
pub fn signalfd(fd: ::c_int, mask: *const ::sigset_t, flags: ::c_int) -> ::c_int;
pub fn timerfd_create(clock: ::clockid_t, flags: ::c_int) -> ::c_int;
pub fn timerfd_gettime(fd: ::c_int, current_value: *mut itimerspec) -> ::c_int;
pub fn timerfd_settime(
fd: ::c_int,
flags: ::c_int,
new_value: *const itimerspec,
old_value: *mut itimerspec,
) -> ::c_int;
pub fn syscall(num: ::c_long, ...) -> ::c_long;
pub fn sched_getaffinity(pid: ::pid_t, cpusetsize: ::size_t, cpuset: *mut cpu_set_t)
-> ::c_int;
pub fn sched_setaffinity(
pid: ::pid_t,
cpusetsize: ::size_t,
cpuset: *const cpu_set_t,
) -> ::c_int;
pub fn epoll_create(size: ::c_int) -> ::c_int;
pub fn epoll_create1(flags: ::c_int) -> ::c_int;
pub fn epoll_wait(
epfd: ::c_int,
events: *mut ::epoll_event,
maxevents: ::c_int,
timeout: ::c_int,
) -> ::c_int;
pub fn epoll_ctl(epfd: ::c_int, op: ::c_int, fd: ::c_int, event: *mut ::epoll_event)
-> ::c_int;
pub fn pthread_getschedparam(
native: ::pthread_t,
policy: *mut ::c_int,
param: *mut ::sched_param,
) -> ::c_int;
pub fn unshare(flags: ::c_int) -> ::c_int;
pub fn umount(target: *const ::c_char) -> ::c_int;
pub fn sched_get_priority_max(policy: ::c_int) -> ::c_int;
pub fn tee(fd_in: ::c_int, fd_out: ::c_int, len: ::size_t, flags: ::c_uint) -> ::ssize_t;
pub fn settimeofday(tv: *const ::timeval, tz: *const ::timezone) -> ::c_int;
pub fn splice(
fd_in: ::c_int,
off_in: *mut ::loff_t,
fd_out: ::c_int,
off_out: *mut ::loff_t,
len: ::size_t,
flags: ::c_uint,
) -> ::ssize_t;
pub fn eventfd(init: ::c_uint, flags: ::c_int) -> ::c_int;
pub fn sched_rr_get_interval(pid: ::pid_t, tp: *mut ::timespec) -> ::c_int;
pub fn sem_timedwait(sem: *mut sem_t, abstime: *const ::timespec) -> ::c_int;
pub fn sem_getvalue(sem: *mut sem_t, sval: *mut ::c_int) -> ::c_int;
pub fn sched_setparam(pid: ::pid_t, param: *const ::sched_param) -> ::c_int;
pub fn setns(fd: ::c_int, nstype: ::c_int) -> ::c_int;
pub fn swapoff(path: *const ::c_char) -> ::c_int;
pub fn vmsplice(
fd: ::c_int,
iov: *const ::iovec,
nr_segs: ::size_t,
flags: ::c_uint,
) -> ::ssize_t;
pub fn mount(
src: *const ::c_char,
target: *const ::c_char,
fstype: *const ::c_char,
flags: ::c_ulong,
data: *const ::c_void,
) -> ::c_int;
pub fn personality(persona: ::c_uint) -> ::c_int;
pub fn prctl(option: ::c_int, ...) -> ::c_int;
pub fn sched_getparam(pid: ::pid_t, param: *mut ::sched_param) -> ::c_int;
pub fn ppoll(
fds: *mut ::pollfd,
nfds: nfds_t,
timeout: *const ::timespec,
sigmask: *const sigset_t,
) -> ::c_int;
pub fn pthread_mutex_timedlock(
lock: *mut pthread_mutex_t,
abstime: *const ::timespec,
) -> ::c_int;
pub fn pthread_barrierattr_init(attr: *mut ::pthread_barrierattr_t) -> ::c_int;
pub fn pthread_barrierattr_destroy(attr: *mut ::pthread_barrierattr_t) -> ::c_int;
pub fn pthread_barrierattr_getpshared(
attr: *const ::pthread_barrierattr_t,
shared: *mut ::c_int,
) -> ::c_int;
pub fn pthread_barrierattr_setpshared(
attr: *mut ::pthread_barrierattr_t,
shared: ::c_int,
) -> ::c_int;
pub fn pthread_barrier_init(
barrier: *mut pthread_barrier_t,
attr: *const ::pthread_barrierattr_t,
count: ::c_uint,
) -> ::c_int;
pub fn pthread_barrier_destroy(barrier: *mut pthread_barrier_t) -> ::c_int;
pub fn pthread_barrier_wait(barrier: *mut pthread_barrier_t) -> ::c_int;
pub fn pthread_spin_init(lock: *mut ::pthread_spinlock_t, pshared: ::c_int) -> ::c_int;
pub fn pthread_spin_destroy(lock: *mut ::pthread_spinlock_t) -> ::c_int;
pub fn pthread_spin_lock(lock: *mut ::pthread_spinlock_t) -> ::c_int;
pub fn pthread_spin_trylock(lock: *mut ::pthread_spinlock_t) -> ::c_int;
pub fn pthread_spin_unlock(lock: *mut ::pthread_spinlock_t) -> ::c_int;
pub fn clone(
cb: extern "C" fn(*mut ::c_void) -> ::c_int,
child_stack: *mut ::c_void,
flags: ::c_int,
arg: *mut ::c_void,
...
) -> ::c_int;
pub fn sched_getscheduler(pid: ::pid_t) -> ::c_int;
pub fn clock_nanosleep(
clk_id: ::clockid_t,
flags: ::c_int,
rqtp: *const ::timespec,
rmtp: *mut ::timespec,
) -> ::c_int;
pub fn pthread_attr_getguardsize(
attr: *const ::pthread_attr_t,
guardsize: *mut ::size_t,
) -> ::c_int;
pub fn sethostname(name: *const ::c_char, len: ::size_t) -> ::c_int;
pub fn sched_get_priority_min(policy: ::c_int) -> ::c_int;
pub fn pthread_condattr_getpshared(
attr: *const pthread_condattr_t,
pshared: *mut ::c_int,
) -> ::c_int;
pub fn sysinfo(info: *mut ::sysinfo) -> ::c_int;
pub fn umount2(target: *const ::c_char, flags: ::c_int) -> ::c_int;
pub fn pthread_setschedparam(
native: ::pthread_t,
policy: ::c_int,
param: *const ::sched_param,
) -> ::c_int;
pub fn swapon(path: *const ::c_char, swapflags: ::c_int) -> ::c_int;
pub fn sched_setscheduler(
pid: ::pid_t,
policy: ::c_int,
param: *const ::sched_param,
) -> ::c_int;
pub fn sendfile(
out_fd: ::c_int,
in_fd: ::c_int,
offset: *mut off_t,
count: ::size_t,
) -> ::ssize_t;
pub fn setfsgid(gid: ::gid_t) -> ::c_int;
pub fn setfsuid(uid: ::uid_t) -> ::c_int;
pub fn sigsuspend(mask: *const ::sigset_t) -> ::c_int;
pub fn getgrgid_r(
gid: ::gid_t,
grp: *mut ::group,
buf: *mut ::c_char,
buflen: ::size_t,
result: *mut *mut ::group,
) -> ::c_int;
pub fn sigaltstack(ss: *const stack_t, oss: *mut stack_t) -> ::c_int;
pub fn sem_close(sem: *mut sem_t) -> ::c_int;
pub fn getgrnam_r(
name: *const ::c_char,
grp: *mut ::group,
buf: *mut ::c_char,
buflen: ::size_t,
result: *mut *mut ::group,
) -> ::c_int;
pub fn pthread_sigmask(how: ::c_int, set: *const sigset_t, oldset: *mut sigset_t) -> ::c_int;
pub fn sem_open(name: *const ::c_char, oflag: ::c_int, ...) -> *mut sem_t;
pub fn getgrnam(name: *const ::c_char) -> *mut ::group;
pub fn pthread_kill(thread: ::pthread_t, sig: ::c_int) -> ::c_int;
pub fn sem_unlink(name: *const ::c_char) -> ::c_int;
pub fn daemon(nochdir: ::c_int, noclose: ::c_int) -> ::c_int;
pub fn getpwnam_r(
name: *const ::c_char,
pwd: *mut passwd,
buf: *mut ::c_char,
buflen: ::size_t,
result: *mut *mut passwd,
) -> ::c_int;
pub fn getpwuid_r(
uid: ::uid_t,
pwd: *mut passwd,
buf: *mut ::c_char,
buflen: ::size_t,
result: *mut *mut passwd,
) -> ::c_int;
pub fn sigtimedwait(
set: *const sigset_t,
info: *mut siginfo_t,
timeout: *const ::timespec,
) -> ::c_int;
pub fn sigwait(set: *const sigset_t, sig: *mut ::c_int) -> ::c_int;
pub fn pthread_atfork(
prepare: ::Option<unsafe extern "C" fn()>,
parent: ::Option<unsafe extern "C" fn()>,
child: ::Option<unsafe extern "C" fn()>,
) -> ::c_int;
pub fn getgrgid(gid: ::gid_t) -> *mut ::group;
pub fn getgrouplist(
user: *const ::c_char,
group: ::gid_t,
groups: *mut ::gid_t,
ngroups: *mut ::c_int,
) -> ::c_int;
pub fn initgroups(user: *const ::c_char, group: ::gid_t) -> ::c_int;
pub fn pthread_mutexattr_getpshared(
attr: *const pthread_mutexattr_t,
pshared: *mut ::c_int,
) -> ::c_int;
pub fn popen(command: *const c_char, mode: *const c_char) -> *mut ::FILE;
pub fn faccessat(
dirfd: ::c_int,
pathname: *const ::c_char,
mode: ::c_int,
flags: ::c_int,
) -> ::c_int;
pub fn pthread_create(
native: *mut ::pthread_t,
attr: *const ::pthread_attr_t,
f: extern "C" fn(*mut ::c_void) -> *mut ::c_void,
value: *mut ::c_void,
) -> ::c_int;
pub fn __errno() -> *mut ::c_int;
pub fn inotify_rm_watch(fd: ::c_int, wd: u32) -> ::c_int;
pub fn sendmmsg(
sockfd: ::c_int,
msgvec: *const ::mmsghdr,
vlen: ::c_uint,
flags: ::c_int,
) -> ::c_int;
pub fn recvmmsg(
sockfd: ::c_int,
msgvec: *mut ::mmsghdr,
vlen: ::c_uint,
flags: ::c_int,
timeout: *const ::timespec,
) -> ::c_int;
pub fn inotify_init() -> ::c_int;
pub fn inotify_init1(flags: ::c_int) -> ::c_int;
pub fn inotify_add_watch(fd: ::c_int, path: *const ::c_char, mask: u32) -> ::c_int;
pub fn regcomp(preg: *mut ::regex_t, pattern: *const ::c_char, cflags: ::c_int) -> ::c_int;
pub fn regexec(
preg: *const ::regex_t,
input: *const ::c_char,
nmatch: ::size_t,
pmatch: *mut regmatch_t,
eflags: ::c_int,
) -> ::c_int;
pub fn regerror(
errcode: ::c_int,
preg: *const ::regex_t,
errbuf: *mut ::c_char,
errbuf_size: ::size_t,
) -> ::size_t;
pub fn regfree(preg: *mut ::regex_t);
pub fn android_set_abort_message(msg: *const ::c_char);
pub fn gettid() -> ::pid_t;
pub fn __system_property_set(__name: *const ::c_char, __value: *const ::c_char) -> ::c_int;
pub fn __system_property_get(__name: *const ::c_char, __value: *mut ::c_char) -> ::c_int;
pub fn __system_property_find(__name: *const ::c_char) -> *const prop_info;
pub fn __system_property_find_nth(__n: ::c_uint) -> *const prop_info;
pub fn __system_property_foreach(
__callback: unsafe extern "C" fn(__pi: *const prop_info, __cookie: *mut ::c_void),
__cookie: *mut ::c_void,
) -> ::c_int;
// #include <link.h>
/// Only available in API Version 21+
pub fn dl_iterate_phdr(
callback: ::Option<
unsafe extern "C" fn(
info: *mut dl_phdr_info,
size: usize,
data: *mut ::c_void,
) -> ::c_int,
>,
data: *mut ::c_void,
) -> ::c_int;
pub fn arc4random() -> u32;
pub fn arc4random_uniform(__upper_bound: u32) -> u32;
pub fn arc4random_buf(__buf: *mut ::c_void, __n: ::size_t);
pub fn reallocarray(ptr: *mut ::c_void, nmemb: ::size_t, size: ::size_t) -> *mut ::c_void;
pub fn pthread_getcpuclockid(thread: ::pthread_t, clk_id: *mut ::clockid_t) -> ::c_int;
}
cfg_if! {
if #[cfg(target_pointer_width = "32")] {
mod b32;
pub use self::b32::*;
} else if #[cfg(target_pointer_width = "64")] {
mod b64;
pub use self::b64::*;
} else {
// Unknown target_pointer_width
}
}
impl siginfo_t {
pub unsafe fn si_addr(&self) -> *mut ::c_void {
#[repr(C)]
struct siginfo_sigfault {
_si_signo: ::c_int,
_si_errno: ::c_int,
_si_code: ::c_int,
si_addr: *mut ::c_void,
}
(*(self as *const siginfo_t as *const siginfo_sigfault)).si_addr
}
pub unsafe fn
|
(&self) -> ::sigval {
#[repr(C)]
struct siginfo_timer {
_si_signo: ::c_int,
_si_errno: ::c_int,
_si_code: ::c_int,
_si_tid: ::c_int,
_si_overrun: ::c_int,
si_sigval: ::sigval,
}
(*(self as *const siginfo_t as *const siginfo_timer)).si_sigval
}
}
cfg_if! {
if #[cfg(libc_union)] {
// Internal, for casts to access union fields
#[repr(C)]
struct sifields_sigchld {
si_pid: ::pid_t,
si_uid: ::uid_t,
si_status: ::c_int,
si_utime: ::c_long,
si_stime: ::c_long,
}
impl ::Copy for sifields_sigchld {}
impl ::Clone for sifields_sigchld {
fn clone(&self) -> sifields_sigchld {
*self
}
}
// Internal, for casts to access union fields
#[repr(C)]
union sifields {
_align_pointer: *mut ::c_void,
sigchld: sifields_sigchld,
}
// Internal, for casts to access union fields. Note that some variants
// of sifields start with a pointer, which makes the alignment of
// sifields vary on 32-bit and 64-bit architectures.
#[repr(C)]
struct siginfo_f {
_siginfo_base: [::c_int; 3],
sifields: sifields,
}
impl siginfo_t {
unsafe fn sifields(&self) -> &sifields {
&(*(self as *const siginfo_t as *const siginfo_f)).sifields
}
pub unsafe fn si_pid(&self) -> ::pid_t {
self.sifields().sigchld.si_pid
}
pub unsafe fn si_uid(&self) -> ::uid_t {
self.sifields().sigchld.si_uid
}
pub unsafe fn si_status(&self) -> ::c_int {
self.sifields().sigchld.si_status
}
pub unsafe fn si_utime(&self) -> ::c_long {
self.sifields().sigchld.si_utime
}
pub unsafe fn si_stime(&self) -> ::c_long {
self.sifields().sigchld.si_stime
}
}
}
}
|
si_value
|
server.py
|
from flask import Flask, render_template
from flask_json import FlaskJSON, JsonError, json_response, as_json
from flask_cors import CORS
import os
import sys
from modules.db import DB
from modules.redis import REDIS
from config import APP_CONFIG
dbconn = DB()
dbconn.create_table()
redisConn = REDIS()
app = Flask(__name__)
FlaskJSON(app)
# allow all origins (CORS) on the API namespace
cors = CORS(app, resources={"/metaserver/api/*": {"origins": "*"}})
NAMESPACE = 'metaserver/api'
#ping
@app.route(f"/{NAMESPACE}/ping", methods=["POST", "GET"])
def index():
payload = {
'msg' : 'pong'
}
return json_response( callback=payload )
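# Illustrative only (not part of the original app): exercising the ping route.
# The host is an assumption, and flask_json adds a default "status" field to
# the response body unless configured otherwise.
#
#   $ curl http://localhost:8282/metaserver/api/ping
#   {"callback": {"msg": "pong"}, "status": 200}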
#get title
@app.route(f"/{NAMESPACE}/title/<movietitle>", methods=["GET"])
def get_title(movietitle):
sample_payload = {
"id": "tt0068646",
"href": "https://imdb.com/title/tt0068646",
"title": "The Godfather",
"year": "1972",
"meta": {
"certificate": "18A",
"runtime": "175 min",
"genre": ["Crime", "Drama"],
"meta_score": "100",
"description": "An organized crime ...",
"directors": [
"Francis Ford Coppola"
],
"votes": "1,628,276",
"gross": "$134.97M",
"awards": {
"Actors": "5 Stars",
"Direction": "5 Stars",
"Screenplay": "5 Stars",
"Oscars": "3",
"Oscar Nominations": "11",
"BAFTA Awards": "0",
"BAFTA Nominations": "4",
"Golden Globes": "6",
"Golden Globe Nominations": "8"
},
"cast": [
{
"actor": "Marlon Brando",
"actor_link": "/name/nm0000008/",
"character": "Don Vito Corleone",
"character_link": "/title/tt0068646/characters/nm0000008"
},
...
],
"Country:": ["USA"],
"Language:": ["English","Italian","Latin"],
"Release Date:": "24 March 1972 (Canada)",
"Also Known As:": "Le parrain",
"Filming Locations:": [
"NY Eye and Ear Infirmary, 2nd Avenue & East 13th Street, New York City, New York, USA"
],
"Budget:": "$6,000,000 (estimated)",
"Opening Weekend USA:": "$302,393,19 March 1972",
"Gross USA:": "$134,966,411",
"Cumulative Worldwide Gross:": "$246,120,986",
"Production Co:": ["Paramount Pictures", "Alfran Productions"],
"Runtime:": "175 min",
"Sound Mix:": ["DTS", "Mono"],
"Color:": ["Color"],
"Aspect Ratio:": "1.85 : 1",
"storyline": "The Godfather ....",
"rating": "18A"
}
}
|
return json_response( callback=payload )
@app.errorhandler(404)
def page_not_found(e):
    payload = {
        'success': False,
        'error': 'Page Not Found',
    }
    # flask_json's status_ kwarg sets the HTTP status code on the response
    return json_response( status_=404, **payload )
if __name__ == '__main__':
app.run( debug=True, host='0.0.0.0', port=8282, threaded=True )
|
payload = {
'title' : movietitle,
'payload': sample_payload,
}
|
select-hint-error-example.d.ts
|
import { FormControl } from '@angular/forms';
import * as i0 from "@angular/core";
interface Animal {
name: string;
sound: string;
}
|
selectFormControl: FormControl;
animals: Animal[];
static ɵfac: i0.ɵɵFactoryDeclaration<SelectHintErrorExample, never>;
static ɵcmp: i0.ɵɵComponentDeclaration<SelectHintErrorExample, "select-hint-error-example", never, {}, {}, never, never>;
}
export {};
|
/** @title Select with form field features */
export declare class SelectHintErrorExample {
animalControl: FormControl;
|
requestor.go
|
/*******************************************************************************
* Copyright 2019 Dell Inc.
* Copyright 2021 Intel Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
* @author: Tingyu Zeng, Dell
*******************************************************************************/
package pkg
import (
"crypto/tls"
"crypto/x509"
"io"
"io/ioutil"
"net/http"
"time"
"github.com/edgexfoundry/go-mod-core-contracts/v2/clients/logger"
"github.com/edgexfoundry/go-mod-secrets/v2/pkg/token/fileioperformer"
)
const httpClientTimeoutDuration = 10 * time.Second
type HTTPSRequester interface {
Insecure() Caller
WithTLS(io.Reader, string) Caller
}
type fluentRequester struct {
logger logger.LoggingClient
}
func NewRequester(logger logger.LoggingClient) HTTPSRequester {
return &fluentRequester{logger}
}
func (r *fluentRequester) Insecure() Caller {
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // nolint: gosec
}
return &http.Client{Timeout: httpClientTimeoutDuration, Transport: tr}
}
func (r *fluentRequester) WithTLS(caReader io.Reader, serverName string) Caller {
readCloser := fileioperformer.MakeReadCloser(caReader)
caCert, err := ioutil.ReadAll(readCloser)
defer readCloser.Close()
if err != nil {
r.logger.Error("failed to load rootCA certificate.")
return nil
}
r.logger.Info("successful loading the rootCA certificate.")
caCertPool := x509.NewCertPool()
caCertPool.AppendCertsFromPEM(caCert)
tr := &http.Transport{
TLSClientConfig: &tls.Config{
RootCAs: caCertPool,
InsecureSkipVerify: false,
ServerName: serverName,
MinVersion: tls.VersionTLS12,
},
TLSHandshakeTimeout: httpClientTimeoutDuration,
}
return &http.Client{Timeout: httpClientTimeoutDuration, Transport: tr}
}
type mockRequester struct {
}
func
|
() *mockRequester {
return &mockRequester{}
}
func (r *mockRequester) Insecure() Caller {
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // nolint: gosec
}
return &http.Client{Timeout: httpClientTimeoutDuration, Transport: tr}
}
|
NewMockRequester
|
MarkdownView.tsx
|
import marked from 'marked';
import hljs from 'highlight.js';
import 'highlight.js/styles/github.css';
import { defineComponent } from '@/vDom';
interface MarkdownOptions {
path: string;
}
marked.setOptions( {
langPrefix: 'language-',
    highlight: function( code, lang ) {
        // fall back to plaintext so unfenced/unknown languages don't throw
        const language = hljs.getLanguage( lang ) ? lang : 'plaintext';
        return hljs.highlight( code, { language } ).value;
    }
} );
hljs.initHighlightingOnLoad();
|
const MarkdownComponent = defineComponent<MarkdownOptions>( {
name: 'MarkdownView',
init: () => null,
actions: {
createDomComponent: async( { path, ref } ): Promise<void> => {
const content = await ( await fetch( path ) ).text();
ref.el.innerHTML = marked( content );
}
},
mount: ( { createDomComponent } ) => createDomComponent(),
view: ( { ref } ) =>
<div ref={ref( 'el' )}></div>
} );
export default defineComponent( {
name: 'MarkdownViewExample',
view: () => <MarkdownComponent path='main.md'></MarkdownComponent>
} );
| |
ptr_to_v1_sourcerepository.go
|
// Code generated by pegomock. DO NOT EDIT.
package matchers
import (
"reflect"
v1 "github.com/jenkins-x/jx/pkg/apis/jenkins.io/v1"
"github.com/petergtz/pegomock"
)
func AnyPtrToV1SourceRepository() *v1.SourceRepository {
pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*v1.SourceRepository))(nil)).Elem()))
var nullValue *v1.SourceRepository
return nullValue
}
func EqPtrToV1SourceRepository(value *v1.SourceRepository) *v1.SourceRepository
|
{
pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
var nullValue *v1.SourceRepository
return nullValue
}
|
|
snackbar.ts
|
import { Module } from 'vuex';
import { has as _has, each as _each, isString as _isString } from 'lodash';
import { AxiosError } from 'axios';
export function generateMsg(error: AxiosError): string {
let msg = 'unknown';
if (error.response) {
    msg = `Code ${error.response.status}: `;
    const data = error.response.data;
    console.log(data);
    if (_has(data, 'errors')) {
      _each(data.errors, (v, k) => {
if (_isString(v)) {
msg += `${k}: ${v}; `;
}
else if (_has(v, 'detail')) {
msg += v.detail;
}
else if (_has(v, 'message')) {
msg += v.message;
}
else {
msg += v.code;
}
});
}
else if (_has(data, 'message')) {
msg += data.message;
}
else {
if (error.response.status === 401) {
msg += "Unauthorized! ";
}
msg += error.response.statusText;
}
}
else if (error.request) {
msg="Upstream may have died, or your network sucks.";
}
else {
msg=error.message;
}
return msg;
}
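// Illustrative usage sketch (not part of this module): surfacing an Axios
// failure through the namespaced snackbar store; the `store` instance and the
// request URL are hypothetical.
//
//   axios.get('/api/thing').catch((err: AxiosError) => {
//     store.commit('snackbar/showError', err);
//   });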
export type SnackbarType = '' | 'info' | 'success' | 'error';
export interface State {
show: boolean;
type: SnackbarType;
msg: string;
}
const snackbarModule: Module<State, any> = {
namespaced: true,
state: {
show: false,
type: '',
msg: '',
},
getters: {
show: (state): boolean => state.show,
msg: (state): string => state.msg,
type: (state): string => state.type,
},
mutations: {
open(state: State, msg: string) {
state.msg = msg;
state.show = true;
state.type = 'info';
},
|
state.show = false;
state.type = '';
},
showSuccess(state: State, msg: string) {
state.msg = msg;
state.show = true;
state.type = 'success';
},
showError(state: State, error: AxiosError | string) {
state.type='error';
state.show=true;
if (typeof error === 'string') {
state.msg = error;
return;
}
state.msg = generateMsg(error);
},
},
};
export default snackbarModule;
|
close(state: State) {
state.msg = '';
|
serviceskus.go
|
package apimanagement
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// ServiceSkusClient is the apiManagement Client
type ServiceSkusClient struct {
BaseClient
}
// NewServiceSkusClient creates an instance of the ServiceSkusClient client.
func NewServiceSkusClient(subscriptionID string) ServiceSkusClient {
return NewServiceSkusClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewServiceSkusClientWithBaseURI creates an instance of the ServiceSkusClient client using a custom endpoint. Use
// this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack).
func NewServiceSkusClientWithBaseURI(baseURI string, subscriptionID string) ServiceSkusClient {
return ServiceSkusClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// ListAvailableServiceSkus gets all available SKUs for a given API Management service
// Parameters:
// resourceGroupName - the name of the resource group.
// serviceName - the name of the API Management service.
func (client ServiceSkusClient) ListAvailableServiceSkus(ctx context.Context, resourceGroupName string, serviceName string) (result ResourceSkuResultsPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ServiceSkusClient.ListAvailableServiceSkus")
defer func() {
sc := -1
if result.rsr.Response.Response != nil {
sc = result.rsr.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: serviceName,
Constraints: []validation.Constraint{{Target: "serviceName", Name: validation.MaxLength, Rule: 50, Chain: nil},
{Target: "serviceName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "serviceName", Name: validation.Pattern, Rule: `^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("apimanagement.ServiceSkusClient", "ListAvailableServiceSkus", err.Error())
}
result.fn = client.listAvailableServiceSkusNextResults
req, err := client.ListAvailableServiceSkusPreparer(ctx, resourceGroupName, serviceName)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.ServiceSkusClient", "ListAvailableServiceSkus", nil, "Failure preparing request")
return
}
resp, err := client.ListAvailableServiceSkusSender(req)
if err != nil {
result.rsr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "apimanagement.ServiceSkusClient", "ListAvailableServiceSkus", resp, "Failure sending request")
return
}
result.rsr, err = client.ListAvailableServiceSkusResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.ServiceSkusClient", "ListAvailableServiceSkus", resp, "Failure responding to request")
}
return
}
// ListAvailableServiceSkusPreparer prepares the ListAvailableServiceSkus request.
func (client ServiceSkusClient) ListAvailableServiceSkusPreparer(ctx context.Context, resourceGroupName string, serviceName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"serviceName": autorest.Encode("path", serviceName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-06-01-preview"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/skus", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListAvailableServiceSkusSender sends the ListAvailableServiceSkus request. The method will close the
// http.Response Body if it receives an error.
func (client ServiceSkusClient) ListAvailableServiceSkusSender(req *http.Request) (*http.Response, error) {
sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
return autorest.SendWithSender(client, req, sd...)
}
// ListAvailableServiceSkusResponder handles the response to the ListAvailableServiceSkus request. The method always
// closes the http.Response Body.
func (client ServiceSkusClient) ListAvailableServiceSkusResponder(resp *http.Response) (result ResourceSkuResults, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listAvailableServiceSkusNextResults retrieves the next set of results, if any.
func (client ServiceSkusClient) listAvailableServiceSkusNextResults(ctx context.Context, lastResults ResourceSkuResults) (result ResourceSkuResults, err error) {
req, err := lastResults.resourceSkuResultsPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "apimanagement.ServiceSkusClient", "listAvailableServiceSkusNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListAvailableServiceSkusSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "apimanagement.ServiceSkusClient", "listAvailableServiceSkusNextResults", resp, "Failure sending next results request")
}
result, err = client.ListAvailableServiceSkusResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "apimanagement.ServiceSkusClient", "listAvailableServiceSkusNextResults", resp, "Failure responding to next results request")
}
return
}
// ListAvailableServiceSkusComplete enumerates all values, automatically crossing page boundaries as required.
func (client ServiceSkusClient) ListAvailableServiceSkusComplete(ctx context.Context, resourceGroupName string, serviceName string) (result ResourceSkuResultsIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ServiceSkusClient.ListAvailableServiceSkus")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.ListAvailableServiceSkus(ctx, resourceGroupName, serviceName)
return
}
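A hedged sketch of how a caller might drive the pager above, written as if it lived in the same apimanagement package to avoid guessing the SDK import path. The resource names and subscription ID are placeholders, and the autorest.Authorizer is presumed to have been built elsewhere; NotDone, Value, and NextWithContext are the standard accessors on AutoRest-generated iterators such as ResourceSkuResultsIterator.

import (
	"context"
	"fmt"
	"log"

	"github.com/Azure/go-autorest/autorest"
)

// listSkus enumerates every available SKU for one service, letting the
// Complete variant cross page boundaries automatically.
func listSkus(ctx context.Context, authorizer autorest.Authorizer) {
	client := NewServiceSkusClient("<subscription-id>") // placeholder ID
	client.Authorizer = authorizer
	it, err := client.ListAvailableServiceSkusComplete(ctx, "my-rg", "my-apim") // hypothetical names
	if err != nil {
		log.Fatal(err)
	}
	for it.NotDone() {
		fmt.Printf("%+v\n", it.Value()) // it.Value() yields one ResourceSkuResult
		if err := it.NextWithContext(ctx); err != nil {
			log.Fatal(err)
		}
	}
}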
gyptest-all.py
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verify the settings that cause a set of programs to be created in
a specific build directory, and that no intermediate built files
get created outside of that build directory hierarchy even when
referred to with deeply-nested ../../.. paths.
"""
import TestGyp
# TODO(mmoss): Make only supports (theoretically) a single, global build
# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
# generators support, so this doesn't work yet for make.
# TODO(mmoss): Make also has the issue that the top-level Makefile is written to
# the "--depth" location, which is one level above 'src', but then this test
# moves 'src' somewhere else, leaving the Makefile behind, so make can't find
# its sources. I'm not sure if make is wrong for writing outside the current
# directory, or if the test is wrong for assuming everything generated is under
# the current directory.
# Android, Ninja, and CMake do not support setting the build directory.
test = TestGyp.TestGyp(formats=['!make', '!ninja', '!android', '!cmake'])
test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
if test.format == 'msvs':
if test.uses_msbuild:
test.must_contain('src/prog1.vcxproj',
'<OutDir>..\\builddir\\Default\\</OutDir>')
else:
test.must_contain('src/prog1.vcproj',
'OutputDirectory="..\\builddir\\Default\\"')
test.relocate('src', 'relocate/src')
test.subdir('relocate/builddir')
# Make sure that all the built ../../etc. files only get put under builddir,
# by making all of relocate read-only and then making only builddir writable.
test.writable('relocate', False)
test.writable('relocate/builddir', True)
# Suppress the test infrastructure's setting SYMROOT on the command line.
test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')
expect1 = """\
Hello from prog1.c
Hello from func1.c
"""
expect2 = """\
Hello from subdir2/prog2.c
Hello from func2.c
"""
expect3 = """\
Hello from subdir2/subdir3/prog3.c
Hello from func3.c
"""
expect4 = """\
Hello from subdir2/subdir3/subdir4/prog4.c
Hello from func4.c
"""
expect5 = """\
Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
Hello from func5.c
"""
def run_builddir(prog, expect):
  dir = 'relocate/builddir/Default/'
  test.run(program=test.workpath(dir + prog), stdout=expect)

run_builddir('prog1', expect1)
run_builddir('prog2', expect2)
run_builddir('prog3', expect3)
run_builddir('prog4', expect4)
run_builddir('prog5', expect5)
test.pass_test()
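For context, a hypothetical minimal src/prog1.gyp consistent with the expectations above; the real fixture ships with the gyp test suite, so the file contents and the builddir.gypi include are assumptions.

# Hypothetical sketch of src/prog1.gyp; the actual test fixture may differ.
{
  'includes': ['builddir.gypi'],  # assumed to carry the build-directory settings
  'targets': [
    {
      'target_name': 'prog1',
      'type': 'executable',
      'sources': ['prog1.c', 'func1.c'],
    },
  ],
}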
d3-simple-slider.min.js
// https://github.com/johnwalley/d3-simple-slider v1.8.0 Copyright 2020 John Walley
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("d3-array"),require("d3-axis"),require("d3-dispatch"),require("d3-drag"),require("d3-ease"),require("d3-scale"),require("d3-selection"),require("d3-transition")):"function"==typeof define&&define.amd?define(["exports","d3-array","d3-axis","d3-dispatch","d3-drag","d3-ease","d3-scale","d3-selection","d3-transition"],e):e((t=t||self).d3=t.d3||{},t.d3,t.d3,t.d3,t.d3,t.d3,t.d3,t.d3)}(this,(function(t,e,a,r,n,l,i,u){"use strict";function s(t){return"translate("+t+",0)"}function
c
(t){return"translate(0,"+t+")"}function o(t,o){o=void 0!==o?o.copy():null;var d=[0],f=[0],m=[0,10],h=100,g=100,p=!0,v="M-5.5,-5.5v10l6,5.5l6,-5.5v-10z",k=null,x=null,y=null,A=null,w=null,b=null,D=null,M=r.dispatch("onchange","start","end","drag"),q=null,z=null,F=null,L=1===t||4===t?-1:1,V=4===t||2===t?-1:1,O=4===t||2===t?"y":"x",R=4===t||2===t?"x":"y",B=1===t||3===t?s:c,E=1===t||3===t?c:s,P=null;switch(t){case 1:P=a.axisTop;break;case 2:P=a.axisRight;break;case 3:P=a.axisBottom;break;case 4:P=a.axisLeft}var T=null,j=null;function H(a){q=a.selection?a.selection():a,o||(o=(o=m[0]instanceof Date?i.scaleTime():i.scaleLinear()).domain(m).range(1===t||3===t?[0,h]:[g,0]).clamp(!0)),z=i.scaleLinear().range(o.range()).domain(o.range()).clamp(!0),d=d.map((function(t){return i.scaleLinear().range(m).domain(m).clamp(!0)(t)})),A=A||o.tickFormat(),b=b||A||o.tickFormat(),q.selectAll(".axis").data([null]).enter().append("g").attr("transform",E(7*L)).attr("class","axis");var r=q.selectAll(".slider").data([null]),l=r.enter().append("g").attr("class","slider").attr("cursor",1===t||3===t?"ew-resize":"ns-resize").call(n.drag().on("start",(function(){u.select(this).classed("active",!0);var a=z(3===t||1===t?u.event.x:u.event.y);F=d[0]===m[0]&&d[1]===m[0]?1:d[0]===m[1]&&d[1]===m[1]?0:e.scan(d.map((function(t){return Math.abs(t-U(o.invert(a)))})));var n=d.map((function(t,e){return e===F?U(o.invert(a)):t}));C(n),M.call("start",r,1===n.length?n[0]:n),_(n,!0)})).on("drag",(function(){var e=c(z(3===t||1===t?u.event.x:u.event.y));C(e),M.call("drag",r,1===e.length?e[0]:e),_(e,!0)})).on("end",(function(){u.select(this).classed("active",!1);var e=c(z(3===t||1===t?u.event.x:u.event.y));C(e),M.call("end",r,1===e.length?e[0]:e),_(e,!0),F=null})));l.append("line").attr("class","track").attr(O+"1",o.range()[0]-8*V).attr("stroke","#bbb").attr("stroke-width",6).attr("stroke-linecap","round"),l.append("line").attr("class","track-inset").attr(O+"1",o.range()[0]-8*V).attr("stroke","#eee").attr("stroke-width",4).attr("stroke-linecap","round"),D&&l.append("line").attr("class","track-fill").attr(O+"1",1===d.length?o.range()[0]-8*V:o(d[0])).attr("stroke",D).attr("stroke-width",4).attr("stroke-linecap","round"),l.append("line").attr("class","track-overlay").attr(O+"1",o.range()[0]-8*V).attr("stroke","transparent").attr("stroke-width",40).attr("stroke-linecap","round").merge(r.select(".track-overlay"));var s=l.selectAll(".parameter-value").data(d).enter().append("g").attr("class","parameter-value").attr("transform",(function(t){return B(o(t))})).attr("font-family","sans-serif").attr("text-anchor",2===t?"start":4===t?"end":"middle");function c(t){var e=U(o.invert(t));return d.map((function(t,a){return 2===d.length?a===F?0===F?Math.min(e,U(d[1])):Math.max(e,U(d[0])):t:a===F?e:t}))}s.append("path").attr("transform","rotate("+90*(t+1)+")").attr("d",v).attr("class","handle").attr("aria-label","handle").attr("aria-valuemax",m[1]).attr("aria-valuemin",m[0]).attr("aria-valuenow",d).attr("aria-orientation",4===t||2===t?"vertical":"horizontal").attr("focusable","true").attr("tabindex",0).attr("fill","white").attr("stroke","#777").on("keydown",(function(t,e){var a=k||(m[1]-m[0])/100;function r(t){return d.map((function(a,r){return 
2===d.length?r===e?0===e?Math.min(t,U(d[1])):Math.max(t,U(d[0])):a:r===e?t:a}))}switch(u.event.key){case"ArrowLeft":case"ArrowDown":H.value(r(+d[e]-a)),u.event.preventDefault();break;case"PageDown":H.value(r(+d[e]-2*a)),u.event.preventDefault();break;case"ArrowRight":case"ArrowUp":H.value(r(+d[e]+a)),u.event.preventDefault();break;case"PageUp":H.value(r(+d[e]+2*a)),u.event.preventDefault();break;case"Home":H.value(r(m[0])),u.event.preventDefault();break;case"End":H.value(r(m[1])),u.event.preventDefault()}})),p&&s.append("text").attr("font-size",10).attr(R,27*L).attr("dy",1===t?"0em":3===t?".71em":".32em").attr("transform",d.length>1?"translate(0,0)":null).text((function(t,e){return A(d[e])})),a.select(".track").attr(O+"2",o.range()[1]+8*V),a.select(".track-inset").attr(O+"2",o.range()[1]+8*V),D&&a.select(".track-fill").attr(O+"2",1===d.length?o(d[0]):o(d[1])),a.select(".track-overlay").attr(O+"2",o.range()[1]+8*V),a.select(".axis").call(P(o).tickFormat(A).ticks(w).tickValues(x)),q.select(".axis").select(".domain").remove(),a.select(".axis").attr("transform",E(7*L)),a.selectAll(".axis text").attr("fill","#aaa").attr(R,20*L).attr("dy",1===t?"0em":3===t?".71em":".32em").attr("text-anchor",2===t?"start":4===t?"end":"middle"),a.selectAll(".axis line").attr("stroke","#aaa"),a.selectAll(".parameter-value").attr("transform",(function(t){return B(o(t))})),Q(),j=q.selectAll(".parameter-value text"),T=q.select(".track-fill")}function Q(){if(q&&p){var t=[];if(d.forEach((function(a){var r=[];q.selectAll(".axis .tick").each((function(t){r.push(Math.abs(t-a))})),t.push(e.scan(r))})),q.selectAll(".axis .tick text").attr("opacity",(function(e,a){return~t.indexOf(a)?0:1})),j&&d.length>1){var a,r,n=[],l=[];j.nodes().forEach((function(t,e){a=t.getBoundingClientRect(),r=t.getAttribute("transform").split(/[()]/)[1].split(",")["x"===O?0:1],n[e]=a[O]-parseFloat(r),l[e]=a["x"===O?"width":"height"]})),"x"===O?(r=Math.max(0,(n[0]+l[0]-n[1])/2),j.attr("transform",(function(t,e){return"translate("+(1===e?r:-r)+",0)"}))):(r=Math.max(0,(n[1]+l[1]-n[0])/2),j.attr("transform",(function(t,e){return"translate(0,"+(1===e?-r:r)+")"})))}}}function U(t){if(k){var a=(t-m[0])%k,r=t-a;return 2*a>k&&(r+=k),t instanceof Date?new Date(r):r}if(y){var n=e.scan(y.map((function(e){return Math.abs(t-e)})));return y[n]}return t}function _(t,e){(d[0]!==t[0]||d.length>1&&d[1]!==t[1])&&(d=t,e&&M.call("onchange",H,1===t.length?t[0]:t),Q())}function C(t,e){q&&((e=void 0!==e&&e)?(q.selectAll(".parameter-value").data(t).transition().ease(l.easeQuadOut).duration(200).attr("transform",(function(t){return B(o(t))})).select(".handle").attr("aria-valuenow",(function(t){return t})),D&&T.transition().ease(l.easeQuadOut).duration(200).attr(O+"1",1===d.length?o.range()[0]-8*L:o(t[0])).attr(O+"2",1===d.length?o(t[0]):o(t[1]))):(q.selectAll(".parameter-value").data(t).attr("transform",(function(t){return B(o(t))})).select(".handle").attr("aria-valuenow",(function(t){return t})),D&&T.attr(O+"1",1===d.length?o.range()[0]-8*L:o(t[0])).attr(O+"2",1===d.length?o(t[0]):o(t[1]))),p&&j.text((function(e,a){return b(t[a])})))}return o&&(m=[e.min(o.domain()),e.max(o.domain())],1===t||3===t?h=e.max(o.range())-e.min(o.range()):g=e.max(o.range())-e.min(o.range()),o=o.clamp(!0)),H.min=function(t){return arguments.length?(m[0]=t,o&&o.domain(m),H):m[0]},H.max=function(t){return arguments.length?(m[1]=t,o&&o.domain(m),H):m[1]},H.domain=function(t){return arguments.length?(m=t,o&&o.domain(m),H):m},H.width=function(t){return 
arguments.length?(h=t,o&&o.range([o.range()[0],o.range()[0]+h]),H):h},H.height=function(t){return arguments.length?(g=t,o&&o.range([o.range()[0],o.range()[0]+g]),H):g},H.tickFormat=function(t){return arguments.length?(A=t,H):A},H.displayFormat=function(t){return arguments.length?(b=t,H):b},H.ticks=function(t){return arguments.length?(w=t,H):w},H.value=function(t){if(!arguments.length)return 1===d.length?d[0]:d;var e=Array.isArray(t)?t:[t];if(e.sort((function(t,e){return t-e})),o){var a=e.map(o).map(z),r=a.map(o.invert).map(U);C(r,!0),_(r,!0)}else d=e;return H},H.silentValue=function(t){if(!arguments.length)return 1===d.length?d[0]:d;var e=Array.isArray(t)?t:[t];if(e.sort((function(t,e){return t-e})),o){var a=e.map(o).map(z),r=a.map(o.invert).map(U);C(r,!1),_(r,!1)}else d=e;return H},H.default=function(t){if(!arguments.length)return 1===f.length?f[0]:f;var e=Array.isArray(t)?t:[t];return e.sort((function(t,e){return t-e})),f=e,d=e,H},H.step=function(t){return arguments.length?(k=t,H):k},H.tickValues=function(t){return arguments.length?(x=t,H):x},H.marks=function(t){return arguments.length?(y=t,H):y},H.handle=function(t){return arguments.length?(v=t,H):v},H.displayValue=function(t){return arguments.length?(p=t,H):p},H.fill=function(t){return arguments.length?(D=t,H):D},H.on=function(){var t=M.on.apply(M,arguments);return t===M?H:t},H}t.sliderBottom=function(t){return o(3,t)},t.sliderHorizontal=function(t){return o(3,t)},t.sliderLeft=function(t){return o(4,t)},t.sliderRight=function(t){return o(2,t)},t.sliderTop=function(t){return o(1,t)},t.sliderVertical=function(t){return o(4,t)},Object.defineProperty(t,"__esModule",{value:!0})}));
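A small usage sketch of the API visible in the bundle above (sliderBottom plus the chainable min/max/width/ticks/default/on accessors). It assumes a full d3 build and this file are already loaded, and that the page contains an element with id="slider"; both are assumptions.

// Hypothetical page-side usage of d3.sliderBottom from the bundle above.
var slider = d3.sliderBottom()
  .min(0)
  .max(100)
  .width(300)
  .ticks(5)
  .default(50)
  .on('onchange', function (val) {
    console.log('value:', val); // fires whenever the handle moves
  });

d3.select('#slider')
  .append('svg')
  .attr('width', 360)
  .attr('height', 80)
  .append('g')
  .attr('transform', 'translate(30,30)')
  .call(slider);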
desktop.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from frappe import _
def get_data():
    return [
        {
            "module_name": "Testapp",
            "color": "grey",
            "icon": "octicon octicon-file-directory",
            "type": "module",
            "label": _("Testapp")
        }
    ]