file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
constants.py | # ./constants.py
import os
import enum
from dotenv import load_dotenv
load_dotenv()
@enum.unique
class InputConfig(enum.Enum):
| '''
Config for the gameplay, Takes input from .env
Value should be something tha can be used by pyAutoGUI
If not available then, uses default input config (mine)
and Yes I use arrow keys, deal with it!
'''
DEFAULT = ''
UP = os.getenv('UP', 'up').lower()
DOWN = os.getenv('DOWN', 'down').lower()
LEFT = os.getenv('LEFT', 'left').lower()
RIGHT = os.getenv('RIGHT', 'right').lower()
FRONT_PUNCH = os.getenv('FRONT_PUNCH', 'a').lower()
BACK_PUNCH = os.getenv('BACK_PUNCH', 's').lower()
FRONT_KICK = os.getenv('FRONT_KICK', 'z').lower()
BACK_KICK = os.getenv('BACK_KICK', 'x').lower()
THROW = os.getenv('THROW', 'd').lower()
TAG = os.getenv('TAG', 'c').lower()
BLOCK = os.getenv('BLOCK', 'space').lower()
FLIP_STANCE = os.getenv('FLIP_STANCE', 'ctrlright').lower()
PAUSE = os.getenv('PAUSE', 'tab').lower()
BACK = os.getenv('BACK', 'backspace').lower() |
|
datatable.min.js | /*
* This combined file was created by the DataTables downloader builder:
* https://datatables.net/download
*
* To rebuild or modify this file with the latest versions of the included
* software please visit:
* https://datatables.net/download/#dt/dt-1.10.20
*
* Included libraries:
* DataTables 1.10.20
*/
/*!
Copyright 2008-2019 SpryMedia Ltd.
This source file is free software, available under the following license:
MIT license - http://datatables.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://www.datatables.net
DataTables 1.10.20
©2008-2019 SpryMedia Ltd - datatables.net/license
*/
var $jscomp = $jscomp || {};
$jscomp.scope = {};
$jscomp.findInternal = function (f, z, y) {
f instanceof String && (f = String(f));
for (var p = f.length, H = 0; H < p; H++) {
var L = f[H];
if (z.call(y, L, H, f)) return {
i: H,
v: L
}
}
return {
i: -1,
v: void 0
}
};
$jscomp.ASSUME_ES5 = !1;
$jscomp.ASSUME_NO_NATIVE_MAP = !1;
$jscomp.ASSUME_NO_NATIVE_SET = !1;
$jscomp.SIMPLE_FROUND_POLYFILL = !1;
$jscomp.defineProperty = $jscomp.ASSUME_ES5 || "function" == typeof Object.defineProperties ? Object.defineProperty : function (f, z, y) {
f != Array.prototype && f != Object.prototype && (f[z] = y.value)
};
$jscomp.getGlobal = function (f) {
return "undefined" != typeof window && window === f ? f : "undefined" != typeof global && null != global ? global : f
};
$jscomp.global = $jscomp.getGlobal(this);
$jscomp.polyfill = function (f, z, y, p) {
if (z) {
y = $jscomp.global;
f = f.split(".");
for (p = 0; p < f.length - 1; p++) {
var H = f[p];
H in y || (y[H] = {});
y = y[H]
}
f = f[f.length - 1];
p = y[f];
z = z(p);
z != p && null != z && $jscomp.defineProperty(y, f, {
configurable: !0,
writable: !0,
value: z
})
}
};
$jscomp.polyfill("Array.prototype.find", function (f) {
return f ? f : function (f, y) {
return $jscomp.findInternal(this, f, y).v
}
}, "es6", "es3");
(function (f) {
"function" === typeof define && define.amd ? define(["jquery"], function (z) {
return f(z, window, document)
}) : "object" === typeof exports ? module.exports = function (z, y) {
z || (z = window);
y || (y = "undefined" !== typeof window ? require("jquery") : require("jquery")(z));
return f(y, z, z.document)
} : f(jQuery, window, document)
})(function (f, z, y, p) {
function H(a) {
var b, c, d = {};
f.each(a, function (e, h) {
(b = e.match(/^([^A-Z]+?)([A-Z])/)) && -1 !== "a aa ai ao as b fn i m o s ".indexOf(b[1] + " ") && (c = e.replace(b[0], b[2].toLowerCase()),
d[c] = e, "o" === b[1] && H(a[e]))
});
a._hungarianMap = d
}
function L(a, b, c) {
a._hungarianMap || H(a);
var d;
f.each(b, function (e, h) {
d = a._hungarianMap[e];
d === p || !c && b[d] !== p || ("o" === d.charAt(0) ? (b[d] || (b[d] = {}), f.extend(!0, b[d], b[e]), L(a[d], b[d], c)) : b[d] = b[e])
})
}
function Ga(a) {
var b = q.defaults.oLanguage,
c = b.sDecimal;
c && Ha(c);
if (a) {
var d = a.sZeroRecords;
!a.sEmptyTable && d && "No data available in table" === b.sEmptyTable && M(a, a, "sZeroRecords", "sEmptyTable");
!a.sLoadingRecords && d && "Loading..." === b.sLoadingRecords && M(a, a,
"sZeroRecords", "sLoadingRecords");
a.sInfoThousands && (a.sThousands = a.sInfoThousands);
(a = a.sDecimal) && c !== a && Ha(a)
}
}
function jb(a) {
F(a, "ordering", "bSort");
F(a, "orderMulti", "bSortMulti");
F(a, "orderClasses", "bSortClasses");
F(a, "orderCellsTop", "bSortCellsTop");
F(a, "order", "aaSorting");
F(a, "orderFixed", "aaSortingFixed");
F(a, "paging", "bPaginate");
F(a, "pagingType", "sPaginationType");
F(a, "pageLength", "iDisplayLength");
F(a, "searching", "bFilter");
"boolean" === typeof a.sScrollX && (a.sScrollX = a.sScrollX ? "100%" :
"");
"boolean" === typeof a.scrollX && (a.scrollX = a.scrollX ? "100%" : "");
if (a = a.aoSearchCols)
for (var b = 0, c = a.length; b < c; b++) a[b] && L(q.models.oSearch, a[b])
}
function kb(a) {
F(a, "orderable", "bSortable");
F(a, "orderData", "aDataSort");
F(a, "orderSequence", "asSorting");
F(a, "orderDataType", "sortDataType");
var b = a.aDataSort;
"number" !== typeof b || f.isArray(b) || (a.aDataSort = [b])
}
function lb(a) {
if (!q.__browser) {
var b = {};
q.__browser = b;
var c = f("<div/>").css({
position: "fixed",
top: 0,
left: -1 * f(z).scrollLeft(),
height: 1,
width: 1,
overflow: "hidden"
}).append(f("<div/>").css({
position: "absolute",
top: 1,
left: 1,
width: 100,
overflow: "scroll"
}).append(f("<div/>").css({
width: "100%",
height: 10
}))).appendTo("body"),
d = c.children(),
e = d.children();
b.barWidth = d[0].offsetWidth - d[0].clientWidth;
b.bScrollOversize = 100 === e[0].offsetWidth && 100 !== d[0].clientWidth;
b.bScrollbarLeft = 1 !== Math.round(e.offset().left);
b.bBounding = c[0].getBoundingClientRect().width ? !0 : !1;
c.remove()
}
f.extend(a.oBrowser, q.__browser);
a.oScroll.iBarWidth = q.__browser.barWidth
}
function mb(a, b, c, d, e, h) {
var g = !1;
if (c !== p) {
var k = c;
g = !0
}
for (; d !== e;) a.hasOwnProperty(d) && (k = g ? b(k, a[d], d, a) : a[d], g = !0, d += h);
return k
}
function Ia(a, b) {
var c = q.defaults.column,
d = a.aoColumns.length;
c = f.extend({}, q.models.oColumn, c, {
nTh: b ? b : y.createElement("th"),
sTitle: c.sTitle ? c.sTitle : b ? b.innerHTML : "",
aDataSort: c.aDataSort ? c.aDataSort : [d],
mData: c.mData ? c.mData : d,
idx: d
});
a.aoColumns.push(c);
c = a.aoPreSearchCols;
c[d] = f.extend({}, q.models.oSearch, c[d]);
ma(a, d, f(b).data())
}
function ma(a, b, c) {
b = a.aoColumns[b];
var d = a.oClasses,
e = f(b.nTh);
if (!b.sWidthOrig) {
b.sWidthOrig = e.attr("width") || null;
var h = (e.attr("style") || "").match(/width:\s*(\d+[pxem%]+)/);
h && (b.sWidthOrig = h[1])
}
c !== p && null !== c && (kb(c), L(q.defaults.column, c, !0), c.mDataProp === p || c.mData || (c.mData = c.mDataProp), c.sType && (b._sManualType = c.sType), c.className && !c.sClass && (c.sClass = c.className), c.sClass && e.addClass(c.sClass), f.extend(b, c), M(b, c, "sWidth", "sWidthOrig"), c.iDataSort !== p && (b.aDataSort = [c.iDataSort]), M(b, c, "aDataSort"));
var g = b.mData,
k = U(g),
l = b.mRender ? U(b.mRender) : null;
c = function (a) {
return "string" === typeof a && -1 !== a.indexOf("@")
};
b._bAttrSrc = f.isPlainObject(g) && (c(g.sort) || c(g.type) || c(g.filter));
b._setter = null;
b.fnGetData = function (a, b, c) {
var d = k(a, b, p, c);
return l && b ? l(d, b, a, c) : d
};
b.fnSetData = function (a, b, c) {
return Q(g)(a, b, c)
};
"number" !== typeof g && (a._rowReadObject = !0);
a.oFeatures.bSort || (b.bSortable = !1, e.addClass(d.sSortableNone));
a = -1 !== f.inArray("asc", b.asSorting);
c = -1 !== f.inArray("desc", b.asSorting);
b.bSortable && (a || c) ? a && !c ? (b.sSortingClass =
d.sSortableAsc, b.sSortingClassJUI = d.sSortJUIAscAllowed) : !a && c ? (b.sSortingClass = d.sSortableDesc, b.sSortingClassJUI = d.sSortJUIDescAllowed) : (b.sSortingClass = d.sSortable, b.sSortingClassJUI = d.sSortJUI) : (b.sSortingClass = d.sSortableNone, b.sSortingClassJUI = "")
}
function aa(a) {
if (!1 !== a.oFeatures.bAutoWidth) {
var b = a.aoColumns;
Ja(a);
for (var c = 0, d = b.length; c < d; c++) b[c].nTh.style.width = b[c].sWidth
}
b = a.oScroll;
"" === b.sY && "" === b.sX || na(a);
A(a, null, "column-sizing", [a])
}
function ba(a, b) {
a = oa(a, "bVisible");
return "number" ===
typeof a[b] ? a[b] : null
}
function ca(a, b) {
a = oa(a, "bVisible");
b = f.inArray(b, a);
return -1 !== b ? b : null
}
function W(a) {
var b = 0;
f.each(a.aoColumns, function (a, d) {
d.bVisible && "none" !== f(d.nTh).css("display") && b++
});
return b
}
function oa(a, b) {
var c = [];
f.map(a.aoColumns, function (a, e) {
a[b] && c.push(e)
});
return c
}
function Ka(a) {
var b = a.aoColumns,
c = a.aoData,
d = q.ext.type.detect,
e, h, g;
var k = 0;
for (e = b.length; k < e; k++) {
var f = b[k];
var n = [];
if (!f.sType && f._sManualType) f.sType = f._sManualType;
else if (!f.sType) {
var m = 0;
for (h =
d.length; m < h; m++) {
var w = 0;
for (g = c.length; w < g; w++) {
n[w] === p && (n[w] = I(a, w, k, "type"));
var u = d[m](n[w], a);
if (!u && m !== d.length - 1) break;
if ("html" === u) break
}
if (u) {
f.sType = u;
break
}
}
f.sType || (f.sType = "string")
}
}
}
function nb(a, b, c, d) {
var e, h, g, k = a.aoColumns;
if (b)
for (e = b.length - 1; 0 <= e; e--) {
var l = b[e];
var n = l.targets !== p ? l.targets : l.aTargets;
f.isArray(n) || (n = [n]);
var m = 0;
for (h = n.length; m < h; m++)
if ("number" === typeof n[m] && 0 <= n[m]) {
for (; k.length <= n[m];) Ia(a);
d(n[m], l)
} else if ("number" === typeof n[m] && 0 > n[m]) d(k.length +
n[m], l);
else if ("string" === typeof n[m]) {
var w = 0;
for (g = k.length; w < g; w++)("_all" == n[m] || f(k[w].nTh).hasClass(n[m])) && d(w, l)
}
}
if (c)
for (e = 0, a = c.length; e < a; e++) d(e, c[e])
}
function R(a, b, c, d) {
var e = a.aoData.length,
h = f.extend(!0, {}, q.models.oRow, {
src: c ? "dom" : "data",
idx: e
});
h._aData = b;
a.aoData.push(h);
for (var g = a.aoColumns, k = 0, l = g.length; k < l; k++) g[k].sType = null;
a.aiDisplayMaster.push(e);
b = a.rowIdFn(b);
b !== p && (a.aIds[b] = h);
!c && a.oFeatures.bDeferRender || La(a, e, c, d);
return e
}
function pa(a, b) {
var c;
b instanceof
f || (b = f(b));
return b.map(function (b, e) {
c = Ma(a, e);
return R(a, c.data, e, c.cells)
})
}
function I(a, b, c, d) {
var e = a.iDraw,
h = a.aoColumns[c],
g = a.aoData[b]._aData,
k = h.sDefaultContent,
f = h.fnGetData(g, d, {
settings: a,
row: b,
col: c
});
if (f === p) return a.iDrawError != e && null === k && (O(a, 0, "Requested unknown parameter " + ("function" == typeof h.mData ? "{function}" : "'" + h.mData + "'") + " for row " + b + ", column " + c, 4), a.iDrawError = e), k;
if ((f === g || null === f) && null !== k && d !== p) f = k;
else if ("function" === typeof f) return f.call(g);
return null ===
f && "display" == d ? "" : f
}
function ob(a, b, c, d) {
a.aoColumns[c].fnSetData(a.aoData[b]._aData, d, {
settings: a,
row: b,
col: c
})
}
function Na(a) {
return f.map(a.match(/(\\.|[^\.])+/g) || [""], function (a) {
return a.replace(/\\\./g, ".")
})
}
function U(a) {
if (f.isPlainObject(a)) {
var b = {};
f.each(a, function (a, c) {
c && (b[a] = U(c))
});
return function (a, c, h, g) {
var d = b[c] || b._;
return d !== p ? d(a, c, h, g) : a
}
}
if (null === a) return function (a) {
return a
};
if ("function" === typeof a) return function (b, c, h, g) {
return a(b, c, h, g)
};
if ("string" !== typeof a ||
-1 === a.indexOf(".") && -1 === a.indexOf("[") && -1 === a.indexOf("(")) return function (b, c) {
return b[a]
};
var c = function (a, b, h) {
if ("" !== h) {
var d = Na(h);
for (var e = 0, l = d.length; e < l; e++) {
h = d[e].match(da);
var n = d[e].match(X);
if (h) {
d[e] = d[e].replace(da, "");
"" !== d[e] && (a = a[d[e]]);
n = [];
d.splice(0, e + 1);
d = d.join(".");
if (f.isArray(a))
for (e = 0, l = a.length; e < l; e++) n.push(c(a[e], b, d));
a = h[0].substring(1, h[0].length - 1);
a = "" === a ? n : n.join(a);
break
} else if (n) {
d[e] = d[e].replace(X, "");
a = a[d[e]]();
continue
}
if (null === a || a[d[e]] ===
p) return p;
a = a[d[e]]
}
}
return a
};
return function (b, e) {
return c(b, e, a)
}
}
function Q(a) {
if (f.isPlainObject(a)) return Q(a._);
if (null === a) return function () {};
if ("function" === typeof a) return function (b, d, e) {
a(b, "set", d, e)
};
if ("string" !== typeof a || -1 === a.indexOf(".") && -1 === a.indexOf("[") && -1 === a.indexOf("(")) return function (b, d) {
b[a] = d
};
var b = function (a, d, e) {
e = Na(e);
var c = e[e.length - 1];
for (var g, k, l = 0, n = e.length - 1; l < n; l++) {
g = e[l].match(da);
k = e[l].match(X);
if (g) {
e[l] = e[l].replace(da, "");
a[e[l]] = [];
c = e.slice();
c.splice(0, l + 1);
g = c.join(".");
if (f.isArray(d))
for (k = 0, n = d.length; k < n; k++) c = {}, b(c, d[k], g), a[e[l]].push(c);
else a[e[l]] = d;
return
}
k && (e[l] = e[l].replace(X, ""), a = a[e[l]](d));
if (null === a[e[l]] || a[e[l]] === p) a[e[l]] = {};
a = a[e[l]]
}
if (c.match(X)) a[c.replace(X, "")](d);
else a[c.replace(da, "")] = d
};
return function (c, d) {
return b(c, d, a)
}
}
function Oa(a) {
return J(a.aoData, "_aData")
}
function qa(a) {
a.aoData.length = 0;
a.aiDisplayMaster.length = 0;
a.aiDisplay.length = 0;
a.aIds = {}
}
function ra(a, b, c) {
for (var d = -1, e = 0, h = a.length; e <
h; e++) a[e] == b ? d = e : a[e] > b && a[e]--; - 1 != d && c === p && a.splice(d, 1)
}
function ea(a, b, c, d) {
var e = a.aoData[b],
h, g = function (c, d) {
for (; c.childNodes.length;) c.removeChild(c.firstChild);
c.innerHTML = I(a, b, d, "display")
};
if ("dom" !== c && (c && "auto" !== c || "dom" !== e.src)) {
var k = e.anCells;
if (k)
if (d !== p) g(k[d], d);
else
for (c = 0, h = k.length; c < h; c++) g(k[c], c)
} else e._aData = Ma(a, e, d, d === p ? p : e._aData).data;
e._aSortData = null;
e._aFilterData = null;
g = a.aoColumns;
if (d !== p) g[d].sType = null;
else {
c = 0;
for (h = g.length; c < h; c++) g[c].sType = null;
Pa(a, e)
}
}
function Ma(a, b, c, d) {
var e = [],
h = b.firstChild,
g, k = 0,
l, n = a.aoColumns,
m = a._rowReadObject;
d = d !== p ? d : m ? {} : [];
var w = function (a, b) {
if ("string" === typeof a) {
var c = a.indexOf("@"); - 1 !== c && (c = a.substring(c + 1), Q(a)(d, b.getAttribute(c)))
}
},
u = function (a) {
if (c === p || c === k) g = n[k], l = f.trim(a.innerHTML), g && g._bAttrSrc ? (Q(g.mData._)(d, l), w(g.mData.sort, a), w(g.mData.type, a), w(g.mData.filter, a)) : m ? (g._setter || (g._setter = Q(g.mData)), g._setter(d, l)) : d[k] = l;
k++
};
if (h)
for (; h;) {
var q = h.nodeName.toUpperCase();
if ("TD" ==
q || "TH" == q) u(h), e.push(h);
h = h.nextSibling
} else
for (e = b.anCells, h = 0, q = e.length; h < q; h++) u(e[h]);
(b = b.firstChild ? b : b.nTr) && (b = b.getAttribute("id")) && Q(a.rowId)(d, b);
return {
data: d,
cells: e
}
}
function La(a, b, c, d) {
var e = a.aoData[b],
h = e._aData,
g = [],
k, l;
if (null === e.nTr) {
var n = c || y.createElement("tr");
e.nTr = n;
e.anCells = g;
n._DT_RowIndex = b;
Pa(a, e);
var m = 0;
for (k = a.aoColumns.length; m < k; m++) {
var w = a.aoColumns[m];
var p = (l = c ? !1 : !0) ? y.createElement(w.sCellType) : d[m];
p._DT_CellIndex = {
row: b,
column: m
};
g.push(p);
if (l ||
!(c && !w.mRender && w.mData === m || f.isPlainObject(w.mData) && w.mData._ === m + ".display")) p.innerHTML = I(a, b, m, "display");
w.sClass && (p.className += " " + w.sClass);
w.bVisible && !c ? n.appendChild(p) : !w.bVisible && c && p.parentNode.removeChild(p);
w.fnCreatedCell && w.fnCreatedCell.call(a.oInstance, p, I(a, b, m), h, b, m)
}
A(a, "aoRowCreatedCallback", null, [n, h, b, g])
}
e.nTr.setAttribute("role", "row")
}
function Pa(a, b) {
var c = b.nTr,
d = b._aData;
if (c) {
if (a = a.rowIdFn(d)) c.id = a;
d.DT_RowClass && (a = d.DT_RowClass.split(" "), b.__rowc = b.__rowc ?
ta(b.__rowc.concat(a)) : a, f(c).removeClass(b.__rowc.join(" ")).addClass(d.DT_RowClass));
d.DT_RowAttr && f(c).attr(d.DT_RowAttr);
d.DT_RowData && f(c).data(d.DT_RowData)
}
}
function pb(a) {
var b, c, d = a.nTHead,
e = a.nTFoot,
h = 0 === f("th, td", d).length,
g = a.oClasses,
k = a.aoColumns;
h && (c = f("<tr/>").appendTo(d));
var l = 0;
for (b = k.length; l < b; l++) {
var n = k[l];
var m = f(n.nTh).addClass(n.sClass);
h && m.appendTo(c);
a.oFeatures.bSort && (m.addClass(n.sSortingClass), !1 !== n.bSortable && (m.attr("tabindex", a.iTabIndex).attr("aria-controls",
a.sTableId), Qa(a, n.nTh, l)));
n.sTitle != m[0].innerHTML && m.html(n.sTitle);
Ra(a, "header")(a, m, n, g)
}
h && fa(a.aoHeader, d);
f(d).find(">tr").attr("role", "row");
f(d).find(">tr>th, >tr>td").addClass(g.sHeaderTH);
f(e).find(">tr>th, >tr>td").addClass(g.sFooterTH);
if (null !== e)
for (a = a.aoFooter[0], l = 0, b = a.length; l < b; l++) n = k[l], n.nTf = a[l].cell, n.sClass && f(n.nTf).addClass(n.sClass)
}
function ha(a, b, c) {
var d, e, h = [],
g = [],
k = a.aoColumns.length;
if (b) {
c === p && (c = !1);
var l = 0;
for (d = b.length; l < d; l++) {
h[l] = b[l].slice();
h[l].nTr =
b[l].nTr;
for (e = k - 1; 0 <= e; e--) a.aoColumns[e].bVisible || c || h[l].splice(e, 1);
g.push([])
}
l = 0;
for (d = h.length; l < d; l++) {
if (a = h[l].nTr)
for (; e = a.firstChild;) a.removeChild(e);
e = 0;
for (b = h[l].length; e < b; e++) {
var n = k = 1;
if (g[l][e] === p) {
a.appendChild(h[l][e].cell);
for (g[l][e] = 1; h[l + k] !== p && h[l][e].cell == h[l + k][e].cell;) g[l + k][e] = 1, k++;
for (; h[l][e + n] !== p && h[l][e].cell == h[l][e + n].cell;) {
for (c = 0; c < k; c++) g[l + c][e + n] = 1;
n++
}
f(h[l][e].cell).attr("rowspan", k).attr("colspan", n)
}
}
}
}
}
function S(a) {
var b = A(a, "aoPreDrawCallback",
"preDraw", [a]);
if (-1 !== f.inArray(!1, b)) K(a, !1);
else {
b = [];
var c = 0,
d = a.asStripeClasses,
e = d.length,
h = a.oLanguage,
g = a.iInitDisplayStart,
k = "ssp" == D(a),
l = a.aiDisplay;
a.bDrawing = !0;
g !== p && -1 !== g && (a._iDisplayStart = k ? g : g >= a.fnRecordsDisplay() ? 0 : g, a.iInitDisplayStart = -1);
g = a._iDisplayStart;
var n = a.fnDisplayEnd();
if (a.bDeferLoading) a.bDeferLoading = !1, a.iDraw++, K(a, !1);
else if (!k) a.iDraw++;
else if (!a.bDestroying && !qb(a)) return;
if (0 !== l.length)
for (h = k ? a.aoData.length : n, k = k ? 0 : g; k < h; k++) {
var m = l[k],
w = a.aoData[m];
null === w.nTr && La(a, m);
var u = w.nTr;
if (0 !== e) {
var q = d[c % e];
w._sRowStripe != q && (f(u).removeClass(w._sRowStripe).addClass(q), w._sRowStripe = q)
}
A(a, "aoRowCallback", null, [u, w._aData, c, k, m]);
b.push(u);
c++
} else c = h.sZeroRecords, 1 == a.iDraw && "ajax" == D(a) ? c = h.sLoadingRecords : h.sEmptyTable && 0 === a.fnRecordsTotal() && (c = h.sEmptyTable), b[0] = f("<tr/>", {
"class": e ? d[0] : ""
}).append(f("<td />", {
valign: "top",
colSpan: W(a),
"class": a.oClasses.sRowEmpty
}).html(c))[0];
A(a, "aoHeaderCallback", "header", [f(a.nTHead).children("tr")[0],
Oa(a), g, n, l
]);
A(a, "aoFooterCallback", "footer", [f(a.nTFoot).children("tr")[0], Oa(a), g, n, l]);
d = f(a.nTBody);
d.children().detach();
d.append(f(b));
A(a, "aoDrawCallback", "draw", [a]);
a.bSorted = !1;
a.bFiltered = !1;
a.bDrawing = !1
}
}
function V(a, b) {
var c = a.oFeatures,
d = c.bFilter;
c.bSort && rb(a);
d ? ia(a, a.oPreviousSearch) : a.aiDisplay = a.aiDisplayMaster.slice();
!0 !== b && (a._iDisplayStart = 0);
a._drawHold = b;
S(a);
a._drawHold = !1
}
function sb(a) {
var b = a.oClasses,
c = f(a.nTable);
c = f("<div/>").insertBefore(c);
var d = a.oFeatures,
e =
f("<div/>", {
id: a.sTableId + "_wrapper",
"class": b.sWrapper + (a.nTFoot ? "" : " " + b.sNoFooter)
});
a.nHolding = c[0];
a.nTableWrapper = e[0];
a.nTableReinsertBefore = a.nTable.nextSibling;
for (var h = a.sDom.split(""), g, k, l, n, m, p, u = 0; u < h.length; u++) {
g = null;
k = h[u];
if ("<" == k) {
l = f("<div/>")[0];
n = h[u + 1];
if ("'" == n || '"' == n) {
m = "";
for (p = 2; h[u + p] != n;) m += h[u + p], p++;
"H" == m ? m = b.sJUIHeader : "F" == m && (m = b.sJUIFooter); - 1 != m.indexOf(".") ? (n = m.split("."), l.id = n[0].substr(1, n[0].length - 1), l.className = n[1]) : "#" == m.charAt(0) ? l.id = m.substr(1,
m.length - 1) : l.className = m;
u += p
}
e.append(l);
e = f(l)
} else if (">" == k) e = e.parent();
else if ("l" == k && d.bPaginate && d.bLengthChange) g = tb(a);
else if ("f" == k && d.bFilter) g = ub(a);
else if ("r" == k && d.bProcessing) g = vb(a);
else if ("t" == k) g = wb(a);
else if ("i" == k && d.bInfo) g = xb(a);
else if ("p" == k && d.bPaginate) g = yb(a);
else if (0 !== q.ext.feature.length)
for (l = q.ext.feature, p = 0, n = l.length; p < n; p++)
if (k == l[p].cFeature) {
g = l[p].fnInit(a);
break
} g && (l = a.aanFeatures, l[k] || (l[k] = []), l[k].push(g), e.append(g))
}
c.replaceWith(e);
a.nHolding =
null
}
function fa(a, b) {
b = f(b).children("tr");
var c, d, e;
a.splice(0, a.length);
var h = 0;
for (e = b.length; h < e; h++) a.push([]);
h = 0;
for (e = b.length; h < e; h++) {
var g = b[h];
for (c = g.firstChild; c;) {
if ("TD" == c.nodeName.toUpperCase() || "TH" == c.nodeName.toUpperCase()) {
var k = 1 * c.getAttribute("colspan");
var l = 1 * c.getAttribute("rowspan");
k = k && 0 !== k && 1 !== k ? k : 1;
l = l && 0 !== l && 1 !== l ? l : 1;
var n = 0;
for (d = a[h]; d[n];) n++;
var m = n;
var p = 1 === k ? !0 : !1;
for (d = 0; d < k; d++)
for (n = 0; n < l; n++) a[h + n][m + d] = {
cell: c,
unique: p
}, a[h + n].nTr = g
}
c = c.nextSibling
}
}
}
function ua(a, b, c) {
var d = [];
c || (c = a.aoHeader, b && (c = [], fa(c, b)));
b = 0;
for (var e = c.length; b < e; b++)
for (var h = 0, g = c[b].length; h < g; h++) !c[b][h].unique || d[h] && a.bSortCellsTop || (d[h] = c[b][h].cell);
return d
}
function va(a, b, c) {
A(a, "aoServerParams", "serverParams", [b]);
if (b && f.isArray(b)) {
var d = {},
e = /(.*?)\[\]$/;
f.each(b, function (a, b) {
(a = b.name.match(e)) ? (a = a[0], d[a] || (d[a] = []), d[a].push(b.value)) : d[b.name] = b.value
});
b = d
}
var h = a.ajax,
g = a.oInstance,
k = function (b) {
A(a, null, "xhr", [a, b, a.jqXHR]);
c(b)
};
if (f.isPlainObject(h) &&
h.data) {
var l = h.data;
var n = "function" === typeof l ? l(b, a) : l;
b = "function" === typeof l && n ? n : f.extend(!0, b, n);
delete h.data
}
n = {
data: b,
success: function (b) {
var c = b.error || b.sError;
c && O(a, 0, c);
a.json = b;
k(b)
},
dataType: "json",
cache: !1,
type: a.sServerMethod,
error: function (b, c, d) {
d = A(a, null, "xhr", [a, null, a.jqXHR]); - 1 === f.inArray(!0, d) && ("parsererror" == c ? O(a, 0, "Invalid JSON response", 1) : 4 === b.readyState && O(a, 0, "Ajax error", 7));
K(a, !1)
}
};
a.oAjaxData = b;
A(a, null, "preXhr", [a, b]);
a.fnServerData ? a.fnServerData.call(g,
a.sAjaxSource, f.map(b, function (a, b) {
return {
name: b,
value: a
}
}), k, a) : a.sAjaxSource || "string" === typeof h ? a.jqXHR = f.ajax(f.extend(n, {
url: h || a.sAjaxSource
})) : "function" === typeof h ? a.jqXHR = h.call(g, b, k, a) : (a.jqXHR = f.ajax(f.extend(n, h)), h.data = l)
}
function qb(a) {
return a.bAjaxDataGet ? (a.iDraw++, K(a, !0), va(a, zb(a), function (b) {
Ab(a, b)
}), !1) : !0
}
function zb(a) {
var b = a.aoColumns,
c = b.length,
d = a.oFeatures,
e = a.oPreviousSearch,
h = a.aoPreSearchCols,
g = [],
k = Y(a);
var l = a._iDisplayStart;
var n = !1 !== d.bPaginate ? a._iDisplayLength :
-1;
var m = function (a, b) {
g.push({
name: a,
value: b
})
};
m("sEcho", a.iDraw);
m("iColumns", c);
m("sColumns", J(b, "sName").join(","));
m("iDisplayStart", l);
m("iDisplayLength", n);
var p = {
draw: a.iDraw,
columns: [],
order: [],
start: l,
length: n,
search: {
value: e.sSearch,
regex: e.bRegex
}
};
for (l = 0; l < c; l++) {
var u = b[l];
var sa = h[l];
n = "function" == typeof u.mData ? "function" : u.mData;
p.columns.push({
data: n,
name: u.sName,
searchable: u.bSearchable,
orderable: u.bSortable,
search: {
value: sa.sSearch,
regex: sa.bRegex
}
});
m("mDataProp_" + l, n);
d.bFilter &&
(m("sSearch_" + l, sa.sSearch), m("bRegex_" + l, sa.bRegex), m("bSearchable_" + l, u.bSearchable));
d.bSort && m("bSortable_" + l, u.bSortable)
}
d.bFilter && (m("sSearch", e.sSearch), m("bRegex", e.bRegex));
d.bSort && (f.each(k, function (a, b) {
p.order.push({
column: b.col,
dir: b.dir
});
m("iSortCol_" + a, b.col);
m("sSortDir_" + a, b.dir)
}), m("iSortingCols", k.length));
b = q.ext.legacy.ajax;
return null === b ? a.sAjaxSource ? g : p : b ? g : p
}
function Ab(a, b) {
var c = function (a, c) {
return b[a] !== p ? b[a] : b[c]
},
d = wa(a, b),
e = c("sEcho", "draw"),
h = c("iTotalRecords",
"recordsTotal");
c = c("iTotalDisplayRecords", "recordsFiltered");
if (e) {
if (1 * e < a.iDraw) return;
a.iDraw = 1 * e
}
qa(a);
a._iRecordsTotal = parseInt(h, 10);
a._iRecordsDisplay = parseInt(c, 10);
e = 0;
for (h = d.length; e < h; e++) R(a, d[e]);
a.aiDisplay = a.aiDisplayMaster.slice();
a.bAjaxDataGet = !1;
S(a);
a._bInitComplete || xa(a, b);
a.bAjaxDataGet = !0;
K(a, !1)
}
function wa(a, b) {
a = f.isPlainObject(a.ajax) && a.ajax.dataSrc !== p ? a.ajax.dataSrc : a.sAjaxDataProp;
return "data" === a ? b.aaData || b[a] : "" !== a ? U(a)(b) : b
}
function ub(a) {
var b = a.oClasses,
c =
a.sTableId,
d = a.oLanguage,
e = a.oPreviousSearch,
h = a.aanFeatures,
g = '<input type="search" class="' + b.sFilterInput + '"/>',
k = d.sSearch;
k = k.match(/_INPUT_/) ? k.replace("_INPUT_", g) : k + g;
b = f("<div/>", {
id: h.f ? null : c + "_filter",
"class": b.sFilter
}).append(f("<label/>").append(k));
h = function () {
var b = this.value ? this.value : "";
b != e.sSearch && (ia(a, {
sSearch: b,
bRegex: e.bRegex,
bSmart: e.bSmart,
bCaseInsensitive: e.bCaseInsensitive
}), a._iDisplayStart = 0, S(a))
};
g = null !== a.searchDelay ? a.searchDelay : "ssp" === D(a) ? 400 : 0;
var l = f("input",
b).val(e.sSearch).attr("placeholder", d.sSearchPlaceholder).on("keyup.DT search.DT input.DT paste.DT cut.DT", g ? Sa(h, g) : h).on("keypress.DT", function (a) {
if (13 == a.keyCode) return !1
}).attr("aria-controls", c);
f(a.nTable).on("search.dt.DT", function (b, c) {
if (a === c) try {
l[0] !== y.activeElement && l.val(e.sSearch)
} catch (w) {}
});
return b[0]
}
function ia(a, b, c) {
var d = a.oPreviousSearch,
e = a.aoPreSearchCols,
h = function (a) {
d.sSearch = a.sSearch;
d.bRegex = a.bRegex;
d.bSmart = a.bSmart;
d.bCaseInsensitive = a.bCaseInsensitive
},
g = function (a) {
return a.bEscapeRegex !==
p ? !a.bEscapeRegex : a.bRegex
};
Ka(a);
if ("ssp" != D(a)) {
Bb(a, b.sSearch, c, g(b), b.bSmart, b.bCaseInsensitive);
h(b);
for (b = 0; b < e.length; b++) Cb(a, e[b].sSearch, b, g(e[b]), e[b].bSmart, e[b].bCaseInsensitive);
Db(a)
} else h(b);
a.bFiltered = !0;
A(a, null, "search", [a])
}
function Db(a) {
for (var b = q.ext.search, c = a.aiDisplay, d, e, h = 0, g = b.length; h < g; h++) {
for (var k = [], l = 0, n = c.length; l < n; l++) e = c[l], d = a.aoData[e], b[h](a, d._aFilterData, e, d._aData, l) && k.push(e);
c.length = 0;
f.merge(c, k)
}
}
function Cb(a, b, c, d, e, h) {
if ("" !== b) {
var g = [],
k =
a.aiDisplay;
d = Ta(b, d, e, h);
for (e = 0; e < k.length; e++) b = a.aoData[k[e]]._aFilterData[c], d.test(b) && g.push(k[e]);
a.aiDisplay = g
}
}
function Bb(a, b, c, d, e, h) {
e = Ta(b, d, e, h);
var g = a.oPreviousSearch.sSearch,
k = a.aiDisplayMaster;
h = [];
0 !== q.ext.search.length && (c = !0);
var f = Eb(a);
if (0 >= b.length) a.aiDisplay = k.slice();
else {
if (f || c || d || g.length > b.length || 0 !== b.indexOf(g) || a.bSorted) a.aiDisplay = k.slice();
b = a.aiDisplay;
for (c = 0; c < b.length; c++) e.test(a.aoData[b[c]]._sFilterRow) && h.push(b[c]);
a.aiDisplay = h
}
}
function Ta(a, b,
c, d) {
a = b ? a : Ua(a);
c && (a = "^(?=.*?" + f.map(a.match(/"[^"]+"|[^ ]+/g) || [""], function (a) {
if ('"' === a.charAt(0)) {
var b = a.match(/^"(.*)"$/);
a = b ? b[1] : a
}
return a.replace('"', "")
}).join(")(?=.*?") + ").*$");
return new RegExp(a, d ? "i" : "")
}
function Eb(a) {
var b = a.aoColumns,
c, d, e = q.ext.type.search;
var h = !1;
var g = 0;
for (c = a.aoData.length; g < c; g++) {
var k = a.aoData[g];
if (!k._aFilterData) {
var f = [];
var n = 0;
for (d = b.length; n < d; n++) {
h = b[n];
if (h.bSearchable) {
var m = I(a, g, n, "filter");
e[h.sType] && (m = e[h.sType](m));
null === m && (m = "");
"string" !== typeof m && m.toString && (m = m.toString())
} else m = "";
m.indexOf && -1 !== m.indexOf("&") && (ya.innerHTML = m, m = $b ? ya.textContent : ya.innerText);
m.replace && (m = m.replace(/[\r\n\u2028]/g, ""));
f.push(m)
}
k._aFilterData = f;
k._sFilterRow = f.join(" ");
h = !0
}
}
return h
}
function Fb(a) {
return {
search: a.sSearch,
smart: a.bSmart,
regex: a.bRegex,
caseInsensitive: a.bCaseInsensitive
}
}
function Gb(a) {
return {
sSearch: a.search,
bSmart: a.smart,
bRegex: a.regex,
bCaseInsensitive: a.caseInsensitive
}
}
function xb(a) {
var b = a.sTableId,
c = a.aanFeatures.i,
d = f("<div/>", {
"class": a.oClasses.sInfo,
id: c ? null : b + "_info"
});
c || (a.aoDrawCallback.push({
fn: Hb,
sName: "information"
}), d.attr("role", "status").attr("aria-live", "polite"), f(a.nTable).attr("aria-describedby", b + "_info"));
return d[0]
}
function Hb(a) {
var b = a.aanFeatures.i;
if (0 !== b.length) {
var c = a.oLanguage,
d = a._iDisplayStart + 1,
e = a.fnDisplayEnd(),
h = a.fnRecordsTotal(),
g = a.fnRecordsDisplay(),
k = g ? c.sInfo : c.sInfoEmpty;
g !== h && (k += " " + c.sInfoFiltered);
k += c.sInfoPostFix;
k = Ib(a, k);
c = c.fnInfoCallback;
null !== c && (k = c.call(a.oInstance,
a, d, e, h, g, k));
f(b).html(k)
}
}
function Ib(a, b) {
var c = a.fnFormatNumber,
d = a._iDisplayStart + 1,
e = a._iDisplayLength,
h = a.fnRecordsDisplay(),
g = -1 === e;
return b.replace(/_START_/g, c.call(a, d)).replace(/_END_/g, c.call(a, a.fnDisplayEnd())).replace(/_MAX_/g, c.call(a, a.fnRecordsTotal())).replace(/_TOTAL_/g, c.call(a, h)).replace(/_PAGE_/g, c.call(a, g ? 1 : Math.ceil(d / e))).replace(/_PAGES_/g, c.call(a, g ? 1 : Math.ceil(h / e)))
}
function ja(a) {
var b = a.iInitDisplayStart,
c = a.aoColumns;
var d = a.oFeatures;
var e = a.bDeferLoading;
if (a.bInitialised) {
sb(a);
pb(a);
ha(a, a.aoHeader);
ha(a, a.aoFooter);
K(a, !0);
d.bAutoWidth && Ja(a);
var h = 0;
for (d = c.length; h < d; h++) {
var g = c[h];
g.sWidth && (g.nTh.style.width = B(g.sWidth))
}
A(a, null, "preInit", [a]);
V(a);
c = D(a);
if ("ssp" != c || e) "ajax" == c ? va(a, [], function (c) {
var d = wa(a, c);
for (h = 0; h < d.length; h++) R(a, d[h]);
a.iInitDisplayStart = b;
V(a);
K(a, !1);
xa(a, c)
}, a) : (K(a, !1), xa(a))
} else setTimeout(function () {
ja(a)
}, 200)
}
function xa(a, b) {
a._bInitComplete = !0;
(b || a.oInit.aaData) && aa(a);
A(a, null, "plugin-init", [a, b]);
A(a, "aoInitComplete", "init",
[a, b])
}
function Va(a, b) {
b = parseInt(b, 10);
a._iDisplayLength = b;
Wa(a);
A(a, null, "length", [a, b])
}
function tb(a) {
var b = a.oClasses,
c = a.sTableId,
d = a.aLengthMenu,
e = f.isArray(d[0]),
h = e ? d[0] : d;
d = e ? d[1] : d;
e = f("<select/>", {
name: c + "_length",
"aria-controls": c,
"class": b.sLengthSelect
});
for (var g = 0, k = h.length; g < k; g++) e[0][g] = new Option("number" === typeof d[g] ? a.fnFormatNumber(d[g]) : d[g], h[g]);
var l = f("<div><label/></div>").addClass(b.sLength);
a.aanFeatures.l || (l[0].id = c + "_length");
l.children().append(a.oLanguage.sLengthMenu.replace("_MENU_",
e[0].outerHTML));
f("select", l).val(a._iDisplayLength).on("change.DT", function (b) {
Va(a, f(this).val());
S(a)
});
f(a.nTable).on("length.dt.DT", function (b, c, d) {
a === c && f("select", l).val(d)
});
return l[0]
}
function yb(a) {
var b = a.sPaginationType,
c = q.ext.pager[b],
d = "function" === typeof c,
e = function (a) {
S(a)
};
b = f("<div/>").addClass(a.oClasses.sPaging + b)[0];
var h = a.aanFeatures;
d || c.fnInit(a, b, e);
h.p || (b.id = a.sTableId + "_paginate", a.aoDrawCallback.push({
fn: function (a) {
if (d) {
var b = a._iDisplayStart,
g = a._iDisplayLength,
f = a.fnRecordsDisplay(),
m = -1 === g;
b = m ? 0 : Math.ceil(b / g);
g = m ? 1 : Math.ceil(f / g);
f = c(b, g);
var p;
m = 0;
for (p = h.p.length; m < p; m++) Ra(a, "pageButton")(a, h.p[m], m, f, b, g)
} else c.fnUpdate(a, e)
},
sName: "pagination"
}));
return b
}
function Xa(a, b, c) {
var d = a._iDisplayStart,
e = a._iDisplayLength,
h = a.fnRecordsDisplay();
0 === h || -1 === e ? d = 0 : "number" === typeof b ? (d = b * e, d > h && (d = 0)) : "first" == b ? d = 0 : "previous" == b ? (d = 0 <= e ? d - e : 0, 0 > d && (d = 0)) : "next" == b ? d + e < h && (d += e) : "last" == b ? d = Math.floor((h - 1) / e) * e : O(a, 0, "Unknown paging action: " + b, 5);
b =
a._iDisplayStart !== d;
a._iDisplayStart = d;
b && (A(a, null, "page", [a]), c && S(a));
return b
}
function vb(a) {
return f("<div/>", {
id: a.aanFeatures.r ? null : a.sTableId + "_processing",
"class": a.oClasses.sProcessing
}).html(a.oLanguage.sProcessing).insertBefore(a.nTable)[0]
}
function K(a, b) {
a.oFeatures.bProcessing && f(a.aanFeatures.r).css("display", b ? "block" : "none");
A(a, null, "processing", [a, b])
}
function wb(a) {
var b = f(a.nTable);
b.attr("role", "grid");
var c = a.oScroll;
if ("" === c.sX && "" === c.sY) return a.nTable;
var d = c.sX,
e = c.sY,
h = a.oClasses,
g = b.children("caption"),
k = g.length ? g[0]._captionSide : null,
l = f(b[0].cloneNode(!1)),
n = f(b[0].cloneNode(!1)),
m = b.children("tfoot");
m.length || (m = null);
l = f("<div/>", {
"class": h.sScrollWrapper
}).append(f("<div/>", {
"class": h.sScrollHead
}).css({
overflow: "hidden",
position: "relative",
border: 0,
width: d ? d ? B(d) : null : "100%"
}).append(f("<div/>", {
"class": h.sScrollHeadInner
}).css({
"box-sizing": "content-box",
width: c.sXInner || "100%"
}).append(l.removeAttr("id").css("margin-left", 0).append("top" === k ? g : null).append(b.children("thead"))))).append(f("<div/>", {
"class": h.sScrollBody
}).css({
position: "relative",
overflow: "auto",
width: d ? B(d) : null
}).append(b));
m && l.append(f("<div/>", {
"class": h.sScrollFoot
}).css({
overflow: "hidden",
border: 0,
width: d ? d ? B(d) : null : "100%"
}).append(f("<div/>", {
"class": h.sScrollFootInner
}).append(n.removeAttr("id").css("margin-left", 0).append("bottom" === k ? g : null).append(b.children("tfoot")))));
b = l.children();
var p = b[0];
h = b[1];
var u = m ? b[2] : null;
if (d) f(h).on("scroll.DT", function (a) {
a = this.scrollLeft;
p.scrollLeft = a;
m && (u.scrollLeft = a)
});
f(h).css(e && c.bCollapse ? "max-height" : "height", e);
a.nScrollHead = p;
a.nScrollBody = h;
a.nScrollFoot = u;
a.aoDrawCallback.push({
fn: na,
sName: "scrolling"
});
return l[0]
}
function na(a) {
var b = a.oScroll,
c = b.sX,
d = b.sXInner,
e = b.sY;
b = b.iBarWidth;
var h = f(a.nScrollHead),
g = h[0].style,
k = h.children("div"),
l = k[0].style,
n = k.children("table");
k = a.nScrollBody;
var m = f(k),
w = k.style,
u = f(a.nScrollFoot).children("div"),
q = u.children("table"),
t = f(a.nTHead),
r = f(a.nTable),
v = r[0],
za = v.style,
T = a.nTFoot ? f(a.nTFoot) : null,
A = a.oBrowser,
x = A.bScrollOversize,
ac = J(a.aoColumns, "nTh"),
Ya = [],
y = [],
z = [],
C = [],
G, H = function (a) {
a = a.style;
a.paddingTop = "0";
a.paddingBottom = "0";
a.borderTopWidth = "0";
a.borderBottomWidth = "0";
a.height = 0
};
var D = k.scrollHeight > k.clientHeight;
if (a.scrollBarVis !== D && a.scrollBarVis !== p) a.scrollBarVis = D, aa(a);
else {
a.scrollBarVis = D;
r.children("thead, tfoot").remove();
if (T) {
var E = T.clone().prependTo(r);
var F = T.find("tr");
E = E.find("tr")
}
var I = t.clone().prependTo(r);
t = t.find("tr");
D = I.find("tr");
I.find("th, td").removeAttr("tabindex");
c || (w.width = "100%", h[0].style.width = "100%");
f.each(ua(a, I), function (b, c) {
G = ba(a, b);
c.style.width = a.aoColumns[G].sWidth
});
T && N(function (a) {
a.style.width = ""
}, E);
h = r.outerWidth();
"" === c ? (za.width = "100%", x && (r.find("tbody").height() > k.offsetHeight || "scroll" == m.css("overflow-y")) && (za.width = B(r.outerWidth() - b)), h = r.outerWidth()) : "" !== d && (za.width = B(d), h = r.outerWidth());
N(H, D);
N(function (a) {
z.push(a.innerHTML);
Ya.push(B(f(a).css("width")))
}, D);
N(function (a, b) {
-1 !== f.inArray(a, ac) && (a.style.width = Ya[b])
},
t);
f(D).height(0);
T && (N(H, E), N(function (a) {
C.push(a.innerHTML);
y.push(B(f(a).css("width")))
}, E), N(function (a, b) {
a.style.width = y[b]
}, F), f(E).height(0));
N(function (a, b) {
a.innerHTML = '<div class="dataTables_sizing">' + z[b] + "</div>";
a.childNodes[0].style.height = "0";
a.childNodes[0].style.overflow = "hidden";
a.style.width = Ya[b]
}, D);
T && N(function (a, b) {
a.innerHTML = '<div class="dataTables_sizing">' + C[b] + "</div>";
a.childNodes[0].style.height = "0";
a.childNodes[0].style.overflow = "hidden";
a.style.width = y[b]
}, E);
r.outerWidth() <
h ? (F = k.scrollHeight > k.offsetHeight || "scroll" == m.css("overflow-y") ? h + b : h, x && (k.scrollHeight > k.offsetHeight || "scroll" == m.css("overflow-y")) && (za.width = B(F - b)), "" !== c && "" === d || O(a, 1, "Possible column misalignment", 6)) : F = "100%";
w.width = B(F);
g.width = B(F);
T && (a.nScrollFoot.style.width = B(F));
!e && x && (w.height = B(v.offsetHeight + b));
c = r.outerWidth();
n[0].style.width = B(c);
l.width = B(c);
d = r.height() > k.clientHeight || "scroll" == m.css("overflow-y");
e = "padding" + (A.bScrollbarLeft ? "Left" : "Right");
l[e] = d ? b + "px" : "0px";
T &&
(q[0].style.width = B(c), u[0].style.width = B(c), u[0].style[e] = d ? b + "px" : "0px");
r.children("colgroup").insertBefore(r.children("thead"));
m.trigger("scroll");
!a.bSorted && !a.bFiltered || a._drawHold || (k.scrollTop = 0)
}
}
function N(a, b, c) {
for (var d = 0, e = 0, h = b.length, g, k; e < h;) {
g = b[e].firstChild;
for (k = c ? c[e].firstChild : null; g;) 1 === g.nodeType && (c ? a(g, k, d) : a(g, d), d++), g = g.nextSibling, k = c ? k.nextSibling : null;
e++
}
}
function Ja(a) {
var b = a.nTable,
c = a.aoColumns,
d = a.oScroll,
e = d.sY,
h = d.sX,
g = d.sXInner,
k = c.length,
l = oa(a, "bVisible"),
n = f("th", a.nTHead),
m = b.getAttribute("width"),
p = b.parentNode,
u = !1,
q, t = a.oBrowser;
d = t.bScrollOversize;
(q = b.style.width) && -1 !== q.indexOf("%") && (m = q);
for (q = 0; q < l.length; q++) {
var r = c[l[q]];
null !== r.sWidth && (r.sWidth = Jb(r.sWidthOrig, p), u = !0)
}
if (d || !u && !h && !e && k == W(a) && k == n.length)
for (q = 0; q < k; q++) l = ba(a, q), null !== l && (c[l].sWidth = B(n.eq(q).width()));
else {
k = f(b).clone().css("visibility", "hidden").removeAttr("id");
k.find("tbody tr").remove();
var v = f("<tr/>").appendTo(k.find("tbody"));
k.find("thead, tfoot").remove();
k.append(f(a.nTHead).clone()).append(f(a.nTFoot).clone());
k.find("tfoot th, tfoot td").css("width", "");
n = ua(a, k.find("thead")[0]);
for (q = 0; q < l.length; q++) r = c[l[q]], n[q].style.width = null !== r.sWidthOrig && "" !== r.sWidthOrig ? B(r.sWidthOrig) : "", r.sWidthOrig && h && f(n[q]).append(f("<div/>").css({
width: r.sWidthOrig,
margin: 0,
padding: 0,
border: 0,
height: 1
}));
if (a.aoData.length)
for (q = 0; q < l.length; q++) u = l[q], r = c[u], f(Kb(a, u)).clone(!1).append(r.sContentPadding).appendTo(v);
f("[name]", k).removeAttr("name");
r = f("<div/>").css(h ||
e ? {
position: "absolute",
top: 0,
left: 0,
height: 1,
right: 0,
overflow: "hidden"
} : {}).append(k).appendTo(p);
h && g ? k.width(g) : h ? (k.css("width", "auto"), k.removeAttr("width"), k.width() < p.clientWidth && m && k.width(p.clientWidth)) : e ? k.width(p.clientWidth) : m && k.width(m);
for (q = e = 0; q < l.length; q++) p = f(n[q]), g = p.outerWidth() - p.width(), p = t.bBounding ? Math.ceil(n[q].getBoundingClientRect().width) : p.outerWidth(), e += p, c[l[q]].sWidth = B(p - g);
b.style.width = B(e);
r.remove()
}
m && (b.style.width = B(m));
!m && !h || a._reszEvt || (b = function () {
f(z).on("resize.DT-" +
a.sInstance, Sa(function () {
aa(a)
}))
}, d ? setTimeout(b, 1E3) : b(), a._reszEvt = !0)
}
function Jb(a, b) {
if (!a) return 0;
a = f("<div/>").css("width", B(a)).appendTo(b || y.body);
b = a[0].offsetWidth;
a.remove();
return b
}
function Kb(a, b) {
var c = Lb(a, b);
if (0 > c) return null;
var d = a.aoData[c];
return d.nTr ? d.anCells[b] : f("<td/>").html(I(a, c, b, "display"))[0]
}
function Lb(a, b) {
for (var c, d = -1, e = -1, h = 0, g = a.aoData.length; h < g; h++) c = I(a, h, b, "display") + "", c = c.replace(bc, ""), c = c.replace(/ /g, " "), c.length > d && (d = c.length, e = h);
return e
}
function B(a) {
return null === a ? "0px" : "number" == typeof a ? 0 > a ? "0px" : a + "px" : a.match(/\d$/) ? a + "px" : a
}
function Y(a) {
var b = [],
c = a.aoColumns;
var d = a.aaSortingFixed;
var e = f.isPlainObject(d);
var h = [];
var g = function (a) {
a.length && !f.isArray(a[0]) ? h.push(a) : f.merge(h, a)
};
f.isArray(d) && g(d);
e && d.pre && g(d.pre);
g(a.aaSorting);
e && d.post && g(d.post);
for (a = 0; a < h.length; a++) {
var k = h[a][0];
g = c[k].aDataSort;
d = 0;
for (e = g.length; d < e; d++) {
var l = g[d];
var n = c[l].sType || "string";
h[a]._idx === p && (h[a]._idx = f.inArray(h[a][1], c[l].asSorting));
b.push({
src: k,
col: l,
dir: h[a][1],
index: h[a]._idx,
type: n,
formatter: q.ext.type.order[n + "-pre"]
})
}
}
return b
}
function rb(a) {
var b, c = [],
d = q.ext.type.order,
e = a.aoData,
h = 0,
g = a.aiDisplayMaster;
Ka(a);
var k = Y(a);
var f = 0;
for (b = k.length; f < b; f++) {
var n = k[f];
n.formatter && h++;
Mb(a, n.col)
}
if ("ssp" != D(a) && 0 !== k.length) {
f = 0;
for (b = g.length; f < b; f++) c[g[f]] = f;
h === k.length ? g.sort(function (a, b) {
var d, h = k.length,
g = e[a]._aSortData,
f = e[b]._aSortData;
for (d = 0; d < h; d++) {
var l = k[d];
var m = g[l.col];
var n = f[l.col];
m = m < n ? -1 : m > n ? 1 : 0;
if (0 !== m) return "asc" === l.dir ? m : -m
}
m = c[a];
n = c[b];
return m < n ? -1 : m > n ? 1 : 0
}) : g.sort(function (a, b) {
var h, g = k.length,
f = e[a]._aSortData,
l = e[b]._aSortData;
for (h = 0; h < g; h++) {
var m = k[h];
var n = f[m.col];
var p = l[m.col];
m = d[m.type + "-" + m.dir] || d["string-" + m.dir];
n = m(n, p);
if (0 !== n) return n
}
n = c[a];
p = c[b];
return n < p ? -1 : n > p ? 1 : 0
})
}
a.bSorted = !0
}
function Nb(a) {
var b = a.aoColumns,
c = Y(a);
a = a.oLanguage.oAria;
for (var d = 0, e = b.length; d < e; d++) {
var h = b[d];
var g = h.asSorting;
var k = h.sTitle.replace(/<.*?>/g, "");
var f = h.nTh;
f.removeAttribute("aria-sort");
h.bSortable && (0 < c.length && c[0].col == d ? (f.setAttribute("aria-sort", "asc" == c[0].dir ? "ascending" : "descending"), h = g[c[0].index + 1] || g[0]) : h = g[0], k += "asc" === h ? a.sSortAscending : a.sSortDescending);
f.setAttribute("aria-label", k)
}
}
function Za(a, b, c, d) {
var e = a.aaSorting,
h = a.aoColumns[b].asSorting,
g = function (a, b) {
var c = a._idx;
c === p && (c = f.inArray(a[1], h));
return c + 1 < h.length ? c + 1 : b ? null : 0
};
"number" === typeof e[0] && (e = a.aaSorting = [e]);
c && a.oFeatures.bSortMulti ? (c = f.inArray(b, J(e, "0")), -1 !== c ? (b = g(e[c], !0), null ===
b && 1 === e.length && (b = 0), null === b ? e.splice(c, 1) : (e[c][1] = h[b], e[c]._idx = b)) : (e.push([b, h[0], 0]), e[e.length - 1]._idx = 0)) : e.length && e[0][0] == b ? (b = g(e[0]), e.length = 1, e[0][1] = h[b], e[0]._idx = b) : (e.length = 0, e.push([b, h[0]]), e[0]._idx = 0);
V(a);
"function" == typeof d && d(a)
}
function Qa(a, b, c, d) {
var e = a.aoColumns[c];
$a(b, {}, function (b) {
!1 !== e.bSortable && (a.oFeatures.bProcessing ? (K(a, !0), setTimeout(function () {
Za(a, c, b.shiftKey, d);
"ssp" !== D(a) && K(a, !1)
}, 0)) : Za(a, c, b.shiftKey, d))
})
}
function Aa(a) {
var b = a.aLastSort,
c = a.oClasses.sSortColumn,
d = Y(a),
e = a.oFeatures,
h;
if (e.bSort && e.bSortClasses) {
e = 0;
for (h = b.length; e < h; e++) {
var g = b[e].src;
f(J(a.aoData, "anCells", g)).removeClass(c + (2 > e ? e + 1 : 3))
}
e = 0;
for (h = d.length; e < h; e++) g = d[e].src, f(J(a.aoData, "anCells", g)).addClass(c + (2 > e ? e + 1 : 3))
}
a.aLastSort = d
}
function Mb(a, b) {
var c = a.aoColumns[b],
d = q.ext.order[c.sSortDataType],
e;
d && (e = d.call(a.oInstance, a, b, ca(a, b)));
for (var h, g = q.ext.type.order[c.sType + "-pre"], k = 0, f = a.aoData.length; k < f; k++)
if (c = a.aoData[k], c._aSortData || (c._aSortData = []), !c._aSortData[b] || d) h = d ? e[k] : I(a, k, b, "sort"), c._aSortData[b] = g ? g(h) : h
}
function Ba(a) {
if (a.oFeatures.bStateSave && !a.bDestroying) {
var b = {
time: +new Date,
start: a._iDisplayStart,
length: a._iDisplayLength,
order: f.extend(!0, [], a.aaSorting),
search: Fb(a.oPreviousSearch),
columns: f.map(a.aoColumns, function (b, d) {
return {
visible: b.bVisible,
search: Fb(a.aoPreSearchCols[d])
}
})
};
A(a, "aoStateSaveParams", "stateSaveParams", [a, b]);
a.oSavedState = b;
a.fnStateSaveCallback.call(a.oInstance, a, b)
}
}
function Ob(a, b, c) {
var d,
e, h = a.aoColumns;
b = function (b) {
if (b && b.time) {
var g = A(a, "aoStateLoadParams", "stateLoadParams", [a, b]);
if (-1 === f.inArray(!1, g) && (g = a.iStateDuration, !(0 < g && b.time < +new Date - 1E3 * g || b.columns && h.length !== b.columns.length))) {
a.oLoadedState = f.extend(!0, {}, b);
b.start !== p && (a._iDisplayStart = b.start, a.iInitDisplayStart = b.start);
b.length !== p && (a._iDisplayLength = b.length);
b.order !== p && (a.aaSorting = [], f.each(b.order, function (b, c) {
a.aaSorting.push(c[0] >= h.length ? [0, c[1]] : c)
}));
b.search !== p && f.extend(a.oPreviousSearch,
Gb(b.search));
if (b.columns)
for (d = 0, e = b.columns.length; d < e; d++) g = b.columns[d], g.visible !== p && (h[d].bVisible = g.visible), g.search !== p && f.extend(a.aoPreSearchCols[d], Gb(g.search));
A(a, "aoStateLoaded", "stateLoaded", [a, b])
}
}
c()
};
if (a.oFeatures.bStateSave) {
var g = a.fnStateLoadCallback.call(a.oInstance, a, b);
g !== p && b(g)
} else c()
}
function Ca(a) {
var b = q.settings;
a = f.inArray(a, J(b, "nTable"));
return -1 !== a ? b[a] : null
}
function O(a, b, c, d) {
c = "DataTables warning: " + (a ? "table id=" + a.sTableId + " - " : "") + c;
d && (c += ". For more information about this error, please see http://datatables.net/tn/" +
d);
if (b) z.console && console.log && console.log(c);
else if (b = q.ext, b = b.sErrMode || b.errMode, a && A(a, null, "error", [a, d, c]), "alert" == b) alert(c);
else {
if ("throw" == b) throw Error(c);
"function" == typeof b && b(a, d, c)
}
}
function M(a, b, c, d) {
f.isArray(c) ? f.each(c, function (c, d) {
f.isArray(d) ? M(a, b, d[0], d[1]) : M(a, b, d)
}) : (d === p && (d = c), b[c] !== p && (a[d] = b[c]))
}
function ab(a, b, c) {
var d;
for (d in b)
if (b.hasOwnProperty(d)) {
var e = b[d];
f.isPlainObject(e) ? (f.isPlainObject(a[d]) || (a[d] = {}), f.extend(!0, a[d], e)) : c && "data" !== d && "aaData" !==
d && f.isArray(e) ? a[d] = e.slice() : a[d] = e
} return a
}
function $a(a, b, c) {
f(a).on("click.DT", b, function (b) {
f(a).blur();
c(b)
}).on("keypress.DT", b, function (a) {
13 === a.which && (a.preventDefault(), c(a))
}).on("selectstart.DT", function () {
return !1
})
}
function E(a, b, c, d) {
c && a[b].push({
fn: c,
sName: d
})
}
function A(a, b, c, d) {
var e = [];
b && (e = f.map(a[b].slice().reverse(), function (b, c) {
return b.fn.apply(a.oInstance, d)
}));
null !== c && (b = f.Event(c + ".dt"), f(a.nTable).trigger(b, d), e.push(b.result));
return e
}
function Wa(a) {
var b = a._iDisplayStart,
c = a.fnDisplayEnd(),
d = a._iDisplayLength;
b >= c && (b = c - d);
b -= b % d;
if (-1 === d || 0 > b) b = 0;
a._iDisplayStart = b
}
function Ra(a, b) {
a = a.renderer;
var c = q.ext.renderer[b];
return f.isPlainObject(a) && a[b] ? c[a[b]] || c._ : "string" === typeof a ? c[a] || c._ : c._
}
function D(a) {
return a.oFeatures.bServerSide ? "ssp" : a.ajax || a.sAjaxSource ? "ajax" : "dom"
}
function ka(a, b) {
var c = Pb.numbers_length,
d = Math.floor(c / 2);
b <= c ? a = Z(0, b) : a <= d ? (a = Z(0, c - 2), a.push("ellipsis"), a.push(b - 1)) : (a >= b - 1 - d ? a = Z(b - (c - 2), b) : (a = Z(a - d + 2, a + d - 1), a.push("ellipsis"),
a.push(b - 1)), a.splice(0, 0, "ellipsis"), a.splice(0, 0, 0));
a.DT_el = "span";
return a
}
function Ha(a) {
f.each({
num: function (b) {
return Da(b, a)
},
"num-fmt": function (b) {
return Da(b, a, bb)
},
"html-num": function (b) {
return Da(b, a, Ea)
},
"html-num-fmt": function (b) {
return Da(b, a, Ea, bb)
}
}, function (b, c) {
C.type.order[b + a + "-pre"] = c;
b.match(/^html\-/) && (C.type.search[b + a] = C.type.search.html)
})
}
function Qb(a) {
return function () {
var b = [Ca(this[q.ext.iApiIndex])].concat(Array.prototype.slice.call(arguments));
return q.ext.internal[a].apply(this,
b)
}
}
var q = function (a) {
this.$ = function (a, b) {
return this.api(!0).$(a, b)
};
this._ = function (a, b) {
return this.api(!0).rows(a, b).data()
};
this.api = function (a) {
return a ? new v(Ca(this[C.iApiIndex])) : new v(this)
};
this.fnAddData = function (a, b) {
var c = this.api(!0);
a = f.isArray(a) && (f.isArray(a[0]) || f.isPlainObject(a[0])) ? c.rows.add(a) : c.row.add(a);
(b === p || b) && c.draw();
return a.flatten().toArray()
};
this.fnAdjustColumnSizing = function (a) {
var b = this.api(!0).columns.adjust(),
c = b.settings()[0],
d = c.oScroll;
a === p || a ? b.draw(!1) :
("" !== d.sX || "" !== d.sY) && na(c)
};
this.fnClearTable = function (a) {
var b = this.api(!0).clear();
(a === p || a) && b.draw()
};
this.fnClose = function (a) {
this.api(!0).row(a).child.hide()
};
this.fnDeleteRow = function (a, b, c) {
var d = this.api(!0);
a = d.rows(a);
var e = a.settings()[0],
h = e.aoData[a[0][0]];
a.remove();
b && b.call(this, e, h);
(c === p || c) && d.draw();
return h
};
this.fnDestroy = function (a) {
this.api(!0).destroy(a)
};
this.fnDraw = function (a) {
this.api(!0).draw(a)
};
this.fnFilter = function (a, b, c, d, e, f) {
e = this.api(!0);
null === b || b === p ?
e.search(a, c, d, f) : e.column(b).search(a, c, d, f);
e.draw()
};
this.fnGetData = function (a, b) {
var c = this.api(!0);
if (a !== p) {
var d = a.nodeName ? a.nodeName.toLowerCase() : "";
return b !== p || "td" == d || "th" == d ? c.cell(a, b).data() : c.row(a).data() || null
}
return c.data().toArray()
};
this.fnGetNodes = function (a) {
var b = this.api(!0);
return a !== p ? b.row(a).node() : b.rows().nodes().flatten().toArray()
};
this.fnGetPosition = function (a) {
var b = this.api(!0),
c = a.nodeName.toUpperCase();
return "TR" == c ? b.row(a).index() : "TD" == c || "TH" == c ? (a = b.cell(a).index(),
[a.row, a.columnVisible, a.column]) : null
};
this.fnIsOpen = function (a) {
return this.api(!0).row(a).child.isShown()
};
this.fnOpen = function (a, b, c) {
return this.api(!0).row(a).child(b, c).show().child()[0]
};
this.fnPageChange = function (a, b) {
a = this.api(!0).page(a);
(b === p || b) && a.draw(!1)
};
this.fnSetColumnVis = function (a, b, c) {
a = this.api(!0).column(a).visible(b);
(c === p || c) && a.columns.adjust().draw()
};
this.fnSettings = function () {
return Ca(this[C.iApiIndex])
};
this.fnSort = function (a) {
this.api(!0).order(a).draw()
};
this.fnSortListener =
function (a, b, c) {
this.api(!0).order.listener(a, b, c)
};
this.fnUpdate = function (a, b, c, d, e) {
var h = this.api(!0);
c === p || null === c ? h.row(b).data(a) : h.cell(b, c).data(a);
(e === p || e) && h.columns.adjust();
(d === p || d) && h.draw();
return 0
};
this.fnVersionCheck = C.fnVersionCheck;
var b = this,
c = a === p,
d = this.length;
c && (a = {});
this.oApi = this.internal = C.internal;
for (var e in q.ext.internal) e && (this[e] = Qb(e));
this.each(function () {
var e = {},
g = 1 < d ? ab(e, a, !0) : a,
k = 0,
l;
e = this.getAttribute("id");
var n = !1,
m = q.defaults,
w = f(this);
if ("table" !=
this.nodeName.toLowerCase()) O(null, 0, "Non-table node initialisation (" + this.nodeName + ")", 2);
else {
jb(m);
kb(m.column);
L(m, m, !0);
L(m.column, m.column, !0);
L(m, f.extend(g, w.data()), !0);
var u = q.settings;
k = 0;
for (l = u.length; k < l; k++) {
var t = u[k];
if (t.nTable == this || t.nTHead && t.nTHead.parentNode == this || t.nTFoot && t.nTFoot.parentNode == this) {
var v = g.bRetrieve !== p ? g.bRetrieve : m.bRetrieve;
if (c || v) return t.oInstance;
if (g.bDestroy !== p ? g.bDestroy : m.bDestroy) {
t.oInstance.fnDestroy();
break
} else {
O(t, 0, "Cannot reinitialise DataTable",
3);
return
}
}
if (t.sTableId == this.id) {
u.splice(k, 1);
break
}
}
if (null === e || "" === e) this.id = e = "DataTables_Table_" + q.ext._unique++;
var r = f.extend(!0, {}, q.models.oSettings, {
sDestroyWidth: w[0].style.width,
sInstance: e,
sTableId: e
});
r.nTable = this;
r.oApi = b.internal;
r.oInit = g;
u.push(r);
r.oInstance = 1 === b.length ? b : w.dataTable();
jb(g);
Ga(g.oLanguage);
g.aLengthMenu && !g.iDisplayLength && (g.iDisplayLength = f.isArray(g.aLengthMenu[0]) ? g.aLengthMenu[0][0] : g.aLengthMenu[0]);
g = ab(f.extend(!0, {}, m), g);
M(r.oFeatures, g, "bPaginate bLengthChange bFilter bSort bSortMulti bInfo bProcessing bAutoWidth bSortClasses bServerSide bDeferRender".split(" "));
M(r, g, ["asStripeClasses", "ajax", "fnServerData", "fnFormatNumber", "sServerMethod", "aaSorting", "aaSortingFixed", "aLengthMenu", "sPaginationType", "sAjaxSource", "sAjaxDataProp", "iStateDuration", "sDom", "bSortCellsTop", "iTabIndex", "fnStateLoadCallback", "fnStateSaveCallback", "renderer", "searchDelay", "rowId", ["iCookieDuration", "iStateDuration"],
["oSearch", "oPreviousSearch"],
["aoSearchCols", "aoPreSearchCols"],
["iDisplayLength", "_iDisplayLength"]
]);
M(r.oScroll, g, [
["sScrollX", "sX"],
["sScrollXInner", "sXInner"],
["sScrollY", "sY"],
["bScrollCollapse", "bCollapse"]
]);
M(r.oLanguage, g, "fnInfoCallback");
E(r, "aoDrawCallback", g.fnDrawCallback, "user");
E(r, "aoServerParams", g.fnServerParams, "user");
E(r, "aoStateSaveParams", g.fnStateSaveParams, "user");
E(r, "aoStateLoadParams", g.fnStateLoadParams, "user");
E(r, "aoStateLoaded", g.fnStateLoaded, "user");
E(r, "aoRowCallback", g.fnRowCallback, "user");
E(r, "aoRowCreatedCallback", g.fnCreatedRow, "user");
E(r, "aoHeaderCallback", g.fnHeaderCallback, "user");
E(r, "aoFooterCallback", g.fnFooterCallback,
"user");
E(r, "aoInitComplete", g.fnInitComplete, "user");
E(r, "aoPreDrawCallback", g.fnPreDrawCallback, "user");
r.rowIdFn = U(g.rowId);
lb(r);
var x = r.oClasses;
f.extend(x, q.ext.classes, g.oClasses);
w.addClass(x.sTable);
r.iInitDisplayStart === p && (r.iInitDisplayStart = g.iDisplayStart, r._iDisplayStart = g.iDisplayStart);
null !== g.iDeferLoading && (r.bDeferLoading = !0, e = f.isArray(g.iDeferLoading), r._iRecordsDisplay = e ? g.iDeferLoading[0] : g.iDeferLoading, r._iRecordsTotal = e ? g.iDeferLoading[1] : g.iDeferLoading);
var y = r.oLanguage;
f.extend(!0, y, g.oLanguage);
y.sUrl && (f.ajax({
dataType: "json",
url: y.sUrl,
success: function (a) {
Ga(a);
L(m.oLanguage, a);
f.extend(!0, y, a);
ja(r)
},
error: function () {
ja(r)
}
}), n = !0);
null === g.asStripeClasses && (r.asStripeClasses = [x.sStripeOdd, x.sStripeEven]);
e = r.asStripeClasses;
var z = w.children("tbody").find("tr").eq(0); - 1 !== f.inArray(!0, f.map(e, function (a, b) {
return z.hasClass(a)
})) && (f("tbody tr", this).removeClass(e.join(" ")), r.asDestroyStripes = e.slice());
e = [];
u = this.getElementsByTagName("thead");
0 !== u.length &&
(fa(r.aoHeader, u[0]), e = ua(r));
if (null === g.aoColumns)
for (u = [], k = 0, l = e.length; k < l; k++) u.push(null);
else u = g.aoColumns;
k = 0;
for (l = u.length; k < l; k++) Ia(r, e ? e[k] : null);
nb(r, g.aoColumnDefs, u, function (a, b) {
ma(r, a, b)
});
if (z.length) {
var B = function (a, b) {
return null !== a.getAttribute("data-" + b) ? b : null
};
f(z[0]).children("th, td").each(function (a, b) {
var c = r.aoColumns[a];
if (c.mData === a) {
var d = B(b, "sort") || B(b, "order");
b = B(b, "filter") || B(b, "search");
if (null !== d || null !== b) c.mData = {
_: a + ".display",
sort: null !== d ? a + ".@data-" +
d : p,
type: null !== d ? a + ".@data-" + d : p,
filter: null !== b ? a + ".@data-" + b : p
}, ma(r, a)
}
})
}
var C = r.oFeatures;
e = function () {
if (g.aaSorting === p) {
var a = r.aaSorting;
k = 0;
for (l = a.length; k < l; k++) a[k][1] = r.aoColumns[k].asSorting[0]
}
Aa(r);
C.bSort && E(r, "aoDrawCallback", function () {
if (r.bSorted) {
var a = Y(r),
b = {};
f.each(a, function (a, c) {
b[c.src] = c.dir
});
A(r, null, "order", [r, a, b]);
Nb(r)
}
});
E(r, "aoDrawCallback", function () {
(r.bSorted || "ssp" === D(r) || C.bDeferRender) && Aa(r)
}, "sc");
a = w.children("caption").each(function () {
this._captionSide =
f(this).css("caption-side")
});
var b = w.children("thead");
0 === b.length && (b = f("<thead/>").appendTo(w));
r.nTHead = b[0];
b = w.children("tbody");
0 === b.length && (b = f("<tbody/>").appendTo(w));
r.nTBody = b[0];
b = w.children("tfoot");
0 === b.length && 0 < a.length && ("" !== r.oScroll.sX || "" !== r.oScroll.sY) && (b = f("<tfoot/>").appendTo(w));
0 === b.length || 0 === b.children().length ? w.addClass(x.sNoFooter) : 0 < b.length && (r.nTFoot = b[0], fa(r.aoFooter, r.nTFoot));
if (g.aaData)
for (k = 0; k < g.aaData.length; k++) R(r, g.aaData[k]);
else(r.bDeferLoading ||
"dom" == D(r)) && pa(r, f(r.nTBody).children("tr"));
r.aiDisplay = r.aiDisplayMaster.slice();
r.bInitialised = !0;
!1 === n && ja(r)
};
g.bStateSave ? (C.bStateSave = !0, E(r, "aoDrawCallback", Ba, "state_save"), Ob(r, g, e)) : e()
}
});
b = null;
return this
},
C, t, x, cb = {},
Rb = /[\r\n\u2028]/g,
Ea = /<.*?>/g,
cc = /^\d{2,4}[\.\/\-]\d{1,2}[\.\/\-]\d{1,2}([T ]{1}\d{1,2}[:\.]\d{2}([\.:]\d{2})?)?$/,
dc = /(\/|\.|\*|\+|\?|\||\(|\)|\[|\]|\{|\}|\\|\$|\^|\-)/g,
bb = /[',$£€¥%\u2009\u202F\u20BD\u20a9\u20BArfkɃΞ]/gi,
P = function (a) {
return a && !0 !== a && "-" !== a ? !1 :
!0
},
Sb = function (a) {
var b = parseInt(a, 10);
return !isNaN(b) && isFinite(a) ? b : null
},
Tb = function (a, b) {
cb[b] || (cb[b] = new RegExp(Ua(b), "g"));
return "string" === typeof a && "." !== b ? a.replace(/\./g, "").replace(cb[b], ".") : a
},
db = function (a, b, c) {
var d = "string" === typeof a;
if (P(a)) return !0;
b && d && (a = Tb(a, b));
c && d && (a = a.replace(bb, ""));
return !isNaN(parseFloat(a)) && isFinite(a)
},
Ub = function (a, b, c) {
return P(a) ? !0 : P(a) || "string" === typeof a ? db(a.replace(Ea, ""), b, c) ? !0 : null : null
},
J = function (a, b, c) {
var d = [],
e = 0,
h = a.length;
if (c !==
p)
for (; e < h; e++) a[e] && a[e][b] && d.push(a[e][b][c]);
else
for (; e < h; e++) a[e] && d.push(a[e][b]);
return d
},
la = function (a, b, c, d) {
var e = [],
h = 0,
g = b.length;
if (d !== p)
for (; h < g; h++) a[b[h]][c] && e.push(a[b[h]][c][d]);
else
for (; h < g; h++) e.push(a[b[h]][c]);
return e
},
Z = function (a, b) {
var c = [];
if (b === p) {
b = 0;
var d = a
} else d = b, b = a;
for (a = b; a < d; a++) c.push(a);
return c
},
Vb = function (a) {
for (var b = [], c = 0, d = a.length; c < d; c++) a[c] && b.push(a[c]);
return b
},
ta = function (a) {
a: {
if (!(2 > a.length)) {
var b = a.slice().sort();
for (var c = b[0], d = 1,
e = b.length; d < e; d++) {
if (b[d] === c) {
b = !1;
break a
}
c = b[d]
}
}
b = !0
}
if (b) return a.slice();b = [];e = a.length;
var h, g = 0;d = 0;a: for (; d < e; d++) {
c = a[d];
for (h = 0; h < g; h++)
if (b[h] === c) continue a;
b.push(c);
g++
}
return b
};
q.util = {
throttle: function (a, b) {
var c = b !== p ? b : 200,
d, e;
return function () {
var b = this,
g = +new Date,
f = arguments;
d && g < d + c ? (clearTimeout(e), e = setTimeout(function () {
d = p;
a.apply(b, f)
}, c)) : (d = g, a.apply(b, f))
}
},
escapeRegex: function (a) {
return a.replace(dc, "\\$1")
}
};
var F = function (a, b, c) {
a[b] !== p && (a[c] = a[b])
},
da = /\[.*?\]$/,
X = /\(\)$/,
Ua = q.util.escapeRegex,
ya = f("<div>")[0],
$b = ya.textContent !== p,
bc = /<.*?>/g,
Sa = q.util.throttle,
Wb = [],
G = Array.prototype,
ec = function (a) {
var b, c = q.settings,
d = f.map(c, function (a, b) {
return a.nTable
});
if (a) {
if (a.nTable && a.oApi) return [a];
if (a.nodeName && "table" === a.nodeName.toLowerCase()) {
var e = f.inArray(a, d);
return -1 !== e ? [c[e]] : null
}
if (a && "function" === typeof a.settings) return a.settings().toArray();
"string" === typeof a ? b = f(a) : a instanceof f && (b = a)
} else return [];
if (b) return b.map(function (a) {
e = f.inArray(this,
d);
return -1 !== e ? c[e] : null
}).toArray()
};
var v = function (a, b) {
if (!(this instanceof v)) return new v(a, b);
var c = [],
d = function (a) {
(a = ec(a)) && c.push.apply(c, a)
};
if (f.isArray(a))
for (var e = 0, h = a.length; e < h; e++) d(a[e]);
else d(a);
this.context = ta(c);
b && f.merge(this, b);
this.selector = {
rows: null,
cols: null,
opts: null
};
v.extend(this, this, Wb)
};
q.Api = v;
f.extend(v.prototype, {
any: function () {
return 0 !== this.count()
},
concat: G.concat,
context: [],
count: function () {
return this.flatten().length
},
each: function (a) {
for (var b = 0, c =
this.length; b < c; b++) a.call(this, this[b], b, this);
return this
},
eq: function (a) {
var b = this.context;
return b.length > a ? new v(b[a], this[a]) : null
},
filter: function (a) {
var b = [];
if (G.filter) b = G.filter.call(this, a, this);
else
for (var c = 0, d = this.length; c < d; c++) a.call(this, this[c], c, this) && b.push(this[c]);
return new v(this.context, b)
},
flatten: function () {
var a = [];
return new v(this.context, a.concat.apply(a, this.toArray()))
},
join: G.join,
indexOf: G.indexOf || function (a, b) {
b = b || 0;
for (var c = this.length; b < c; b++)
if (this[b] ===
a) return b;
return -1
},
iterator: function (a, b, c, d) {
var e = [],
h, g, f = this.context,
l, n = this.selector;
"string" === typeof a && (d = c, c = b, b = a, a = !1);
var m = 0;
for (h = f.length; m < h; m++) {
var q = new v(f[m]);
if ("table" === b) {
var u = c.call(q, f[m], m);
u !== p && e.push(u)
} else if ("columns" === b || "rows" === b) u = c.call(q, f[m], this[m], m), u !== p && e.push(u);
else if ("column" === b || "column-rows" === b || "row" === b || "cell" === b) {
var t = this[m];
"column-rows" === b && (l = Fa(f[m], n.opts));
var x = 0;
for (g = t.length; x < g; x++) u = t[x], u = "cell" === b ? c.call(q, f[m], u.row,
u.column, m, x) : c.call(q, f[m], u, m, x, l), u !== p && e.push(u)
}
}
return e.length || d ? (a = new v(f, a ? e.concat.apply([], e) : e), b = a.selector, b.rows = n.rows, b.cols = n.cols, b.opts = n.opts, a) : this
},
lastIndexOf: G.lastIndexOf || function (a, b) {
return this.indexOf.apply(this.toArray.reverse(), arguments)
},
length: 0,
map: function (a) {
var b = [];
if (G.map) b = G.map.call(this, a, this);
else
for (var c = 0, d = this.length; c < d; c++) b.push(a.call(this, this[c], c));
return new v(this.context, b)
},
pluck: function (a) {
return this.map(function (b) {
return b[a]
})
},
pop: G.pop,
push: G.push,
reduce: G.reduce || function (a, b) {
return mb(this, a, b, 0, this.length, 1)
},
reduceRight: G.reduceRight || function (a, b) {
return mb(this, a, b, this.length - 1, -1, -1)
},
reverse: G.reverse,
selector: null,
shift: G.shift,
slice: function () {
return new v(this.context, this)
},
sort: G.sort,
splice: G.splice,
toArray: function () {
return G.slice.call(this)
},
to$: function () {
return f(this)
},
toJQuery: function () {
return f(this)
},
unique: function () {
return new v(this.context, ta(this))
},
unshift: G.unshift
});
v.extend = function (a,
b, c) {
if (c.length && b && (b instanceof v || b.__dt_wrapper)) {
var d, e = function (a, b, c) {
return function () {
var d = b.apply(a, arguments);
v.extend(d, d, c.methodExt);
return d
}
};
var h = 0;
for (d = c.length; h < d; h++) {
var g = c[h];
b[g.name] = "function" === g.type ? e(a, g.val, g) : "object" === g.type ? {} : g.val;
b[g.name].__dt_wrapper = !0;
v.extend(a, b[g.name], g.propExt)
}
}
};
v.register = t = function (a, b) {
if (f.isArray(a))
for (var c = 0, d = a.length; c < d; c++) v.register(a[c], b);
else {
d = a.split(".");
var e = Wb,
h;
a = 0;
for (c = d.length; a < c; a++) {
var g = (h = -1 !==
d[a].indexOf("()")) ? d[a].replace("()", "") : d[a];
a: {
var k = 0;
for (var l = e.length; k < l; k++)
if (e[k].name === g) {
k = e[k];
break a
} k = null
}
k || (k = {
name: g,
val: {},
methodExt: [],
propExt: [],
type: "object"
}, e.push(k));
a === c - 1 ? (k.val = b, k.type = "function" === typeof b ? "function" : f.isPlainObject(b) ? "object" : "other") : e = h ? k.methodExt : k.propExt
}
}
};
// Register a plural API method `a` (implementation `c`) together with its
// singular counterpart `b`. The singular wrapper calls the same
// implementation and unwraps the result: `this` passes through unchanged;
// an API result is reduced to its first element (first row of a 2D result),
// or `p` (this module's undefined alias) when empty. Aliased as `x`.
v.registerPlural = x = function (a, b, c) {
    v.register(a, c);
    v.register(b, function () {
        var a = c.apply(this, arguments);
        return a === this ? this : a instanceof v ? a.length ? f.isArray(a[0]) ? new v(a.context,
            a[0]) : a[0] : p : a
    })
};
// Resolve a table selector `a` against an array of settings objects `b`,
// returning the matching settings objects. A number is a direct index;
// anything else is treated as jQuery filter criteria applied to each
// table's root node (nTable).
var fc = function (a, b) {
    if ("number" === typeof a) return [b[a]];
    // Collect the <table> node for each settings object...
    var c = f.map(b, function (a, b) {
        return a.nTable
    });
    // ...filter the nodes with the selector, then map each matched node
    // back to its settings object via its position in `c`.
    return f(c).filter(a).map(function (a) {
        a = f.inArray(this, c);
        return b[a]
    }).toArray()
};
t("tables()", function (a) {
return a ? new v(fc(a, this.context)) : this
});
t("table()", function (a) {
a = this.tables(a);
var b = a.context;
return b.length ? new v(b[0]) : a
});
x("tables().nodes()", "table().node()", function () {
return this.iterator("table", function (a) {
return a.nTable
}, 1)
});
x("tables().body()", "table().body()",
function () {
return this.iterator("table", function (a) {
return a.nTBody
}, 1)
});
x("tables().header()", "table().header()", function () {
return this.iterator("table", function (a) {
return a.nTHead
}, 1)
});
x("tables().footer()", "table().footer()", function () {
return this.iterator("table", function (a) {
return a.nTFoot
}, 1)
});
x("tables().containers()", "table().container()", function () {
return this.iterator("table", function (a) {
return a.nTableWrapper
}, 1)
});
t("draw()", function (a) {
return this.iterator("table", function (b) {
"page" ===
a ? S(b) : ("string" === typeof a && (a = "full-hold" === a ? !1 : !0), V(b, !1 === a))
})
});
t("page()", function (a) {
return a === p ? this.page.info().page : this.iterator("table", function (b) {
Xa(b, a)
})
});
t("page.info()", function (a) {
if (0 === this.context.length) return p;
a = this.context[0];
var b = a._iDisplayStart,
c = a.oFeatures.bPaginate ? a._iDisplayLength : -1,
d = a.fnRecordsDisplay(),
e = -1 === c;
return {
page: e ? 0 : Math.floor(b / c),
pages: e ? 1 : Math.ceil(d / c),
start: b,
end: a.fnDisplayEnd(),
length: c,
recordsTotal: a.fnRecordsTotal(),
recordsDisplay: d,
serverSide: "ssp" === D(a)
}
});
t("page.len()", function (a) {
return a === p ? 0 !== this.context.length ? this.context[0]._iDisplayLength : p : this.iterator("table", function (b) {
Va(b, a)
})
});
// Shared implementation for ajax.reload() / ajax.url().load().
// a: settings object; b: "hold position" flag forwarded to the draw (V);
// c: optional callback, fired once with the loaded JSON after the next draw.
var Xb = function (a, b, c) {
    if (c) {
        var d = new v(a);
        d.one("draw", function () {
            c(d.ajax.json())
        })
    }
    // Server-side processing: a plain redraw re-fetches from the server.
    if ("ssp" == D(a)) V(a, b);
    else {
        // NOTE(review): K(a, !0/!1) appears to toggle the processing
        // indicator — minified helper, confirm against unminified source.
        K(a, !0);
        // Abort any in-flight request that has not reached readyState 4 (DONE).
        var e = a.jqXHR;
        e && 4 !== e.readyState && e.abort();
        // Fetch fresh data (va), clear existing rows (qa), map the response
        // to the data array (wa), re-add each row (R), then redraw.
        va(a, [], function (c) {
            qa(a);
            c = wa(a, c);
            for (var d = 0, e = c.length; d < e; d++) R(a, c[d]);
            V(a, b);
            K(a, !1)
        })
    }
};
t("ajax.json()", function () {
var a = this.context;
if (0 < a.length) return a[0].json
});
t("ajax.params()", function () {
var a = this.context;
if (0 < a.length) return a[0].oAjaxData
});
t("ajax.reload()", function (a, b) {
return this.iterator("table", function (c) {
Xb(c, !1 === b, a)
})
});
t("ajax.url()", function (a) {
var b = this.context;
if (a === p) {
if (0 === b.length) return p;
b = b[0];
return b.ajax ? f.isPlainObject(b.ajax) ? b.ajax.url : b.ajax : b.sAjaxSource
}
return this.iterator("table", function (b) {
f.isPlainObject(b.ajax) ? b.ajax.url = a : b.ajax = a
})
});
t("ajax.url().load()", function (a, b) {
return this.iterator("table", function (c) {
Xb(c,
!1 === b, a)
})
});
var eb = function (a, b, c, d, e) {
var h = [],
g, k, l;
var n = typeof b;
b && "string" !== n && "function" !== n && b.length !== p || (b = [b]);
n = 0;
for (k = b.length; n < k; n++) {
var m = b[n] && b[n].split && !b[n].match(/[\[\(:]/) ? b[n].split(",") : [b[n]];
var q = 0;
for (l = m.length; q < l; q++)(g = c("string" === typeof m[q] ? f.trim(m[q]) : m[q])) && g.length && (h = h.concat(g))
}
a = C.selector[a];
if (a.length)
for (n = 0, k = a.length; n < k; n++) h = a[n](d, e, h);
return ta(h)
},
fb = function (a) {
a || (a = {});
a.filter && a.search === p && (a.search = a.filter);
return f.extend({
search: "none",
order: "current",
page: "all"
}, a)
},
gb = function (a) {
for (var b = 0, c = a.length; b < c; b++)
if (0 < a[b].length) return a[0] = a[b], a[0].length = 1, a.length = 1, a.context = [a.context[b]], a;
a.length = 0;
return a
},
Fa = function (a, b) {
var c = [],
d = a.aiDisplay;
var e = a.aiDisplayMaster;
var h = b.search;
var g = b.order;
b = b.page;
if ("ssp" == D(a)) return "removed" === h ? [] : Z(0, e.length);
if ("current" == b)
for (g = a._iDisplayStart, a = a.fnDisplayEnd(); g < a; g++) c.push(d[g]);
else if ("current" == g || "applied" == g)
if ("none" == h) c = e.slice();
else if ("applied" == h) c =
d.slice();
else {
if ("removed" == h) {
var k = {};
g = 0;
for (a = d.length; g < a; g++) k[d[g]] = null;
c = f.map(e, function (a) {
return k.hasOwnProperty(a) ? null : a
})
}
} else if ("index" == g || "original" == g)
for (g = 0, a = a.aoData.length; g < a; g++) "none" == h ? c.push(g) : (e = f.inArray(g, d), (-1 === e && "removed" == h || 0 <= e && "applied" == h) && c.push(g));
return c
},
gc = function (a, b, c) {
var d;
return eb("row", b, function (b) {
var e = Sb(b),
g = a.aoData;
if (null !== e && !c) return [e];
d || (d = Fa(a, c));
if (null !== e && -1 !== f.inArray(e, d)) return [e];
if (null === b || b === p || "" === b) return d;
if ("function" === typeof b) return f.map(d, function (a) {
var c = g[a];
return b(a, c._aData, c.nTr) ? a : null
});
if (b.nodeName) {
e = b._DT_RowIndex;
var k = b._DT_CellIndex;
if (e !== p) return g[e] && g[e].nTr === b ? [e] : [];
if (k) return g[k.row] && g[k.row].nTr === b.parentNode ? [k.row] : [];
e = f(b).closest("*[data-dt-row]");
return e.length ? [e.data("dt-row")] : []
}
if ("string" === typeof b && "#" === b.charAt(0) && (e = a.aIds[b.replace(/^#/, "")], e !== p)) return [e.idx];
e = Vb(la(a.aoData, d, "nTr"));
return f(e).filter(b).map(function () {
return this._DT_RowIndex
}).toArray()
},
a, c)
};
t("rows()", function (a, b) {
a === p ? a = "" : f.isPlainObject(a) && (b = a, a = "");
b = fb(b);
var c = this.iterator("table", function (c) {
return gc(c, a, b)
}, 1);
c.selector.rows = a;
c.selector.opts = b;
return c
});
t("rows().nodes()", function () {
return this.iterator("row", function (a, b) {
return a.aoData[b].nTr || p
}, 1)
});
t("rows().data()", function () {
return this.iterator(!0, "rows", function (a, b) {
return la(a.aoData, b, "_aData")
}, 1)
});
x("rows().cache()", "row().cache()", function (a) {
return this.iterator("row", function (b, c) {
b = b.aoData[c];
return "search" === a ? b._aFilterData : b._aSortData
}, 1)
});
x("rows().invalidate()", "row().invalidate()", function (a) {
return this.iterator("row", function (b, c) {
ea(b, c, a)
})
});
x("rows().indexes()", "row().index()", function () {
return this.iterator("row", function (a, b) {
return b
}, 1)
});
x("rows().ids()", "row().id()", function (a) {
for (var b = [], c = this.context, d = 0, e = c.length; d < e; d++)
for (var h = 0, g = this[d].length; h < g; h++) {
var f = c[d].rowIdFn(c[d].aoData[this[d][h]]._aData);
b.push((!0 === a ? "#" : "") + f)
}
return new v(c, b)
});
x("rows().remove()",
"row().remove()",
function () {
var a = this;
this.iterator("row", function (b, c, d) {
var e = b.aoData,
h = e[c],
g, f;
e.splice(c, 1);
var l = 0;
for (g = e.length; l < g; l++) {
var n = e[l];
var m = n.anCells;
null !== n.nTr && (n.nTr._DT_RowIndex = l);
if (null !== m)
for (n = 0, f = m.length; n < f; n++) m[n]._DT_CellIndex.row = l
}
ra(b.aiDisplayMaster, c);
ra(b.aiDisplay, c);
ra(a[d], c, !1);
0 < b._iRecordsDisplay && b._iRecordsDisplay--;
Wa(b);
c = b.rowIdFn(h._aData);
c !== p && delete b.aIds[c]
});
this.iterator("table", function (a) {
for (var b = 0, d = a.aoData.length; b < d; b++) a.aoData[b].idx =
b
});
return this
});
t("rows.add()", function (a) {
var b = this.iterator("table", function (b) {
var c, d = [];
var g = 0;
for (c = a.length; g < c; g++) {
var f = a[g];
f.nodeName && "TR" === f.nodeName.toUpperCase() ? d.push(pa(b, f)[0]) : d.push(R(b, f))
}
return d
}, 1),
c = this.rows(-1);
c.pop();
f.merge(c, b);
return c
});
t("row()", function (a, b) {
return gb(this.rows(a, b))
});
t("row().data()", function (a) {
var b = this.context;
if (a === p) return b.length && this.length ? b[0].aoData[this[0]]._aData : p;
var c = b[0].aoData[this[0]];
c._aData = a;
f.isArray(a) && c.nTr.id &&
Q(b[0].rowId)(a, c.nTr.id);
ea(b[0], this[0], "data");
return this
});
t("row().node()", function () {
var a = this.context;
return a.length && this.length ? a[0].aoData[this[0]].nTr || null : null
});
t("row.add()", function (a) {
a instanceof f && a.length && (a = a[0]);
var b = this.iterator("table", function (b) {
return a.nodeName && "TR" === a.nodeName.toUpperCase() ? pa(b, a)[0] : R(b, a)
});
return this.row(b[0])
});
var hc = function (a, b, c, d) {
var e = [],
h = function (b, c) {
if (f.isArray(b) || b instanceof f)
for (var d = 0, g = b.length; d < g; d++) h(b[d], c);
else b.nodeName &&
"tr" === b.nodeName.toLowerCase() ? e.push(b) : (d = f("<tr><td/></tr>").addClass(c), f("td", d).addClass(c).html(b)[0].colSpan = W(a), e.push(d[0]))
};
h(c, d);
b._details && b._details.detach();
b._details = f(e);
b._detailsShow && b._details.insertAfter(b.nTr)
},
hb = function (a, b) {
var c = a.context;
c.length && (a = c[0].aoData[b !== p ? b : a[0]]) && a._details && (a._details.remove(), a._detailsShow = p, a._details = p)
},
Yb = function (a, b) {
var c = a.context;
c.length && a.length && (a = c[0].aoData[a[0]], a._details && ((a._detailsShow = b) ? a._details.insertAfter(a.nTr) :
a._details.detach(), ic(c[0])))
},
ic = function (a) {
var b = new v(a),
c = a.aoData;
b.off("draw.dt.DT_details column-visibility.dt.DT_details destroy.dt.DT_details");
0 < J(c, "_details").length && (b.on("draw.dt.DT_details", function (d, e) {
a === e && b.rows({
page: "current"
}).eq(0).each(function (a) {
a = c[a];
a._detailsShow && a._details.insertAfter(a.nTr)
})
}), b.on("column-visibility.dt.DT_details", function (b, e, f, g) {
if (a === e)
for (e = W(e), f = 0, g = c.length; f < g; f++) b = c[f], b._details && b._details.children("td[colspan]").attr("colspan",
e)
}), b.on("destroy.dt.DT_details", function (d, e) {
if (a === e)
for (d = 0, e = c.length; d < e; d++) c[d]._details && hb(b, d)
}))
};
t("row().child()", function (a, b) {
var c = this.context;
if (a === p) return c.length && this.length ? c[0].aoData[this[0]]._details : p;
!0 === a ? this.child.show() : !1 === a ? hb(this) : c.length && this.length && hc(c[0], c[0].aoData[this[0]], a, b);
return this
});
t(["row().child.show()", "row().child().show()"], function (a) {
Yb(this, !0);
return this
});
t(["row().child.hide()", "row().child().hide()"], function () {
Yb(this, !1);
return this
});
t(["row().child.remove()", "row().child().remove()"], function () {
hb(this);
return this
});
t("row().child.isShown()", function () {
var a = this.context;
return a.length && this.length ? a[0].aoData[this[0]]._detailsShow || !1 : !1
});
var jc = /^([^:]+):(name|visIdx|visible)$/,
Zb = function (a, b, c, d, e) {
c = [];
d = 0;
for (var f = e.length; d < f; d++) c.push(I(a, e[d], b));
return c
},
kc = function (a, b, c) {
var d = a.aoColumns,
e = J(d, "sName"),
h = J(d, "nTh");
return eb("column", b, function (b) {
var g = Sb(b);
if ("" === b) return Z(d.length);
if (null !==
g) return [0 <= g ? g : d.length + g];
if ("function" === typeof b) {
var l = Fa(a, c);
return f.map(d, function (c, d) {
return b(d, Zb(a, d, 0, 0, l), h[d]) ? d : null
})
}
var n = "string" === typeof b ? b.match(jc) : "";
if (n) switch (n[2]) {
case "visIdx":
case "visible":
g = parseInt(n[1], 10);
if (0 > g) {
var m = f.map(d, function (a, b) {
return a.bVisible ? b : null
});
return [m[m.length + g]]
}
return [ba(a, g)];
case "name":
return f.map(e, function (a, b) {
return a === n[1] ? b : null
});
default:
return []
}
if (b.nodeName && b._DT_CellIndex) return [b._DT_CellIndex.column];
g = f(h).filter(b).map(function () {
return f.inArray(this,
h)
}).toArray();
if (g.length || !b.nodeName) return g;
g = f(b).closest("*[data-dt-column]");
return g.length ? [g.data("dt-column")] : []
}, a, c)
};
t("columns()", function (a, b) {
a === p ? a = "" : f.isPlainObject(a) && (b = a, a = "");
b = fb(b);
var c = this.iterator("table", function (c) {
return kc(c, a, b)
}, 1);
c.selector.cols = a;
c.selector.opts = b;
return c
});
x("columns().header()", "column().header()", function (a, b) {
return this.iterator("column", function (a, b) {
return a.aoColumns[b].nTh
}, 1)
});
x("columns().footer()", "column().footer()", function (a,
b) {
return this.iterator("column", function (a, b) {
return a.aoColumns[b].nTf
}, 1)
});
x("columns().data()", "column().data()", function () {
return this.iterator("column-rows", Zb, 1)
});
x("columns().dataSrc()", "column().dataSrc()", function () {
return this.iterator("column", function (a, b) {
return a.aoColumns[b].mData
}, 1)
});
x("columns().cache()", "column().cache()", function (a) {
return this.iterator("column-rows", function (b, c, d, e, f) {
return la(b.aoData, f, "search" === a ? "_aFilterData" : "_aSortData", c)
}, 1)
});
x("columns().nodes()",
"column().nodes()",
function () {
return this.iterator("column-rows", function (a, b, c, d, e) {
return la(a.aoData, e, "anCells", b)
}, 1)
});
x("columns().visible()", "column().visible()", function (a, b) {
var c = this,
d = this.iterator("column", function (b, c) {
if (a === p) return b.aoColumns[c].bVisible;
var d = b.aoColumns,
e = d[c],
h = b.aoData,
n;
if (a !== p && e.bVisible !== a) {
if (a) {
var m = f.inArray(!0, J(d, "bVisible"), c + 1);
d = 0;
for (n = h.length; d < n; d++) {
var q = h[d].nTr;
b = h[d].anCells;
q && q.insertBefore(b[c], b[m] || null)
}
} else f(J(b.aoData, "anCells",
c)).detach();
e.bVisible = a
}
});
a !== p && this.iterator("table", function (d) {
ha(d, d.aoHeader);
ha(d, d.aoFooter);
d.aiDisplay.length || f(d.nTBody).find("td[colspan]").attr("colspan", W(d));
Ba(d);
c.iterator("column", function (c, d) {
A(c, null, "column-visibility", [c, d, a, b])
});
(b === p || b) && c.columns.adjust()
});
return d
});
x("columns().indexes()", "column().index()", function (a) {
return this.iterator("column", function (b, c) {
return "visible" === a ? ca(b, c) : c
}, 1)
});
t("columns.adjust()", function () {
return this.iterator("table", function (a) {
aa(a)
},
1)
});
t("column.index()", function (a, b) {
if (0 !== this.context.length) {
var c = this.context[0];
if ("fromVisible" === a || "toData" === a) return ba(c, b);
if ("fromData" === a || "toVisible" === a) return ca(c, b)
}
});
t("column()", function (a, b) {
return gb(this.columns(a, b))
});
// Cell selector resolution: given a table's settings (a), a cell selector
// (b: null/undefined for all cells, a function, a plain {row, column}
// object, a node, or jQuery filter criteria) and selector-modifier options
// (c), return an array of {row, column} index objects for the matched
// cells. This chunk of the build was garbled here — the trailing
// node-fallback branch was missing (only the fragment "}] : []" survived);
// restored from DataTables 1.10.20 upstream.
var lc = function (a, b, c) {
    var d = a.aoData,
        e = Fa(a, c),                  // row indexes permitted by the modifier
        h = Vb(la(d, e, "anCells")),   // their cell nodes (nulls removed)
        g = f([].concat.apply([], h)), // flattened into one jQuery collection
        k, l = a.aoColumns.length,
        n, m, q, u, t, v;
    return eb("cell", b, function (b) {
        var c = "function" === typeof b;
        // Null/undefined selects every cell in the permitted rows; a
        // function selector is invoked per cell with ({row, column}, data,
        // node) and truthy results are kept.
        if (null === b || b === p || c) {
            n = [];
            m = 0;
            for (q = e.length; m < q; m++)
                for (k = e[m], u = 0; u < l; u++) {
                    t = {
                        row: k,
                        column: u
                    };
                    if (c) {
                        v = d[k];
                        b(t, I(a, k, u), v.anCells ? v.anCells[u] : null) && n.push(t)
                    } else n.push(t)
                }
            return n
        }
        // A plain {row, column} object passes through when its row is in
        // the permitted set.
        if (f.isPlainObject(b)) return b.column !== p && b.row !== p && -1 !== f.inArray(b.row, e) ? [b] : [];
        // Otherwise apply the selector as jQuery filter criteria over the
        // cell nodes and read the indexes stamped on each matched node.
        c = g.filter(b).map(function (a, b) {
            return {
                row: b._DT_CellIndex.row,
                column: b._DT_CellIndex.column
            }
        }).toArray();
        if (c.length || !b.nodeName) return c;
        // Restored fallback: the selector may be a child of an element
        // carrying data-dt-row / data-dt-column attributes (e.g. content
        // placed inside a cell) — walk up to it and read both indexes.
        c = f(b).closest("*[data-dt-row]");
        return c.length ? [{
            row: c.data("dt-row"),
            column: c.data("dt-column")
        }] : []
    }, a, c)
};
t("cells()", function (a, b, c) {
f.isPlainObject(a) &&
(a.row === p ? (c = a, a = null) : (c = b, b = null));
f.isPlainObject(b) && (c = b, b = null);
if (null === b || b === p) return this.iterator("table", function (b) {
return lc(b, a, fb(c))
});
var d = c ? {
page: c.page,
order: c.order,
search: c.search
} : {},
e = this.columns(b, d),
h = this.rows(a, d),
g, k, l, n;
d = this.iterator("table", function (a, b) {
a = [];
g = 0;
for (k = h[b].length; g < k; g++)
for (l = 0, n = e[b].length; l < n; l++) a.push({
row: h[b][g],
column: e[b][l]
});
return a
}, 1);
d = c && c.selected ? this.cells(d, c) : d;
f.extend(d.selector, {
cols: b,
rows: a,
opts: c
});
return d
});
x("cells().nodes()",
"cell().node()",
function () {
return this.iterator("cell", function (a, b, c) {
return (a = a.aoData[b]) && a.anCells ? a.anCells[c] : p
}, 1)
});
t("cells().data()", function () {
return this.iterator("cell", function (a, b, c) {
return I(a, b, c)
}, 1)
});
x("cells().cache()", "cell().cache()", function (a) {
a = "search" === a ? "_aFilterData" : "_aSortData";
return this.iterator("cell", function (b, c, d) {
return b.aoData[c][a][d]
}, 1)
});
x("cells().render()", "cell().render()", function (a) {
return this.iterator("cell", function (b, c, d) {
return I(b, c, d, a)
},
1)
});
x("cells().indexes()", "cell().index()", function () {
return this.iterator("cell", function (a, b, c) {
return {
row: b,
column: c,
columnVisible: ca(a, c)
}
}, 1)
});
x("cells().invalidate()", "cell().invalidate()", function (a) {
return this.iterator("cell", function (b, c, d) {
ea(b, c, a, d)
})
});
t("cell()", function (a, b, c) {
return gb(this.cells(a, b, c))
});
t("cell().data()", function (a) {
var b = this.context,
c = this[0];
if (a === p) return b.length && c.length ? I(b[0], c[0].row, c[0].column) : p;
ob(b[0], c[0].row, c[0].column, a);
ea(b[0], c[0].row,
"data", c[0].column);
return this
});
t("order()", function (a, b) {
var c = this.context;
if (a === p) return 0 !== c.length ? c[0].aaSorting : p;
"number" === typeof a ? a = [
[a, b]
] : a.length && !f.isArray(a[0]) && (a = Array.prototype.slice.call(arguments));
return this.iterator("table", function (b) {
b.aaSorting = a.slice()
})
});
t("order.listener()", function (a, b, c) {
return this.iterator("table", function (d) {
Qa(d, a, b, c)
})
});
t("order.fixed()", function (a) {
if (!a) {
var b = this.context;
b = b.length ? b[0].aaSortingFixed : p;
return f.isArray(b) ? {
pre: b
} :
b
}
return this.iterator("table", function (b) {
b.aaSortingFixed = f.extend(!0, {}, a)
})
});
t(["columns().order()", "column().order()"], function (a) {
var b = this;
return this.iterator("table", function (c, d) {
var e = [];
f.each(b[d], function (b, c) {
e.push([c, a])
});
c.aaSorting = e
})
});
t("search()", function (a, b, c, d) {
var e = this.context;
return a === p ? 0 !== e.length ? e[0].oPreviousSearch.sSearch : p : this.iterator("table", function (e) {
e.oFeatures.bFilter && ia(e, f.extend({}, e.oPreviousSearch, {
sSearch: a + "",
bRegex: null === b ? !1 : b,
bSmart: null ===
c ? !0 : c,
bCaseInsensitive: null === d ? !0 : d
}), 1)
})
});
x("columns().search()", "column().search()", function (a, b, c, d) {
return this.iterator("column", function (e, h) {
var g = e.aoPreSearchCols;
if (a === p) return g[h].sSearch;
e.oFeatures.bFilter && (f.extend(g[h], {
sSearch: a + "",
bRegex: null === b ? !1 : b,
bSmart: null === c ? !0 : c,
bCaseInsensitive: null === d ? !0 : d
}), ia(e, e.oPreviousSearch, 1))
})
});
t("state()", function () {
return this.context.length ? this.context[0].oSavedState : null
});
t("state.clear()", function () {
return this.iterator("table",
function (a) {
a.fnStateSaveCallback.call(a.oInstance, a, {})
})
});
t("state.loaded()", function () {
return this.context.length ? this.context[0].oLoadedState : null
});
t("state.save()", function () {
return this.iterator("table", function (a) {
Ba(a)
})
});
// Static versionCheck(a): returns true when the running DataTables version
// (q.version) is greater than or equal to the required dotted version
// string `a`. Segments are compared numerically left to right; missing
// segments count as 0; full equality yields true.
q.versionCheck = q.fnVersionCheck = function (a) {
    var b = q.version.split(".");
    a = a.split(".");
    for (var c, d, e = 0, f = a.length; e < f; e++)
        if (c = parseInt(b[e], 10) || 0, d = parseInt(a[e], 10) || 0, c !== d) return c > d;
    return !0
};
// Static isDataTable(a): true when `a` is an API instance, or when its
// first matched node is the root table — or a scroll-head/scroll-foot
// clone table — of any initialised DataTable in q.settings.
q.isDataTable = q.fnIsDataTable = function (a) {
    var b = f(a).get(0),
        c = !1;
    if (a instanceof q.Api) return !0;
    f.each(q.settings, function (a, e) {
        // Scrolling tables clone the header/footer into separate <table>s;
        // treat those clones as part of the same DataTable.
        a = e.nScrollHead ? f("table", e.nScrollHead)[0] : null;
        var d = e.nScrollFoot ? f("table", e.nScrollFoot)[0] : null;
        if (e.nTable === b || a === b || d === b) c = !0
    });
    return c
};
// Static tables(a): list all DataTables on the page. `a` may be a boolean
// ("visible tables only") or an options object {visible, api}; with
// api: true an API instance over the matches is returned instead of an
// array of table nodes.
q.tables = q.fnTables = function (a) {
    var b = !1;
    f.isPlainObject(a) && (b = a.api, a = a.visible);
    var c = f.map(q.settings, function (b) {
        if (!a || a && f(b.nTable).is(":visible")) return b.nTable
    });
    return b ? new v(c) : c
};
q.camelToHungarian = L;
t("$()", function (a, b) {
b = this.rows(b).nodes();
b = f(b);
return f([].concat(b.filter(a).toArray(),
b.find(a).toArray()))
});
f.each(["on", "one", "off"], function (a, b) {
t(b + "()", function () {
var a = Array.prototype.slice.call(arguments);
a[0] = f.map(a[0].split(/\s/), function (a) {
return a.match(/\.dt\b/) ? a : a + ".dt"
}).join(" ");
var d = f(this.tables().nodes());
d[b].apply(d, a);
return this
})
});
t("clear()", function () {
return this.iterator("table", function (a) {
qa(a)
})
});
t("settings()", function () {
return new v(this.context, this.context)
});
t("init()", function () {
var a = this.context;
return a.length ? a[0].oInit : null
});
t("data()",
function () {
return this.iterator("table", function (a) {
return J(a.aoData, "_aData")
}).flatten()
});
t("destroy()", function (a) {
a = a || !1;
return this.iterator("table", function (b) {
var c = b.nTableWrapper.parentNode,
d = b.oClasses,
e = b.nTable,
h = b.nTBody,
g = b.nTHead,
k = b.nTFoot,
l = f(e);
h = f(h);
var n = f(b.nTableWrapper),
m = f.map(b.aoData, function (a) {
return a.nTr
}),
p;
b.bDestroying = !0;
A(b, "aoDestroyCallback", "destroy", [b]);
a || (new v(b)).columns().visible(!0);
n.off(".DT").find(":not(tbody *)").off(".DT");
f(z).off(".DT-" + b.sInstance);
e != g.parentNode && (l.children("thead").detach(), l.append(g));
k && e != k.parentNode && (l.children("tfoot").detach(), l.append(k));
b.aaSorting = [];
b.aaSortingFixed = [];
Aa(b);
f(m).removeClass(b.asStripeClasses.join(" "));
f("th, td", g).removeClass(d.sSortable + " " + d.sSortableAsc + " " + d.sSortableDesc + " " + d.sSortableNone);
h.children().detach();
h.append(m);
g = a ? "remove" : "detach";
l[g]();
n[g]();
!a && c && (c.insertBefore(e, b.nTableReinsertBefore), l.css("width", b.sDestroyWidth).removeClass(d.sTable), (p = b.asDestroyStripes.length) &&
h.children().each(function (a) {
f(this).addClass(b.asDestroyStripes[a % p])
}));
c = f.inArray(b, q.settings); - 1 !== c && q.settings.splice(c, 1)
})
});
f.each(["column", "row", "cell"], function (a, b) {
t(b + "s().every()", function (a) {
var c = this.selector.opts,
e = this;
return this.iterator(b, function (d, f, k, l, n) {
a.call(e[b](f, "cell" === b ? k : c, "cell" === b ? c : p), f, k, l, n)
})
})
});
t("i18n()", function (a, b, c) {
var d = this.context[0];
a = U(a)(d.oLanguage);
a === p && (a = b);
c !== p && f.isPlainObject(a) && (a = a[c] !== p ? a[c] : a._);
return a.replace("%d", c)
});
q.version = "1.10.20";
q.settings = [];
q.models = {};
q.models.oSearch = {
bCaseInsensitive: !0,
sSearch: "",
bRegex: !1,
bSmart: !0
};
q.models.oRow = {
nTr: null,
anCells: null,
_aData: [],
_aSortData: null,
_aFilterData: null,
_sFilterRow: null,
_sRowStripe: "",
src: null,
idx: -1
};
q.models.oColumn = {
idx: null,
aDataSort: null,
asSorting: null,
bSearchable: null,
bSortable: null,
bVisible: null,
_sManualType: null,
_bAttrSrc: !1,
fnCreatedCell: null,
fnGetData: null,
fnSetData: null,
mData: null,
mRender: null,
nTh: null,
nTf: null,
sClass: null,
sContentPadding: null,
sDefaultContent: null,
sName: null,
sSortDataType: "std",
sSortingClass: null,
sSortingClassJUI: null,
sTitle: null,
sType: null,
sWidth: null,
sWidthOrig: null
};
q.defaults = {
aaData: null,
aaSorting: [
[0, "asc"]
],
aaSortingFixed: [],
ajax: null,
aLengthMenu: [5, 10, 25, 50, 100],
aoColumns: null,
aoColumnDefs: null,
aoSearchCols: [],
asStripeClasses: null,
bAutoWidth: !0,
bDeferRender: !1,
bDestroy: !1,
bFilter: !0,
bInfo: !0,
bLengthChange: !0,
bPaginate: !0,
bProcessing: !1,
bRetrieve: !1,
bScrollCollapse: !1,
bServerSide: !1,
bSort: !0,
bSortMulti: !0,
bSortCellsTop: !1,
bSortClasses: !0,
bStateSave: !1,
fnCreatedRow: null,
fnDrawCallback: null,
fnFooterCallback: null,
fnFormatNumber: function (a) {
return a.toString().replace(/\B(?=(\d{3})+(?!\d))/g, this.oLanguage.sThousands)
},
fnHeaderCallback: null,
fnInfoCallback: null,
fnInitComplete: null,
fnPreDrawCallback: null,
fnRowCallback: null,
fnServerData: null,
fnServerParams: null,
fnStateLoadCallback: function (a) {
try {
return JSON.parse((-1 === a.iStateDuration ? sessionStorage : localStorage).getItem("DataTables_" + a.sInstance + "_" + location.pathname))
} catch (b) {}
},
fnStateLoadParams: null,
fnStateLoaded: null,
fnStateSaveCallback: function (a, b) {
try {
(-1 === a.iStateDuration ? sessionStorage : localStorage).setItem("DataTables_" + a.sInstance + "_" + location.pathname, JSON.stringify(b))
} catch (c) {}
},
fnStateSaveParams: null,
iStateDuration: 7200,
iDeferLoading: null,
iDisplayLength: 5,
iDisplayStart: 0,
iTabIndex: 0,
oClasses: {},
oLanguage: {
oAria: {
sSortAscending: ": activate to sort column ascending",
sSortDescending: ": activate to sort column descending"
},
oPaginate: {
sFirst: "First",
sLast: "Last",
sNext: "Next",
sPrevious: "Previous"
},
sEmptyTable: "No data available in table",
sInfo: "Showing _START_ to _END_ of _TOTAL_ entries",
sInfoEmpty: "Showing 0 to 0 of 0 entries",
sInfoFiltered: "(filtered from _MAX_ total entries)",
sInfoPostFix: "",
sDecimal: "",
sThousands: ",",
sLengthMenu: "Show _MENU_ entries",
sLoadingRecords: "Loading...",
sProcessing: "Processing...",
sSearch: "Search:",
sSearchPlaceholder: "",
sUrl: "",
sZeroRecords: "No matching records found"
},
oSearch: f.extend({}, q.models.oSearch),
sAjaxDataProp: "data",
sAjaxSource: null,
sDom: "lfrtip",
searchDelay: null,
sPaginationType: "simple_numbers",
sScrollX: "",
sScrollXInner: "",
sScrollY: "",
sServerMethod: "GET",
renderer: null,
rowId: "DT_RowId"
};
H(q.defaults);
q.defaults.column = {
aDataSort: null,
iDataSort: -1,
asSorting: ["asc", "desc"],
bSearchable: !0,
bSortable: !0,
bVisible: !0,
fnCreatedCell: null,
mData: null,
mRender: null,
sCellType: "td",
sClass: "",
sContentPadding: "",
sDefaultContent: null,
sName: "",
sSortDataType: "std",
sTitle: null,
sType: null,
sWidth: null
};
H(q.defaults.column);
q.models.oSettings = {
oFeatures: {
bAutoWidth: null,
bDeferRender: null,
bFilter: null,
bInfo: null,
bLengthChange: null,
bPaginate: null,
bProcessing: null,
bServerSide: null,
bSort: null,
bSortMulti: null,
bSortClasses: null,
bStateSave: null
},
oScroll: {
bCollapse: null,
iBarWidth: 0,
sX: null,
sXInner: null,
sY: null
},
oLanguage: {
fnInfoCallback: null
},
oBrowser: {
bScrollOversize: !1,
bScrollbarLeft: !1,
bBounding: !1,
barWidth: 0
},
ajax: null,
aanFeatures: [],
aoData: [],
aiDisplay: [],
aiDisplayMaster: [],
aIds: {},
aoColumns: [],
aoHeader: [],
aoFooter: [],
oPreviousSearch: {},
aoPreSearchCols: [],
aaSorting: null,
aaSortingFixed: [],
asStripeClasses: null,
asDestroyStripes: [],
sDestroyWidth: 0,
aoRowCallback: [],
aoHeaderCallback: [],
aoFooterCallback: [],
aoDrawCallback: [],
aoRowCreatedCallback: [],
aoPreDrawCallback: [],
aoInitComplete: [],
aoStateSaveParams: [],
aoStateLoadParams: [],
aoStateLoaded: [],
sTableId: "",
nTable: null,
nTHead: null,
nTFoot: null,
nTBody: null,
nTableWrapper: null,
bDeferLoading: !1,
bInitialised: !1,
aoOpenRows: [],
sDom: null,
searchDelay: null,
sPaginationType: "two_button",
iStateDuration: 0,
aoStateSave: [],
aoStateLoad: [],
oSavedState: null,
oLoadedState: null,
sAjaxSource: null,
sAjaxDataProp: null,
bAjaxDataGet: !0,
jqXHR: null,
json: p,
oAjaxData: p,
fnServerData: null,
aoServerParams: [],
sServerMethod: null,
fnFormatNumber: null,
aLengthMenu: null,
iDraw: 0,
bDrawing: !1,
iDrawError: -1,
_iDisplayLength: 5,
_iDisplayStart: 0,
_iRecordsTotal: 0,
_iRecordsDisplay: 0,
oClasses: {},
bFiltered: !1,
bSorted: !1,
bSortCellsTop: null,
oInit: null,
aoDestroyCallback: [],
fnRecordsTotal: function () {
return "ssp" == D(this) ? 1 * this._iRecordsTotal :
this.aiDisplayMaster.length
},
fnRecordsDisplay: function () {
return "ssp" == D(this) ? 1 * this._iRecordsDisplay : this.aiDisplay.length
},
fnDisplayEnd: function () {
var a = this._iDisplayLength,
b = this._iDisplayStart,
c = b + a,
d = this.aiDisplay.length,
e = this.oFeatures,
f = e.bPaginate;
return e.bServerSide ? !1 === f || -1 === a ? b + d : Math.min(b + a, this._iRecordsDisplay) : !f || c > d || -1 === a ? d : c
},
oInstance: null,
sInstance: null,
iTabIndex: 0,
nScrollHead: null,
nScrollFoot: null,
aLastSort: [],
oPlugins: {},
rowIdFn: null,
rowId: null
};
q.ext = C = {
buttons: {},
classes: {},
build: "dt/dt-1.10.20",
errMode: "alert",
feature: [],
search: [],
selector: {
cell: [],
column: [],
row: []
},
internal: {},
legacy: {
ajax: null
},
pager: {},
renderer: {
pageButton: {},
header: {}
},
order: {},
type: {
detect: [],
search: {},
order: {}
},
_unique: 0,
fnVersionCheck: q.fnVersionCheck,
iApiIndex: 0,
oJUIClasses: {},
sVersion: q.version
};
f.extend(C, {
afnFiltering: C.search,
aTypes: C.type.detect,
ofnSearch: C.type.search,
oSort: C.type.order,
afnSortData: C.order,
aoFeatures: C.feature,
oApi: C.internal,
oStdClasses: C.classes,
oPagination: C.pager
});
f.extend(q.ext.classes, {
sTable: "dataTable",
sNoFooter: "no-footer",
sPageButton: "paginate_button",
sPageButtonActive: "current",
sPageButtonDisabled: "disabled",
sStripeOdd: "odd",
sStripeEven: "even",
sRowEmpty: "dataTables_empty",
sWrapper: "dataTables_wrapper",
sFilter: "dataTables_filter",
sInfo: "dataTables_info",
sPaging: "dataTables_paginate paging_",
sLength: "dataTables_length",
sProcessing: "dataTables_processing",
sSortAsc: "sorting_asc",
sSortDesc: "sorting_desc",
sSortable: "sorting",
sSortableAsc: "sorting_asc_disabled",
sSortableDesc: "sorting_desc_disabled",
sSortableNone: "sorting_disabled",
sSortColumn: "sorting_",
sFilterInput: "",
sLengthSelect: "",
sScrollWrapper: "dataTables_scroll",
sScrollHead: "dataTables_scrollHead",
sScrollHeadInner: "dataTables_scrollHeadInner",
sScrollBody: "dataTables_scrollBody",
sScrollFoot: "dataTables_scrollFoot",
sScrollFootInner: "dataTables_scrollFootInner",
sHeaderTH: "",
sFooterTH: "",
sSortJUIAsc: "",
sSortJUIDesc: "",
sSortJUI: "",
sSortJUIAscAllowed: "",
sSortJUIDescAllowed: "",
sSortJUIWrapper: "",
sSortIcon: "",
sJUIHeader: "",
sJUIFooter: ""
});
var Pb = q.ext.pager;
f.extend(Pb, {
simple: function (a, b) {
return ["previous", "next"]
},
full: function (a, b) {
return ["first", "previous", "next", "last"]
},
numbers: function (a, b) {
return [ka(a, b)]
},
simple_numbers: function (a, b) {
return ["previous", ka(a, b), "next"]
},
full_numbers: function (a, b) {
return ["first", "previous", ka(a, b), "next", "last"]
},
first_last_numbers: function (a, b) {
return ["first", ka(a, b), "last"]
},
_numbers: ka,
numbers_length: 7
});
f.extend(!0, q.ext.renderer, {
pageButton: {
_: function (a, b,
c, d, e, h) {
var g = a.oClasses,
k = a.oLanguage.oPaginate,
l = a.oLanguage.oAria.paginate || {},
n, m, q = 0,
t = function (b, d) {
var p, r = g.sPageButtonDisabled,
u = function (b) {
Xa(a, b.data.action, !0)
};
var w = 0;
for (p = d.length; w < p; w++) {
var v = d[w];
if (f.isArray(v)) {
var x = f("<" + (v.DT_el || "div") + "/>").appendTo(b);
t(x, v)
} else {
n = null;
m = v;
x = a.iTabIndex;
switch (v) {
case "ellipsis":
b.append('<span class="ellipsis">…</span>');
break;
case "first":
n = k.sFirst;
0 === e && (x = -1, m += " " + r);
break;
case "previous":
n = k.sPrevious;
0 === e && (x = -1, m +=
" " + r);
break;
case "next":
n = k.sNext;
e === h - 1 && (x = -1, m += " " + r);
break;
case "last":
n = k.sLast;
e === h - 1 && (x = -1, m += " " + r);
break;
default:
n = v + 1, m = e === v ? g.sPageButtonActive : ""
}
null !== n && (x = f("<a>", {
"class": g.sPageButton + " " + m,
"aria-controls": a.sTableId,
"aria-label": l[v],
"data-dt-idx": q,
tabindex: x,
id: 0 === c && "string" === typeof v ? a.sTableId + "_" + v : null
}).html(n).appendTo(b), $a(x, {
action: v
}, u), q++)
}
}
};
try {
var v = f(b).find(y.activeElement).data("dt-idx")
} catch (mc) {}
t(f(b).empty(), d);
v !== p && f(b).find("[data-dt-idx=" +
v + "]").focus()
}
}
});
f.extend(q.ext.type.detect, [function (a, b) {
b = b.oLanguage.sDecimal;
return db(a, b) ? "num" + b : null
}, function (a, b) {
if (a && !(a instanceof Date) && !cc.test(a)) return null;
b = Date.parse(a);
return null !== b && !isNaN(b) || P(a) ? "date" : null
}, function (a, b) {
b = b.oLanguage.sDecimal;
return db(a, b, !0) ? "num-fmt" + b : null
}, function (a, b) {
b = b.oLanguage.sDecimal;
return Ub(a, b) ? "html-num" + b : null
}, function (a, b) {
b = b.oLanguage.sDecimal;
return Ub(a, b, !0) ? "html-num-fmt" + b : null
}, function (a, b) {
return P(a) || "string" ===
typeof a && -1 !== a.indexOf("<") ? "html" : null
}]);
f.extend(q.ext.type.search, {
html: function (a) {
return P(a) ? a : "string" === typeof a ? a.replace(Rb, " ").replace(Ea, "") : ""
},
string: function (a) {
return P(a) ? a : "string" === typeof a ? a.replace(Rb, " ") : a
}
});
var Da = function (a, b, c, d) {
if (0 !== a && (!a || "-" === a)) return -Infinity;
b && (a = Tb(a, b));
a.replace && (c && (a = a.replace(c, "")), d && (a = a.replace(d, "")));
return 1 * a
};
f.extend(C.type.order, {
"date-pre": function (a) {
a = Date.parse(a);
return isNaN(a) ? -Infinity : a
},
"html-pre": function (a) {
return P(a) ?
"" : a.replace ? a.replace(/<.*?>/g, "").toLowerCase() : a + ""
},
"string-pre": function (a) {
return P(a) ? "" : "string" === typeof a ? a.toLowerCase() : a.toString ? a.toString() : ""
},
"string-asc": function (a, b) {
return a < b ? -1 : a > b ? 1 : 0
},
"string-desc": function (a, b) {
return a < b ? 1 : a > b ? -1 : 0
}
});
Ha("");
f.extend(!0, q.ext.renderer, {
header: {
_: function (a, b, c, d) {
f(a.nTable).on("order.dt.DT", function (e, f, g, k) {
a === f && (e = c.idx, b.removeClass(c.sSortingClass + " " + d.sSortAsc + " " + d.sSortDesc).addClass("asc" == k[e] ? d.sSortAsc : "desc" == k[e] ? d.sSortDesc :
c.sSortingClass))
})
},
jqueryui: function (a, b, c, d) {
f("<div/>").addClass(d.sSortJUIWrapper).append(b.contents()).append(f("<span/>").addClass(d.sSortIcon + " " + c.sSortingClassJUI)).appendTo(b);
f(a.nTable).on("order.dt.DT", function (e, f, g, k) {
a === f && (e = c.idx, b.removeClass(d.sSortAsc + " " + d.sSortDesc).addClass("asc" == k[e] ? d.sSortAsc : "desc" == k[e] ? d.sSortDesc : c.sSortingClass), b.find("span." + d.sSortIcon).removeClass(d.sSortJUIAsc + " " + d.sSortJUIDesc + " " + d.sSortJUI + " " + d.sSortJUIAscAllowed + " " + d.sSortJUIDescAllowed).addClass("asc" ==
k[e] ? d.sSortJUIAsc : "desc" == k[e] ? d.sSortJUIDesc : c.sSortingClassJUI))
})
}
}
});
var ib = function (a) {
return "string" === typeof a ? a.replace(/</g, "<").replace(/>/g, ">").replace(/"/g, """) : a
};
q.render = {
number: function (a, b, c, d, e) {
return {
display: function (f) {
if ("number" !== typeof f && "string" !== typeof f) return f;
var g = 0 > f ? "-" : "",
h = parseFloat(f);
if (isNaN(h)) return ib(f);
h = h.toFixed(c);
f = Math.abs(h);
h = parseInt(f, 10);
f = c ? b + (f - h).toFixed(c).substring(2) : "";
return g + (d || "") + h.toString().replace(/\B(?=(\d{3})+(?!\d))/g,
a) + f + (e || "")
}
}
},
text: function () {
return {
display: ib,
filter: ib
}
}
};
f.extend(q.ext.internal, {
_fnExternApiFunc: Qb,
_fnBuildAjax: va,
_fnAjaxUpdate: qb,
_fnAjaxParameters: zb,
_fnAjaxUpdateDraw: Ab,
_fnAjaxDataSrc: wa,
_fnAddColumn: Ia,
_fnColumnOptions: ma,
_fnAdjustColumnSizing: aa,
_fnVisibleToColumnIndex: ba,
_fnColumnIndexToVisible: ca,
_fnVisbleColumns: W,
_fnGetColumns: oa,
_fnColumnTypes: Ka,
_fnApplyColumnDefs: nb,
_fnHungarianMap: H,
_fnCamelToHungarian: L,
_fnLanguageCompat: Ga,
_fnBrowserDetect: lb,
_fnAddData: R,
_fnAddTr: pa,
_fnNodeToDataIndex: function (a,
b) {
return b._DT_RowIndex !== p ? b._DT_RowIndex : null
},
_fnNodeToColumnIndex: function (a, b, c) {
return f.inArray(c, a.aoData[b].anCells)
},
_fnGetCellData: I,
_fnSetCellData: ob,
_fnSplitObjNotation: Na,
_fnGetObjectDataFn: U,
_fnSetObjectDataFn: Q,
_fnGetDataMaster: Oa,
_fnClearTable: qa,
_fnDeleteIndex: ra,
_fnInvalidate: ea,
_fnGetRowElements: Ma,
_fnCreateTr: La,
_fnBuildHead: pb,
_fnDrawHead: ha,
_fnDraw: S,
_fnReDraw: V,
_fnAddOptionsHtml: sb,
_fnDetectHeader: fa,
_fnGetUniqueThs: ua,
_fnFeatureHtmlFilter: ub,
_fnFilterComplete: ia,
_fnFilterCustom: Db,
_fnFilterColumn: Cb,
_fnFilter: Bb,
_fnFilterCreateSearch: Ta,
_fnEscapeRegex: Ua,
_fnFilterData: Eb,
_fnFeatureHtmlInfo: xb,
_fnUpdateInfo: Hb,
_fnInfoMacros: Ib,
_fnInitialise: ja,
_fnInitComplete: xa,
_fnLengthChange: Va,
_fnFeatureHtmlLength: tb,
_fnFeatureHtmlPaginate: yb,
_fnPageChange: Xa,
_fnFeatureHtmlProcessing: vb,
_fnProcessingDisplay: K,
_fnFeatureHtmlTable: wb,
_fnScrollDraw: na,
_fnApplyToChildren: N,
_fnCalculateColumnWidths: Ja,
_fnThrottle: Sa,
_fnConvertToWidth: Jb,
_fnGetWidestNode: Kb,
_fnGetMaxLenString: Lb,
_fnStringToCss: B,
_fnSortFlatten: Y,
_fnSort: rb,
_fnSortAria: Nb,
_fnSortListener: Za,
_fnSortAttachListener: Qa,
_fnSortingClasses: Aa,
_fnSortData: Mb,
_fnSaveState: Ba,
_fnLoadState: Ob,
_fnSettingsFromNode: Ca,
_fnLog: O,
_fnMap: M,
_fnBindAction: $a,
_fnCallbackReg: E,
_fnCallbackFire: A,
_fnLengthOverflow: Wa,
_fnRenderer: Ra,
_fnDataSource: D,
_fnRowAttributes: Pa,
_fnExtend: ab,
_fnCalculateEnd: function () {}
});
f.fn.dataTable = q;
q.$ = f;
f.fn.dataTableSettings = q.settings;
f.fn.dataTableExt = q.ext;
f.fn.DataTable = function (a) {
return f(this).dataTable(a).api()
};
f.each(q, function (a, b) {
f.fn.DataTable[a] = b
});
return f.fn.dataTable
}); | v = f(b).closest("*[data-dt-row]");
return v.length ? [{
row: v.data("dt-row"),
column: v.data("dt-column") |
views.py | from django.shortcuts import render
from django.views import View
from django.http import HttpResponse
from django.utils.decorators import method_decorator
# Create your views here.
"""
类视图必须继承View
类视图中的方法名都必须是请求方法名小写
"""
def my_decorator(view_func):
"""定义装饰器"""
def wrapper(request, *args, **kwargs):
print('装饰器被调用了')
return view_func(request, *args, **kwargs)
return wrapper
# @my_decorator
# def index(request):
# return HttpResponse('ok')
# 将普通装饰器进行转换为方法/类的装饰器
# @method_decorator(要进行转换的装饰器, name='要装饰类中的那个方法)
# @method_decorator(my_decorator, name='get')
class DemoView(View):
"""定义类视图"""
# @my_decorator
@method_decorator(my_decorator)
def get(self, request):
return HttpResponse('get请求业务逻辑')
def post(self, request):
return HttpResponse('post请求业务逻辑')
# 映射机制 动态查找
# hasattr() 判断类中是否有某个成员(属性和方法) bool
# getattr() 获取类中的属性或方法 | def template_demo(request):
"""演示模板使用"""
# render(请求对象, 加载模板文件名, 上下文数据)
# 传入到模板中进行渲染的上下文数据必须是以字典的格式传入
context = {
'name': 'zhangsan',
'alist': [10, 20, 30],
'adict': {'age': 20, 'name': 'ww'}
}
return render(request, 'index.html', context) | # __import__() # 动态导包
# GET /template_demo/ |
random_player.py | import random
from pyschieber.player.base_player import BasePlayer
from pyschieber.trumpf import Trumpf
class RandomPlayer(BasePlayer):
def choose_trumpf(self, geschoben):
return move(choices=list(Trumpf))
def choose_card(self, state=None):
cards = self.allowed_cards(state=state)
return move(choices=cards)
def move(choices):
| allowed = False
while not allowed:
choice = random.choice(choices)
allowed = yield choice
if allowed:
yield None |
|
mockclassfactory_test.js | // Copyright 2008 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License. | goog.require('goog.testing.MockClassFactory');
goog.require('goog.testing.jsunit');
goog.provide('fake.BaseClass');
goog.provide('fake.ChildClass');
goog.provide('goog.testing.MockClassFactoryTest');
// Classes that will be mocked. A base class and child class are used to
// test inheritance.
fake.BaseClass = function(a) {
fail('real object should never be called');
};
fake.BaseClass.prototype.foo = function() {
fail('real object should never be called');
};
fake.BaseClass.prototype.toString = function() {
return 'foo';
};
fake.BaseClass.prototype.toLocaleString = function() {
return 'bar';
};
fake.BaseClass.prototype.overridden = function() {
return 42;
};
fake.ChildClass = function(a) {
fail('real object should never be called');
};
goog.inherits(fake.ChildClass, fake.BaseClass);
fake.ChildClass.staticFoo = function() {
fail('real object should never be called');
};
fake.ChildClass.prototype.bar = function() {
fail('real object should never be called');
};
fake.ChildClass.staticProperty = 'staticPropertyOnClass';
function TopLevelBaseClass() {}
fake.ChildClass.prototype.overridden = function() {
const superResult = fake.ChildClass.base(this, 'overridden');
if (superResult != 42) {
fail('super method not invoked or returned wrong value');
}
return superResult + 1;
};
const mockClassFactory = new goog.testing.MockClassFactory();
const matchers = goog.testing.mockmatchers;
function tearDown() {
mockClassFactory.reset();
}
function testGetStrictMockClass() {
const mock1 = mockClassFactory.getStrictMockClass(fake, fake.BaseClass, 1);
mock1.foo();
mock1.$replay();
const mock2 = mockClassFactory.getStrictMockClass(fake, fake.BaseClass, 2);
mock2.foo();
mock2.$replay();
const mock3 = mockClassFactory.getStrictMockClass(fake, fake.ChildClass, 3);
mock3.foo();
mock3.bar();
mock3.$replay();
const instance1 = new fake.BaseClass(1);
instance1.foo();
mock1.$verify();
const instance2 = new fake.BaseClass(2);
instance2.foo();
mock2.$verify();
const instance3 = new fake.ChildClass(3);
instance3.foo();
instance3.bar();
mock3.$verify();
assertThrows(function() {
new fake.BaseClass(-1);
});
assertTrue(instance1 instanceof fake.BaseClass);
assertTrue(instance2 instanceof fake.BaseClass);
assertTrue(instance3 instanceof fake.ChildClass);
}
function testGetStrictMockClassCreatesAllProxies() {
const mock1 = mockClassFactory.getStrictMockClass(fake, fake.BaseClass, 1);
// toString(), toLocaleString() and others are treaded specially in
// createProxy_().
mock1.toString();
mock1.toLocaleString();
mock1.$replay();
const instance1 = new fake.BaseClass(1);
instance1.toString();
instance1.toLocaleString();
mock1.$verify();
}
function testGetLooseMockClass() {
const mock1 = mockClassFactory.getLooseMockClass(fake, fake.BaseClass, 1);
mock1.foo().$anyTimes().$returns(3);
mock1.$replay();
const mock2 = mockClassFactory.getLooseMockClass(fake, fake.BaseClass, 2);
mock2.foo().$times(3);
mock2.$replay();
const mock3 = mockClassFactory.getLooseMockClass(fake, fake.ChildClass, 3);
mock3.foo().$atLeastOnce().$returns(5);
mock3.bar().$atLeastOnce();
mock3.$replay();
const instance1 = new fake.BaseClass(1);
assertEquals(3, instance1.foo());
assertEquals(3, instance1.foo());
assertEquals(3, instance1.foo());
assertEquals(3, instance1.foo());
assertEquals(3, instance1.foo());
mock1.$verify();
const instance2 = new fake.BaseClass(2);
instance2.foo();
instance2.foo();
instance2.foo();
mock2.$verify();
const instance3 = new fake.ChildClass(3);
assertEquals(5, instance3.foo());
assertEquals(5, instance3.foo());
instance3.bar();
mock3.$verify();
assertThrows(function() {
new fake.BaseClass(-1);
});
assertTrue(instance1 instanceof fake.BaseClass);
assertTrue(instance2 instanceof fake.BaseClass);
assertTrue(instance3 instanceof fake.ChildClass);
}
function testGetStrictStaticMock() {
const staticMock =
mockClassFactory.getStrictStaticMock(fake, fake.ChildClass);
const mock = mockClassFactory.getStrictMockClass(fake, fake.ChildClass, 1);
mock.foo();
mock.bar();
staticMock.staticFoo();
mock.$replay();
staticMock.$replay();
const instance = new fake.ChildClass(1);
instance.foo();
instance.bar();
fake.ChildClass.staticFoo();
mock.$verify();
staticMock.$verify();
assertTrue(instance instanceof fake.BaseClass);
assertTrue(instance instanceof fake.ChildClass);
assertThrows(function() {
mockClassFactory.getLooseStaticMock(fake, fake.ChildClass);
});
}
function testGetStrictStaticMockKeepsStaticProperties() {
const OriginalChildClass = fake.ChildClass;
const staticMock =
mockClassFactory.getStrictStaticMock(fake, fake.ChildClass);
assertEquals(
OriginalChildClass.staticProperty, fake.ChildClass.staticProperty);
}
function testGetLooseStaticMockKeepsStaticProperties() {
const OriginalChildClass = fake.ChildClass;
const staticMock = mockClassFactory.getLooseStaticMock(fake, fake.ChildClass);
assertEquals(
OriginalChildClass.staticProperty, fake.ChildClass.staticProperty);
}
function testGetLooseStaticMock() {
const staticMock = mockClassFactory.getLooseStaticMock(fake, fake.ChildClass);
const mock = mockClassFactory.getStrictMockClass(fake, fake.ChildClass, 1);
mock.foo();
mock.bar();
staticMock.staticFoo().$atLeastOnce();
mock.$replay();
staticMock.$replay();
const instance = new fake.ChildClass(1);
instance.foo();
instance.bar();
fake.ChildClass.staticFoo();
fake.ChildClass.staticFoo();
mock.$verify();
staticMock.$verify();
assertTrue(instance instanceof fake.BaseClass);
assertTrue(instance instanceof fake.ChildClass);
assertThrows(function() {
mockClassFactory.getStrictStaticMock(fake, fake.ChildClass);
});
}
function testFlexibleClassMockInstantiation() {
// This mock should be returned for all instances created with a number
// as the first argument.
const mock = mockClassFactory.getStrictMockClass(
fake, fake.ChildClass, matchers.isNumber);
mock.foo(); // Will be called by the first mock instance.
mock.foo(); // Will be called by the second mock instance.
mock.$replay();
const instance1 = new fake.ChildClass(1);
const instance2 = new fake.ChildClass(2);
instance1.foo();
instance2.foo();
assertThrows(function() { new fake.ChildClass('foo'); });
mock.$verify();
}
function testMockTopLevelClass() {
const mock = mockClassFactory.getStrictMockClass(
goog.global, goog.global.TopLevelBaseClass);
}
function testGoogBaseCall() {
const overriddenFn = fake.ChildClass.prototype.overridden;
const mock = mockClassFactory.getLooseMockClass(fake, fake.ChildClass, 1);
const instance1 = new fake.ChildClass(1);
assertTrue(43 == overriddenFn.call(instance1));
} |
goog.setTestOnly('goog.testing.MockClassFactoryTest');
goog.require('goog.testing'); |
customError.js | constructor(statusCode, message, data = []) {
super();
this.statusCode = statusCode;
this.message = message;
this.data = data;
}
}
module.exports = CustomError; | class CustomError extends Error { |
|
setup.py | """
Setup file for ellpy.
Use setup.cfg to configure your project.
This file was generated with PyScaffold 4.0.2.
PyScaffold helps you to put up the scaffold of your new Python project.
Learn more under: https://pyscaffold.org/
"""
from setuptools import setup | setup(use_scm_version={"version_scheme": "no-guess-dev"})
except: # noqa
print(
"\n\nAn error occurred while building the project, "
"please ensure you have the most updated version of setuptools, "
"setuptools_scm and wheel with:\n"
" pip install -U setuptools setuptools_scm wheel\n\n"
)
raise |
if __name__ == "__main__":
try: |
ui.py | # Copyright (c) 2010-2017 Bo Lin
# Copyright (c) 2010-2017 Yanhong Annie Liu
# Copyright (c) 2010-2017 Stony Brook University
# Copyright (c) 2010-2017 The Research Foundation of SUNY
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import os.path
import ast
import sys
import time
import argparse
from .. import __version__
from ..importer import da_cache_from_source
from .utils import is_valid_debug_level, set_debug_level, to_source, to_file
from .parser import daast_from_file
from .parser import daast_from_str
from .pygen import PythonGenerator
from .incgen import gen_inc_module
from .pseudo import DastUnparser
# DistAlgo filename suffix
DA_SUFFIX = "da"
stdout = sys.stdout
stderr = sys.stderr
# Benchmark stats:
WallclockStart = 0
InputSize = 0
OutputSize = 0
def dastr_to_pyast(src, filename='<str>', args=None):
"""Translates DistAlgo source string into executable Python AST.
'src' is the DistAlgo source string to parse. Optional argument 'filename'
is the filename that appears in error messages. Optional argument 'args'
is a Namespace object containing the command line parameters for the
compiler. Returns the generated Python AST.
"""
daast = daast_from_str(src, filename, args)
if daast is not None:
pyast = PythonGenerator(filename, args).visit(daast)
if pyast is None:
print("Error: unable to generate Python AST from DistAlgo AST"
" for file ", filename, file=stderr)
assert isinstance(pyast, list) and len(pyast) == 1 and \
isinstance(pyast[0], ast.Module)
pyast = pyast[0]
ast.fix_missing_locations(pyast)
return pyast
else:
return None
def dafile_to_pyast(filename, args=None): | Returns the generated Python AST.
"""
if args is None:
args = parse_compiler_args([])
daast = daast_from_file(filename, args)
if daast is not None:
pyast = PythonGenerator(filename, args).visit(daast)
if pyast is None:
print("Error: unable to generate Python AST from DistAlgo AST"
" for file ", filename, file=stderr)
return None
assert isinstance(pyast, list) and len(pyast) == 1 and \
isinstance(pyast[0], ast.Module)
pyast = pyast[0]
ast.fix_missing_locations(pyast)
if args and hasattr(args, 'dump_ast') and args.dump_ast:
print(ast.dump(pyast, include_attributes=True), file=stderr)
return pyast
else:
return None
def _pyast_to_pycode(pyast, filename, _optimize=-1):
try:
return compile(pyast, filename, mode='exec',
dont_inherit=True, optimize=_optimize)
except Exception as e:
print("Unable to generate bytecode: {}".format(e), file=stderr)
return None
def dafile_to_pycode(filename, args=None, _optimize=-1, dfile=None):
"""Generates compiled Python code object from DistAlgo source file.
'filename' is the source file to compile. Optional argument 'args' is a
Namespace object containing the command line parameters for the compiler.
Returns the compiled Python code object, or None in case of errors.
"""
pyast = dafile_to_pyast(filename, args)
if pyast is not None:
return _pyast_to_pycode(pyast,
dfile if dfile is not None else filename,
_optimize)
else:
return None
def dastr_to_pycode(src, filename='<string>', args=None, _optimize=-1):
"""Generates compiled Python code object from DistAlgo source string.
'src' is the DistAlgo source string to compile. Optional argument 'filename'
is the filename that appears in error messages. Optional argument 'args' is
a Namespace object containing the command line parameters for the compiler.
Returns the compiled Python code object, or None in case of errors.
"""
pyast = dastr_to_pyast(src, filename, args)
if pyast is not None:
return _pyast_to_pycode(pyast, filename, _optimize)
else:
return None
def dafile_to_pystr(filename, args=None):
"""Generates executable Python code from DistAlgo source string.
'filename' is the filename of DistAlgo source file. Optional argument 'args'
is a Namespace object containing the command line parameters for the
compiler. Returns the generated Python code as a string.
"""
pyast = dafile_to_pyast(filename, args)
if pyast is not None:
return to_source(pyast)
else:
return None
def dastr_to_pystr(src, filename='<str>', args=None):
"""Generates executable Python code from DistAlgo source string.
'src' is the DistAlgo source string to parse. Optional argument 'filename'
is the filename that appears in error messages. Optional argument 'args'
is a Namespace object containing the command line parameters for the
compiler. Returns the generated Python code as a string.
"""
pyast = dastr_to_pyast(src, filename, args)
if pyast is not None:
return to_source(pyast)
else:
return None
def _sanitize_filename(dfile, no_symlink=True):
"""Check and sanitize 'dfile' for use as a target file.
"""
dirname, basename = os.path.split(dfile)
dirname = os.path.abspath(dirname)
dfile = os.path.join(dirname, basename)
if no_symlink:
if os.path.islink(dfile):
msg = ('{} is a symlink and will be changed into a regular file if '
'the compiler writes a compiled file to it')
raise FileExistsError(msg.format(dfile))
elif os.path.exists(dfile) and not os.path.isfile(dfile):
msg = ('{} is a non-regular file and will be changed into a regular '
'one if the compiler writes a compiled file to it')
raise FileExistsError(msg.format(dfile))
os.makedirs(dirname, exist_ok=True)
return dfile
def dafile_to_pseudofile(filename, outname=None, args=None):
"""Compiles a DistAlgo source file to Python file.
'filename' is the input DistAlgo source file. Optional parameter 'outname'
specifies the file to write the result to. If 'outname' is None the
filename is inferred by replacing the suffix of 'filename' with '.py'.
"""
purename, _, suffix = filename.rpartition(".")
if len(purename) == 0:
purename = suffix
suffix = ""
if suffix == "py":
stderr.write("Warning: skipping '.py' file %s\n" % filename)
return
elif suffix != DA_SUFFIX:
stderr.write("Warning: unknown suffix '%s' in filename '%s'\n" %
(suffix, filename))
if outname is None:
outname = purename + ".dap"
outname = _sanitize_filename(outname)
daast = daast_from_file(filename, args)
if daast:
with open(outname, "w", encoding='utf-8') as outfd:
DastUnparser(daast, outfd)
stderr.write("Written pseudo code file %s.\n"% outname)
def dafile_to_pyfile(filename, outname=None, args=None):
"""Compiles a DistAlgo source file to Python file.
If specified, 'args' should be an object (usually generated by argparse from
the command line arguments) that contains the following properties:
'args.filename' is the input DistAlgo source file. Optional property
'args.outname' specifies the file to write the result to. If
'args.outname' is None the filename is inferred by replacing the suffix of
'args.filename' with '.py'.
"""
purename, _, suffix = filename.rpartition(".")
if len(purename) == 0:
purename = suffix
suffix = ""
if suffix == "py":
stderr.write("Warning: skipping '.py' file %s\n" % filename)
return
elif suffix != DA_SUFFIX:
stderr.write("Warning: unknown suffix '%s' in filename '%s'\n" %
(suffix, filename))
if outname is None:
outname = purename + ".py"
outname = _sanitize_filename(outname)
pyast = dafile_to_pyast(filename, args)
if pyast is not None:
with open(outname, "w", encoding='utf-8') as outfd:
global OutputSize
OutputSize += to_file(pyast, outfd)
stderr.write("Written compiled file %s.\n"% outname)
return 0
else:
return 1
def dafile_to_pycfile(filename, outname=None, optimize=-1, args=None,
dfile=None):
"""Byte-compile one DistAlgo source file to Python bytecode.
"""
import importlib._bootstrap_external
if outname is None:
if optimize >= 0:
opt = '' if optimize == 0 else optimize
outname = da_cache_from_source(filename, optimization=opt)
else:
outname = da_cache_from_source(filename)
outname = _sanitize_filename(outname)
code = dafile_to_pycode(filename, args, _optimize=optimize, dfile=dfile)
if code is not None:
source_stats = os.stat(filename)
PythonVersion = sys.version_info
if PythonVersion < (3, 7):
bytecode = importlib._bootstrap_external._code_to_bytecode(
code, source_stats.st_mtime, source_stats.st_size)
else:
bytecode = importlib._bootstrap_external._code_to_timestamp_pyc(
code, source_stats.st_mtime, source_stats.st_size)
mode = importlib._bootstrap_external._calc_mode(filename)
importlib._bootstrap_external._write_atomic(outname, bytecode, mode)
stderr.write("Written bytecode file {}.\n".format(outname))
return 0
else:
return 1
def check_python_version():
if sys.version_info < (3, 5):
stderr.write("DistAlgo requires Python version 3.5 or newer.\n")
return False
else:
return True
def dafile_to_incfiles(args):
"""Compiles a DistAlgo source file to Python file and generate an interface
file for incrementalization.
'args' is the object generated by argparse from the command line
arguments, and should contain the following properties:
'filename' is the input DistAlgo source file. Optional property 'outname'
specifies the file to write the result to. If 'outname' is None the
filename is inferred by replacing the suffix of 'filename' with '.py'.
Optional property 'incname' is the file to write the incrementalization
module to. If 'incname' is None it defaults to the base of 'filename'
plus '_inc.py'.
"""
filename = args.infile
outname = args.outfile
incname = args.incfile
purename, _, suffix = filename.rpartition(".")
if len(purename) == 0:
purename = suffix
suffix = ""
if suffix == "py":
stderr.write("Warning: skipping '.py' file %s\n" % filename)
return 2
elif suffix != DA_SUFFIX:
stderr.write("Warning: unknown suffix '%s' in filename '%s'\n" %
(suffix, filename))
daast = daast_from_file(filename, args)
if outname is None:
outname = purename + ".py"
if incname is None:
incname = purename + "_inc.py"
if daast is not None:
global OutputSize
inc, ast = gen_inc_module(daast, args, filename=incname)
with open(outname, "w", encoding='utf-8') as outfd:
OutputSize += to_file(ast, outfd)
stderr.write("Written compiled file %s.\n"% outname)
with open(incname, "w", encoding='utf-8') as outfd:
OutputSize += to_file(inc, outfd)
stderr.write("Written interface file %s.\n" % incname)
return 0
else:
return 1
def _add_compiler_args(parser):
"""Install the set of options affecting compilation.
"""
ap = parser
ap.add_argument('--full-event-pattern',
help="If set, use the 'full' format "
"(TYPE, (CLK, DST, SRC), MSG) for event patterns;"
"otherwise, use 'short' format (MSG, SRC)",
action='store_true')
ap.add_argument('--enable-object-pattern',
help="Enable the use of object-style tuple pattern syntax:"
" Object(ARGS...); which is equivalent to "
"('Object', ARGS...)",
action='store_true')
ap.add_argument('--enable-membertest-pattern',
help="Overloads the Python 'in' operator to support using "
"tuple patterns, e.g.: '(_a, 1, _) in S', which is "
"equivalent to 'some((_a, 1, _) in S)'",
action='store_true')
ap.add_argument('--enable-iterator-pattern',
help="Overloads the Python 'for ... in ...' keywords to "
"support using tuple patterns in the target, "
"e.g.: '[b for (_a, 1, b) in S]', which is equivalent to "
"'[b for (var1, var2, b) in S if var1 == a if var2 == b]'",
action='store_true')
ap.add_argument('--use-top-semantic',
help="Use 'top' semantics for query variable and "
"parameter resolution. Under 'top' semantics, only "
"parameters to the top-level query are marked.",
action='store_true')
ap.add_argument('--no-table1',
help="Disable table 1 quantification transformations. "
"Only used when '-i' is enabled.",
action='store_true')
ap.add_argument('--no-table2',
help="Disable table 2 quantification transformations. "
"Only used when '-i' is enabled.",
action='store_true')
ap.add_argument('--no-table3',
help="Disable table 3 quantification transformations. "
"Only used when '-i' is enabled.",
action='store_true')
ap.add_argument('--no-table4',
help="Disable table 4 quantification transformations. "
"Only used when '-i' is enabled.",
action='store_true')
ap.add_argument('--jb-style',
help="Generate Jon-friendly quantification transformations. "
"Only useful with '-i'.",
action='store_true')
ap.add_argument('--no-all-tables',
help="Disable all quantification transformations. "
"Only useful with '-i'.",
action='store_true')
ap.add_argument('--module-name', type=str, default='__main__',
help="Name of the module to be compiled.")
def parse_compiler_args(argv):
ap = argparse.ArgumentParser(argument_default=argparse.SUPPRESS)
_add_compiler_args(ap)
args = ap.parse_args(argv)
return args
def main(argv=None):
"""Main entry point when invoking compiler module from command line.
"""
if not check_python_version():
return 2
if argv is None:
argv = sys.argv[1:]
ap = argparse.ArgumentParser(description="DistAlgo compiler.",
argument_default=argparse.SUPPRESS)
_add_compiler_args(ap)
ap.add_argument('-o', help="Output file name.",
dest="outfile", default=None)
ap.add_argument('-L', help="Logging output level.",
dest="debug", default=None)
ap.add_argument('-i',
help="Generate interface code for plugging"
" into incrementalizer.",
action='store_true', dest="geninc", default=False)
ap.add_argument("-m", "--inc-module-name",
help="name of the incrementalized interface module, "
"defaults to source module name + '_inc'. ",
dest="incfile", default=None)
ap.add_argument('-O', '--optimize', type=int, default=-1)
ap.add_argument('-D', '--dump-ast', default=False, action='store_true')
ap.add_argument('-C', '--write-bytecode', default=False, action='store_true')
ap.add_argument('-I', '--interactive',
help="Launch interactive shell.",
action='store_true', default=False)
ap.add_argument('-B', '--benchmark',
help="Print the elapsed wallclock time of the compile session.",
action='store_true', default=False)
ap.add_argument('-p', help="Generate DistAlgo pseudo code.",
action='store_true', dest="genpsd", default=False)
ap.add_argument("-v", "--version", action="version", version=__version__)
ap.add_argument('--psdfile', help="Name of DistAlgo pseudo code output file.",
dest="psdfile", default=None)
ap.add_argument('infile', metavar='SOURCEFILE', type=str,
help="DistAlgo input source file.")
args = ap.parse_args(argv)
if args.benchmark:
global WallclockStart
WallclockStart = time.perf_counter()
if args.interactive:
import code
code.interact()
return
if args.debug is not None:
try:
level = int(args.debug)
if is_valid_debug_level(level):
set_debug_level(level)
else:
raise ValueError()
except ValueError:
stderr.write("Invalid debugging level %s.\n" % str(args.debug))
if args.genpsd:
res = dafile_to_pseudofile(args.infile, args.psdfile, args)
elif args.geninc:
res = dafile_to_incfiles(args)
elif args.write_bytecode:
res = dafile_to_pycfile(args.infile, args.outfile, args.optimize,
args=args)
else:
res = dafile_to_pyfile(args.infile, args.outfile, args)
if args.benchmark:
import json
walltime = time.perf_counter() - WallclockStart
jsondata = {'Wallclock_time' : walltime,
"Input_size" : InputSize,
"Output_size" : OutputSize}
print("###OUTPUT: " + json.dumps(jsondata))
return res | """Translates DistAlgo source file into executable Python AST.
'filename' is the filename of source file. Optional argument 'args' is a
Namespace object containing the command line parameters for the compiler. |
base_controller.go | /**
* Created by GoLand.
* User: link1st
* Date: 2019-07-25
* Time: 12:11
*/
package controllers
import (
"github.com/gin-gonic/gin"
"lab225/common"
"net/http"
)
type BaseController struct {
gin.Context
}
// 获取全部请求解析到map
func Response(c *gin.Context, code uint32, msg string, data map[string]interface{}) {
message := comm | on.Response(code, msg, data)
// 允许跨域
c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
c.Header("Access-Control-Allow-Origin", "*") // 这是允许访问所有域
c.Header("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE,UPDATE") // 服务器支持的所有跨域请求的方法,为了避免浏览次请求的多次'预检'请求
c.Header("Access-Control-Allow-Headers", "Authorization, Content-Length, X-CSRF-Token, Token,session,X_Requested_With,Accept, Origin, Host, Connection, Accept-Encoding, Accept-Language,DNT, X-CustomHeader, Keep-Alive, User-Agent, X-Requested-With, If-Modified-Since, Cache-Control, Content-Type, Pragma")
c.Header("Access-Control-Expose-Headers", "Content-Length, Access-Control-Allow-Origin, Access-Control-Allow-Headers,Cache-Control,Content-Language,Content-Type,Expires,Last-Modified,Pragma,FooBar") // 跨域关键设置 让浏览器可以解析
c.Header("Access-Control-Allow-Credentials", "true") // 跨域请求是否需要带cookie信息 默认设置为true
c.Set("content-type", "application/json") // 设置返回格式是json
c.JSON(http.StatusOK, message)
return
}
|
|
access_control.gen.go | package service
// This file is auto-generated.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
//
// Definitions file that controls how this file is generated:
// - automation.workflow.yaml
// - automation.yaml
import (
"context"
"fmt"
"github.com/cortezaproject/corteza-server/automation/types"
"github.com/cortezaproject/corteza-server/pkg/actionlog"
"github.com/cortezaproject/corteza-server/pkg/rbac"
"github.com/spf13/cast"
"strings"
)
type (
accessControl struct {
actionlog actionlog.Recorder
rbac interface {
Can(rbac.Session, string, rbac.Resource) bool
Grant(context.Context, ...*rbac.Rule) error
FindRulesByRoleID(roleID uint64) (rr rbac.RuleSet)
}
}
)
// AccessControl constructs the service wired to the global RBAC facility
// and the package-level action log recorder.
func AccessControl() *accessControl {
	svc := &accessControl{
		actionlog: DefaultActionlog,
		rbac:      rbac.Global(),
	}
	return svc
}
// can resolves the RBAC session from the request context and asks the
// underlying RBAC facility whether op is permitted on the resource.
func (svc accessControl) can(ctx context.Context, op string, res rbac.Resource) bool {
	sess := rbac.ContextToSession(ctx)
	return svc.rbac.Can(sess, op, res)
}
// Effective returns the set of effective permissions for all given resources:
// every operation defined for a resource is evaluated against the current user.
func (svc accessControl) Effective(ctx context.Context, rr ...rbac.Resource) (ee rbac.EffectiveSet) {
	for _, resource := range rr {
		ident := resource.RbacResource()
		for operation := range rbacResourceOperations(ident) {
			allowed := svc.can(ctx, operation, resource)
			ee.Push(ident, operation, allowed)
		}
	}
	return ee
}
func (svc accessControl) List() (out []map[string]string) {
def := []map[string]string{
{
"type": types.WorkflowResourceType,
"any": types.WorkflowRbacResource(0),
"op": "read",
},
{
"type": types.WorkflowResourceType,
"any": types.WorkflowRbacResource(0),
"op": "update",
},
{
"type": types.WorkflowResourceType,
"any": types.WorkflowRbacResource(0),
"op": "delete",
},
{
"type": types.WorkflowResourceType,
"any": types.WorkflowRbacResource(0),
"op": "undelete",
},
{
"type": types.WorkflowResourceType,
"any": types.WorkflowRbacResource(0),
"op": "execute",
},
{
"type": types.WorkflowResourceType,
"any": types.WorkflowRbacResource(0),
"op": "triggers.manage",
},
{
"type": types.WorkflowResourceType,
"any": types.WorkflowRbacResource(0),
"op": "sessions.manage",
},
{
"type": types.ComponentResourceType,
"any": types.ComponentRbacResource(),
"op": "grant",
},
{
"type": types.ComponentResourceType,
"any": types.ComponentRbacResource(),
"op": "workflow.create",
},
{
"type": types.ComponentResourceType,
"any": types.ComponentRbacResource(),
"op": "triggers.search",
},
{
"type": types.ComponentResourceType,
"any": types.ComponentRbacResource(),
"op": "sessions.search",
},
{
"type": types.ComponentResourceType,
"any": types.ComponentRbacResource(),
"op": "workflows.search",
},
}
func(svc interface{}) {
if svc, is := svc.(interface{}).(interface{ list() []map[string]string }); is |
}(svc)
return def
}
// Grant applies one or more RBAC rules
//
// The caller must hold the grant permission; every rule is validated before
// anything is persisted, and successful grants are action-logged.
//
// This function is auto-generated
func (svc accessControl) Grant(ctx context.Context, rr ...*rbac.Rule) error {
	if !svc.CanGrant(ctx) {
		// @todo should be altered to check grant permissions PER resource
		return AccessControlErrNotAllowedToSetPermissions()
	}

	// Reject the whole batch on the first invalid rule.
	for _, rule := range rr {
		if err := rbacResourceValidator(rule.Resource, rule.Operation); err != nil {
			return err
		}
	}

	err := svc.rbac.Grant(ctx, rr...)
	if err != nil {
		return AccessControlErrGeneric().Wrap(err)
	}

	svc.logGrants(ctx, rr)
	return nil
}
// logGrants records one action-log entry per granted rule; it is a no-op
// when no recorder is configured.
//
// This function is auto-generated
func (svc accessControl) logGrants(ctx context.Context, rr []*rbac.Rule) {
	if svc.actionlog == nil {
		return
	}

	for _, rule := range rr {
		action := AccessControlActionGrant(&accessControlActionProps{rule})
		action.log = rule.String()
		action.resource = rule.Resource
		svc.actionlog.Record(ctx, action.ToAction())
	}
}
// FindRulesByRoleID finds all rules for a specific role
//
// Only users holding the grant permission may inspect role rules.
//
// This function is auto-generated
func (svc accessControl) FindRulesByRoleID(ctx context.Context, roleID uint64) (rbac.RuleSet, error) {
	if svc.CanGrant(ctx) {
		return svc.rbac.FindRulesByRoleID(roleID), nil
	}
	return nil, AccessControlErrNotAllowedToSetPermissions()
}
// CanReadWorkflow reports whether the current user may read the given workflow.
//
// This function is auto-generated
func (svc accessControl) CanReadWorkflow(ctx context.Context, r *types.Workflow) bool {
	const op = "read"
	return svc.can(ctx, op, r)
}
// CanUpdateWorkflow reports whether the current user may update the given workflow.
//
// This function is auto-generated
func (svc accessControl) CanUpdateWorkflow(ctx context.Context, r *types.Workflow) bool {
	const op = "update"
	return svc.can(ctx, op, r)
}
// CanDeleteWorkflow reports whether the current user may delete the given workflow.
//
// This function is auto-generated
func (svc accessControl) CanDeleteWorkflow(ctx context.Context, r *types.Workflow) bool {
	const op = "delete"
	return svc.can(ctx, op, r)
}
// CanUndeleteWorkflow reports whether the current user may undelete the given workflow.
//
// This function is auto-generated
func (svc accessControl) CanUndeleteWorkflow(ctx context.Context, r *types.Workflow) bool {
	const op = "undelete"
	return svc.can(ctx, op, r)
}
// CanExecuteWorkflow reports whether the current user may execute the given workflow.
//
// This function is auto-generated
func (svc accessControl) CanExecuteWorkflow(ctx context.Context, r *types.Workflow) bool {
	const op = "execute"
	return svc.can(ctx, op, r)
}
// CanManageTriggersOnWorkflow reports whether the current user may manage
// triggers on the given workflow.
//
// This function is auto-generated
func (svc accessControl) CanManageTriggersOnWorkflow(ctx context.Context, r *types.Workflow) bool {
	const op = "triggers.manage"
	return svc.can(ctx, op, r)
}
// CanManageSessionsOnWorkflow reports whether the current user may manage
// sessions on the given workflow.
//
// This function is auto-generated
func (svc accessControl) CanManageSessionsOnWorkflow(ctx context.Context, r *types.Workflow) bool {
	const op = "sessions.manage"
	return svc.can(ctx, op, r)
}
// CanGrant reports whether the current user may manage automation permissions.
//
// This function is auto-generated
func (svc accessControl) CanGrant(ctx context.Context) bool {
	const op = "grant"
	return svc.can(ctx, op, &types.Component{})
}
// CanCreateWorkflow reports whether the current user may create workflows.
//
// This function is auto-generated
func (svc accessControl) CanCreateWorkflow(ctx context.Context) bool {
	const op = "workflow.create"
	return svc.can(ctx, op, &types.Component{})
}
// CanSearchTriggers reports whether the current user may list, search or
// filter triggers.
//
// This function is auto-generated
func (svc accessControl) CanSearchTriggers(ctx context.Context) bool {
	const op = "triggers.search"
	return svc.can(ctx, op, &types.Component{})
}
// CanSearchSessions reports whether the current user may list, search or
// filter sessions.
//
// This function is auto-generated
func (svc accessControl) CanSearchSessions(ctx context.Context) bool {
	const op = "sessions.search"
	return svc.can(ctx, op, &types.Component{})
}
// CanSearchWorkflows reports whether the current user may list, search or
// filter workflows.
//
// This function is auto-generated
func (svc accessControl) CanSearchWorkflows(ctx context.Context) bool {
	const op = "workflows.search"
	return svc.can(ctx, op, &types.Component{})
}
// rbacResourceValidator validates known component's resource by routing it to the appropriate validator
//
// Returns an error when the resource type is unknown or when any of the given
// operations is not defined for that resource type.
//
// This function is auto-generated
func rbacResourceValidator(r string, oo ...string) error {
	switch rbac.ResourceType(r) {
	case types.WorkflowResourceType:
		return rbacWorkflowResourceValidator(r, oo...)
	case types.ComponentResourceType:
		return rbacComponentResourceValidator(r, oo...)
	}

	// Bug fix: %q already wraps the value in double quotes; the previous
	// '%q' produced doubled quoting like '"foo"' in the error message.
	return fmt.Errorf("unknown resource type %q", r)
}
// rbacResourceOperations returns defined operations for a requested resource,
// or nil when the resource type is unknown.
//
// This function is auto-generated
func rbacResourceOperations(r string) map[string]bool {
	var ops []string

	switch rbac.ResourceType(r) {
	case types.WorkflowResourceType:
		ops = []string{
			"read",
			"update",
			"delete",
			"undelete",
			"execute",
			"triggers.manage",
			"sessions.manage",
		}
	case types.ComponentResourceType:
		ops = []string{
			"grant",
			"workflow.create",
			"triggers.search",
			"sessions.search",
			"workflows.search",
		}
	default:
		return nil
	}

	out := make(map[string]bool, len(ops))
	for _, op := range ops {
		out[op] = true
	}
	return out
}
// rbacWorkflowResourceValidator checks validity of rbac resource and operations
//
// Can be called without operations to check for validity of resource string only
//
// This function is auto-generated
func rbacWorkflowResourceValidator(r string, oo ...string) error {
	// Every requested operation must be defined for this resource type
	// (see rbacResourceOperations); defOps is nil-safe for unknown types.
	defOps := rbacResourceOperations(r)
	for _, o := range oo {
		if !defOps[o] {
			return fmt.Errorf("invalid operation '%s' for automation Workflow resource", o)
		}
	}
	if !strings.HasPrefix(r, types.WorkflowResourceType) {
		// expecting resource to always include path
		return fmt.Errorf("invalid resource type")
	}
	const sep = "/"
	var (
		// pp:  path components after the resource-type prefix
		// prc: names of the expected path components (used in error messages)
		pp = strings.Split(strings.Trim(r[len(types.WorkflowResourceType):], sep), sep)
		prc = []string{
			"ID",
		}
	)
	if len(pp) != len(prc) {
		return fmt.Errorf("invalid resource path structure")
	}
	for i := 0; i < len(pp); i++ {
		if pp[i] != "*" {
			// A concrete reference may not follow a wildcard level: once a
			// level is wildcarded, all deeper levels must be wildcarded too.
			if i > 0 && pp[i-1] == "*" {
				return fmt.Errorf("invalid resource path wildcard level (%d) for Workflow", i)
			}
			// Concrete references must parse as uint64 identifiers.
			if _, err := cast.ToUint64E(pp[i]); err != nil {
				return fmt.Errorf("invalid reference for %s: '%s'", prc[i], pp[i])
			}
		}
	}
	return nil
}
// rbacComponentResourceValidator checks validity of rbac resource and operations
//
// Can be called without operations to check for validity of resource string only
//
// This function is auto-generated
func rbacComponentResourceValidator(r string, oo ...string) error {
	// The component resource has no path components: only the requested
	// operations and the type prefix need checking.
	known := rbacResourceOperations(r)
	for _, op := range oo {
		if !known[op] {
			return fmt.Errorf("invalid operation '%s' for automation resource", op)
		}
	}

	if !strings.HasPrefix(r, types.ComponentResourceType) {
		// expecting resource to always include path
		return fmt.Errorf("invalid resource type")
	}

	return nil
}
| {
def = append(def, svc.list()...)
} |
XAxis.js | export default {
name: 'XAxis',
props: [
{
name: 'hide',
type: 'Boolean',
defaultVal: 'false',
isOptional: false,
desc: {
'en-US': 'If set true, the axis do not display in the chart.',
'zh-CN': '是否隐藏 x 轴。',
},
}, {
name: 'dataKey',
type: 'String | Number',
defaultVal: 'null',
isOptional: true,
desc: {
'en-US': 'The key of data displayed in the axis.',
'zh-CN': '指定展示的数据维度。',
},
}, {
name: 'xAxisId',
type: 'String | Number',
defaultVal: '0',
isOptional: false,
desc: {
'en-US': 'The unique id of x-axis.',
'zh-CN': 'x 轴的唯一 id。',
},
}, {
name: 'width',
type: 'Number',
defaultVal: '0',
isOptional: false,
desc: {
'en-US': 'The width of axis which is usually calculated internally.',
'zh-CN': 'x 轴的宽度,一般在图表内部计算。',
},
}, {
name: 'height',
type: 'Number',
defaultVal: '30',
isOptional: false,
desc: {
'en-US': 'The height of axis, which can be setted by user.',
'zh-CN': 'x 轴的高度,这个可以根据需要进行配置。',
},
}, {
name: 'orientation',
type: '\'bottom\' , \'top\'',
defaultVal: '\'bottom\'',
isOptional: false,
desc: {
'en-US': 'The orientation of axis',
'zh-CN': 'x 轴的位置。',
},
}, {
name: 'type',
type: '\'number\' | \'category\'',
defaultVal: '\'category\'',
isOptional: false,
desc: {
'en-US': 'The type of axis.',
'zh-CN': 'x 轴的类型:数值轴、类目轴。',
},
}, {
name: 'allowDecimals',
type: 'Boolean',
defaultVal: 'true',
isOptional: false,
desc: {
'en-US': 'Allow the ticks of XAxis to be decimals or not.',
'zh-CN': '是否允许小数类型的刻度。',
},
}, {
name: 'allowDataOverflow',
type: 'Boolean',
defaultVal: 'false',
isOptional: false,
desc: {
'en-US': 'When domain of the axis is specified and the type of the axis is \'number\', if allowDataOverflow is set to be false, the domain will be adjusted when the minimum value of data is smaller than domain[0] or the maximum value of data is greater than domain[1] so that the axis displays all data values. If set to true, graphic elements (line, area, bars) will be clipped to conform to the specified domain.',
'zh-CN': '当轴是数值轴时,指定轴的定义域(domain)的时候,如果 allowDataOverflow 的值为 false,我们会根据数据的最大值和最小值来调整 domain,确保所有的数据能够展示。如果 allowDataOverflow 的值为 true,不会调整 domain ,而是将相应的图形元素会直接裁剪掉。',
},
}, {
name: 'allowDuplicatedCategory',
type: 'Boolean',
defaultVal: 'true',
isOptional: false,
desc: {
'en-US': 'Allow the axis has duplicated categorys or not when the type of axis is "category".',
'zh-CN': '是否允许类目轴有重复的类目。',
},
}, {
name: 'tickCount',
type: 'Number',
defaultVal: '5',
isOptional: false,
desc: {
'en-US': 'The count of axis ticks. Not used if \'type\' is \'category\'.',
'zh-CN': '刻度数。如果\'type\'是\'category\',则不使用。',
},
}, {
name: 'domain',
type: 'Array',
defaultVal: '[0, \'auto\']',
isOptional: true,
desc: {
'en-US': 'Specify the domain of axis when the axis is a number axis. The length of domain should be 2, and we will validate the values in domain. And each element in the array can be a number, \'auto\', \'dataMin\', \'dataMax\', a string like \'dataMin - 20\', \'dataMax + 100\', or a function that accepts a single argument and returns a number. If any element of domain is set to be \'auto\', comprehensible scale ticks will be calculated, and the final domain of axis is generated by the ticks.',
'zh-CN': '当 x 轴是数值轴时,通过这个配置可以指定 x 轴刻度函数的定义域。这个配置是一个二元数组,数组中的元素可以是一个数值,"auto", "dataMin", "dataMax" 或者类似于"dataMin - 100", "dataMax + 200"这样的字符串。如果任意元素的取值为"auto",我们会生成可读性高的刻度,并且保证设置的刻度数。',
},
format: [
'<XAxis type="number" domain={[\'dataMin\', \'dataMax\']} />',
'<XAxis type="number" domain={[0, \'dataMax\']} />',
'<XAxis type="number" domain={[\'auto\', \'auto\']} />',
'<XAxis type="number" domain={[0, \'dataMax + 1000\']} />',
'<XAxis type="number" domain={[\'dataMin - 100\', \'dataMax + 100\']} />',
'<XAxis type="number" domain={[dataMin => (0 - Math.abs(dataMin)), dataMax => (dataMax * 2)]} />',
],
examples: [{
name: 'Numeric XAxis with specified domain',
url: '/examples/VerticalLineChartWithSpecifiedDomain',
}],
}, {
name: 'interval',
type: '"preserveStart" | "preserveEnd" | "preserveStartEnd" | Number',
defaultVal: '\'preserveEnd\'',
isOptional: false,
desc: {
'en-US': 'If set 0, all the ticks will be shown. If set preserveStart", "preserveEnd" or "preserveStartEnd", the ticks which is to be shown or hidden will be calculated automatically.',
'zh-CN': '当值为0时,所有的刻度都会展示。如果值为1,则会间隔1个刻度来展示相应的刻度。当值为 "preserveStart" 时,根据刻度的宽度自动计算间隔,在有足够的空间的情况下,会从头部的刻度开始计算。当值为 "preserveEnd" 时,会从尾部的刻度开始计算是否展示。当值为"preserveStartEnd",会从头部、尾部的刻度开始计算向中间计算是否展示。',
},
}, {
name: 'padding',
type: 'Object',
defaultVal: '{ left: 0, right: 0 }',
isOptional: false,
desc: {
'en-US': 'Specify the padding of x-axis.',
'zh-CN': '指定内边距。',
},
format: [
'<XAxis padding={{ left: 10 }} />',
'<XAxis padding={{ right: 20 }} />',
'<XAxis padding={{ left: 20, right: 20 }} />',
],
examples: [{
name: 'XAxis with non-zero padding',
url: '/examples/LineChartWithXAxisPading',
}],
}, {
name: 'minTickGap',
type: 'Number',
defaultVal: '5',
isOptional: false,
desc: { | }, {
name: 'axisLine',
type: 'Boolean | Object',
defaultVal: 'true',
isOptional: false,
desc: {
'en-US': 'If set false, no axis line will be drawn. If set a object, the option is the configuration of axis line.',
'zh-CN': '轴线配置。当值为 false 时,不绘制轴线。当值为对象类型时,会把这个对象解析成轴线的属性配置。',
},
}, {
name: 'tickLine',
type: 'Boolean | Object',
defaultVal: 'true',
isOptional: false,
desc: {
'en-US': 'If set false, no axis tick lines will be drawn. If set a object, the option is the configuration of tick lines.',
'zh-CN': '刻度线配置。当值为 false 时,不绘制刻度线。当值为对象类型时,会把这个对象解析成刻度线的属性配置。',
},
}, {
name: 'tickSize',
type: 'Number',
defaultVal: '6',
isOptional: false,
desc: {
'en-US': 'The length of tick line.',
'zh-CN': '刻度线的长度。',
},
}, {
name: 'tickFormatter',
type: 'Function',
defaultVal: 'null',
isOptional: true,
desc: {
'en-US': 'The formatter function of tick.',
'zh-CN': '刻度的格式化函数。',
},
}, {
name: 'ticks',
type: 'Array',
defaultVal: 'null',
isOptional: true,
desc: {
'en-US': 'Set the values of axis ticks manually.',
'zh-CN': '指定需要展示的刻度。',
},
}, {
name: 'tick',
type: 'Boolean | Object | ReactElement',
defaultVal: 'null',
isOptional: true,
desc: {
'en-US': 'If set false, no ticks will be drawn. If set a object, the option is the configuration of ticks. If set a React element, the option is the custom react element of drawing ticks.',
'zh-CN': '刻度配置。当值为 false 时,不绘制刻度。当值为对象类型时,会把这个对象解析成刻度的属性配置。当值为 React element,会克隆这个元素来渲染刻度。',
},
format: [
'<XAxis tick={false} />',
'<XAxis tick={{stroke: \'red\', strokeWidth: 2}} />',
'<XAxis tick={<CustomizedTick />} />',
],
examples: [{
name: 'A line chart with customized x-axis tick',
url: '/examples/CustomizedLabelLineChart',
}],
}, {
name: 'mirror',
type: 'Boolean',
defaultVal: 'false',
isOptional: false,
desc: {
'en-US': 'If set true, flips ticks around the axis line, displaying the labels inside the chart instead of outside.',
'zh-CN': '如果值为 true , 将刻度展示在轴的内侧,而不是外侧。',
},
}, {
name: 'reversed',
type: 'Boolean',
defaultVal: 'false',
isOptional: false,
desc: {
'en-US': 'Reverse the ticks or not.',
'zh-CN': '是否反转刻度的顺序',
},
}, {
name: 'label',
type: 'String | Number | ReactElement | Object',
defaultVal: 'null',
isOptional: true,
desc: {
'en-US': 'If set a string or a number, default label will be drawn, and the option is content. If set a React element, the option is the custom react element of drawing label. If an object, the option is the props of a new Label instance.',
'zh-CN': '当值为简单类型的数值或者字符串时,这个值会被渲染成文字标签。当值为 React element,会克隆这个元素来渲染文字标签。如果一个对象,该选项是一个新的Label实例的道具。',
},
format: [
'<XAxis label="Height" />',
'<XAxis label={<CustomizedLabel />} />',
'<XAxis label={{ value: "XAxis Label" }} />',
],
examples: [
{
name: 'A composed chart with axis labels',
url: '/examples/ComposedChartWithAxisLabels',
},
],
}, {
name: 'scale',
type: `'auto' | 'linear' | 'pow' | 'sqrt' | 'log' | 'identity' | 'time' |
'band' | 'point' | 'ordinal' | 'quantile' | 'quantize' | 'utc' | 'sequential' |
'threshold' | Function`,
defaultVal: 'auto',
isOptional: false,
desc: {
'en-US': `If set to 'auto', the scale function is decided by the type of chart, and the props type.
When set to 'time', make sure to also set type to 'number' and to include a domain.`,
'zh-CN': '当值为 "auto" 时,会根据图表类型来生成 scale 函数,也可以传入自定义的函数作为 scale 函数。',
},
format: [
'<XAxis scale="log" />',
`
import { scaleLog } from 'd3-scale';
const scale = scaleLog().base(Math.E);
...
<XAxis scale={scale} />
...
`,
],
}, {
name: 'unit',
type: 'String | Number',
defaultVal: 'null',
isOptional: true,
desc: {
'en-US': 'The unit of data displayed in the axis. This option will be used to represent an index unit in a scatter chart.',
'zh-CN': 'x 轴展示数据的单位。这个单位会展示在 Tooltip 的数值后面。',
},
}, {
name: 'name',
type: 'String | Number',
defaultVal: 'null',
isOptional: true,
desc: {
'en-US': 'The name of data displayed in the axis. This option will be used to represent an index in a scatter chart.',
'zh-CN': 'x 轴展示数据的名称。这个单位会展示在 Tooltip 内容中。',
},
}, {
name: 'onClick',
type: 'Function',
isOptional: true,
desc: {
'en-US': 'The customized event handler of click on the ticks of this axis',
'zh-CN': '刻度 click 事件的回调函数。',
},
}, {
name: 'onMouseDown',
type: 'Function',
isOptional: true,
desc: {
'en-US': 'The customized event handler of mousedown on the the ticks of this axis',
'zh-CN': '刻度 mousedown 事件的回调函数。',
},
}, {
name: 'onMouseUp',
type: 'Function',
isOptional: true,
desc: {
'en-US': 'The customized event handler of mouseup on the ticks of this axis',
'zh-CN': '刻度 mouseup 事件的回调函数。',
},
}, {
name: 'onMouseMove',
type: 'Function',
isOptional: true,
desc: {
'en-US': 'The customized event handler of mousemove on the ticks of this axis',
'zh-CN': '刻度 mousemove 事件的回调函数。',
},
}, {
name: 'onMouseOver',
type: 'Function',
isOptional: true,
desc: {
'en-US': 'The customized event handler of mouseover on the ticks of this axis',
'zh-CN': '刻度 mouseover 事件的回调函数。',
},
}, {
name: 'onMouseOut',
type: 'Function',
isOptional: true,
desc: {
'en-US': 'The customized event handler of mouseout on the ticks of this axis',
'zh-CN': '刻度 mouseout 事件的回调函数。',
},
}, {
name: 'onMouseEnter',
type: 'Function',
isOptional: true,
desc: {
'en-US': 'The customized event handler of moustenter on the ticks of this axis',
'zh-CN': '刻度 moustenter 事件的回调函数。',
},
}, {
name: 'onMouseLeave',
type: 'Function',
isOptional: true,
desc: {
'en-US': 'The customized event handler of mouseleave on the ticks of this axis',
'zh-CN': '刻度 mouseleave 事件的回调函数。',
},
}, {
name: 'tickMargin',
type: 'Number',
isOptional: true,
desc: {
'en-US': 'The margin between tick line and tick.',
'zh-CN': '刻度线和刻度之间的间隔。',
},
},
],
parentComponents: [
'AreaChart', 'BarChart', 'LineChart', 'ComposedChart',
'ScatterChart',
],
childrenComponents: [
'Label',
],
}; | 'en-US': 'The minimum gap between two adjacent labels.',
'zh-CN': '两个刻度之前最小间隔宽度。',
}, |
get_transaction_directdebits_id_recalls_recall_id_submissions_submission_id_responses.go | // Code generated by go-swagger; DO NOT EDIT.
package direct_debits
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/runtime"
strfmt "github.com/go-openapi/strfmt"
"github.com/form3tech-oss/go-form3/v3/pkg/generated/models"
)
// GetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDReader is a Reader for the GetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionID structure.
type GetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDReader struct {
formats strfmt.Registry
}
// ReadResponse reads a server response into the received o.
// Only HTTP 200 is modeled; any other status becomes a generic API error.
func (o *GetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
	if response.Code() != 200 {
		return nil, runtime.NewAPIError("unknown error", response, response.Code())
	}

	result := NewGetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDOK()
	if err := result.readResponse(response, consumer, o.formats); err != nil {
		return nil, err
	}
	return result, nil
}
// NewGetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDOK creates a GetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDOK with default headers values
func NewGetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDOK() *GetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDOK |
/*GetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDOK handles this case with default header values.

Recall submission details
*/
type GetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDOK struct {

	// Payload: the decoded recall-submission details; populated by readResponse.
	// isStream: false
	*models.DirectDebitRecallSubmissionDetailsResponse
}
// Error implements the error interface; the status code is fixed at 200
// because this type models only the successful response.
func (o *GetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDOK) Error() string {
	const route = "[GET /transaction/directdebits/{id}/recalls/{recallId}/submissions/{submissionId}]"
	return fmt.Sprintf(route+"[%d] getTransactionDirectdebitsIdRecallsRecallIdSubmissionsSubmissionIdOK", 200)
}
// readResponse decodes the response body into a freshly allocated payload.
// io.EOF from the consumer (empty body) is deliberately tolerated.
func (o *GetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
	payload := new(models.DirectDebitRecallSubmissionDetailsResponse)
	o.DirectDebitRecallSubmissionDetailsResponse = payload

	err := consumer.Consume(response.Body(), payload)
	if err != nil && err != io.EOF {
		return err
	}
	return nil
}
| {
return &GetTransactionDirectdebitsIDRecallsRecallIDSubmissionsSubmissionIDOK{}
} |
set_op.rs | use crate::{interval::*, simd::*};
impl Interval {
    /// Returns $\hull(\self ∪ \rhs)$, the tightest interval that contains both `self` and `rhs` as its subsets.
    ///
    /// | | $\rhs = ∅$ | $\rhs = \[c, d\]$ |
    /// | :----------------: | :--------: | :------------------------------------: |
    /// | $\self = ∅$ | $∅$ | $\[c, d\]$ |
    /// | $\self = \[a, b\]$ | $\[a, b\]$ | $\[\min \set{a, c}, \max \set{b, d}\]$ |
    #[must_use]
    pub fn convex_hull(self, rhs: Self) -> Self {
        // The empty interval is the identity element of the hull operation.
        if self.is_empty() {
            return rhs;
        }
        if rhs.is_empty() {
            return self;
        }
        // [min(a, c), max(b, d)]
        // = [-min(a, c); max(b, d)] = [max(-a, -c); max(b, d)] = .max([-a; b], [-c; d])
        //
        // `rep` stores the interval as the SIMD pair [-inf; sup], so one
        // lane-wise max yields both new bounds at once.
        Self {
            rep: max(self.rep, rhs.rep),
        }
    }

    /// Returns $\self ∩ \rhs$, the intersection of `self` and `rhs`.
    ///
    /// | | $\rhs = ∅$ | $\rhs = \[c, d\]$ |
    /// | :----------------: | :--------: | :------------------------------------: |
    /// | $\self = ∅$ | $∅$ | $∅$ |
    /// | $\self = \[a, b\]$ | $∅$ | $\[\max \set{a, c}, \min \set{b, d}\]$ |
    #[must_use]
    pub fn intersection(self, rhs: Self) -> Self {
        if self.either_empty(rhs) {
            return Self::EMPTY;
        }
        // [max(a, c), min(b, d)]
        // = [-max(a, c); min(b, d)] = [min(-a, -c); min(b, d)] = .min([-a; b], [-c; d])
        let i = Self {
            rep: min(self.rep, rhs.rep),
        };
        // Disjoint inputs produce inverted bounds (inf > sup); normalize to ∅.
        if i.inf_raw() > i.sup_raw() {
            Self::EMPTY
        } else {
            i
        }
    }
}
macro_rules! impl_dec {
($f:ident) => {
#[doc = concat!("Applies [`Interval::", stringify!($f), "`] to the interval parts of `self` and `rhs`")]
/// and returns the result decorated with [`Decoration::Trv`].
///
/// A NaI is returned if `self` or `rhs` is NaI.
#[must_use]
pub fn $f(self, rhs: Self) -> Self {
if self.is_nai() || rhs.is_nai() {
return Self::NAI;
}
Self::new_unchecked(self.x.$f(rhs.x), Decoration::Trv)
} | }
// Set operations on decorated intervals: both results are decorated with
// `Decoration::Trv` (see `impl_dec!`), and NaI propagates through either input.
impl DecInterval {
    impl_dec!(convex_hull);
    impl_dec!(intersection);
}
#[cfg(test)]
mod tests {
    use crate::*;
    use DecInterval as DI;
    use Interval as I;

    // The empty interval is the identity of convex_hull and absorbing for
    // intersection, for both bare and decorated intervals.
    #[test]
    fn empty() {
        assert_eq!(I::EMPTY.convex_hull(I::PI), I::PI);
        assert_eq!(I::PI.convex_hull(I::EMPTY), I::PI);
        assert!(I::EMPTY.intersection(I::PI).is_empty());
        assert!(I::PI.intersection(I::EMPTY).is_empty());
        assert_eq!(DI::EMPTY.convex_hull(DI::PI), DI::PI);
        assert_eq!(DI::PI.convex_hull(DI::EMPTY), DI::PI);
        assert!(DI::EMPTY.intersection(DI::PI).is_empty());
        assert!(DI::PI.intersection(DI::EMPTY).is_empty());
    }

    // NaI is absorbing for both decorated operations, in either operand slot.
    #[test]
    fn nai() {
        assert!(DI::NAI.convex_hull(DI::PI).is_nai());
        assert!(DI::PI.convex_hull(DI::NAI).is_nai());
        assert!(DI::NAI.intersection(DI::PI).is_nai());
        assert!(DI::PI.intersection(DI::NAI).is_nai());
    }
}
} | }; |
REF_M_30806+30807+30808_Specular_all_set_1.py | # Cross-section: Off_Off
# Run:30806
######################################################################
#Python Script Generated by GeneratePythonScript Algorithm
######################################################################
LoadEventNexus(Filename='/SNS/REF_M/IPTS-21391/nexus/REF_M_30794.nxs.h5',
OutputWorkspace='raw_events')
FilterByLogValue(InputWorkspace='raw_events',
OutputWorkspace='30794_entry-Off_Off',
LogName='BL4A:SF:ICP:getDI',
MinimumValue=15,
MaximumValue=15,
TimeTolerance=0.10000000000000001,
LogBoundary='Left')
GroupWorkspaces(InputWorkspaces='30794_entry-Off_Off',
OutputWorkspace='wsg')
LoadEventNexus(Filename='/SNS/REF_M/IPTS-21391/nexus/REF_M_30806.nxs.h5',
OutputWorkspace='raw_events')
FilterByLogValue(InputWorkspace='raw_events',
OutputWorkspace='30806_entry-Off_Off',
LogName='BL4A:SF:ICP:getDI',
MinimumValue=15,
MaximumValue=15,
TimeTolerance=0.10000000000000001,
LogBoundary='Left')
GroupWorkspaces(InputWorkspaces='30806_entry-Off_Off',
OutputWorkspace='wsg')
MagnetismReflectometryReduction(InputWorkspace='wsg',
NormalizationWorkspace='30794_entry-Off_Off',
SignalPeakPixelRange='181,195',
SubtractSignalBackground=False,
SignalBackgroundPixelRange='49,88',
NormPeakPixelRange='202,216',
SubtractNormBackground=False,
NormBackgroundPixelRange='94,104',
LowResDataAxisPixelRange='69,172',
LowResNormAxisPixelRange='71,175',
TimeAxisRange='11761.6,44977.7',
RoundUpPixel=False,
SpecularPixel=187.30000000000001,
FinalRebin=False,
QMin=0.001,
QStep=-0.02,
TimeAxisStep=400,
ConstQTrim=0.10000000000000001,
OutputWorkspace='r30806')
Scale(InputWorkspace='r30806',
OutputWorkspace='r30806',
Factor=1.6068516739750773)
Scale(InputWorkspace='r30806',
OutputWorkspace='r30806_scaled',
Factor=2.4266100000000002)
AddSampleLog(Workspace='r30806_scaled',
LogName='scaling_factor',
LogText='2.42661',
LogType='Number')
# Run:30807
######################################################################
#Python Script Generated by GeneratePythonScript Algorithm
######################################################################
# Same normalization run (30794) and Off_Off filtering as the 30806 section.
LoadEventNexus(Filename='/SNS/REF_M/IPTS-21391/nexus/REF_M_30794.nxs.h5',
OutputWorkspace='raw_events')
FilterByLogValue(InputWorkspace='raw_events',
OutputWorkspace='30794_entry-Off_Off',
LogName='BL4A:SF:ICP:getDI',
MinimumValue=15,
MaximumValue=15,
TimeTolerance=0.10000000000000001,
LogBoundary='Left')
GroupWorkspaces(InputWorkspaces='30794_entry-Off_Off',
OutputWorkspace='wsg')
LoadEventNexus(Filename='/SNS/REF_M/IPTS-21391/nexus/REF_M_30807.nxs.h5',
OutputWorkspace='raw_events')
FilterByLogValue(InputWorkspace='raw_events',
OutputWorkspace='30807_entry-Off_Off',
LogName='BL4A:SF:ICP:getDI',
MinimumValue=15,
MaximumValue=15,
TimeTolerance=0.10000000000000001,
LogBoundary='Left')
GroupWorkspaces(InputWorkspaces='30807_entry-Off_Off',
OutputWorkspace='wsg')
# NOTE(review): this GroupWorkspaces call is an exact duplicate of the one
# above — harmless (same inputs/output) but redundant in the generated script.
GroupWorkspaces(InputWorkspaces='30807_entry-Off_Off',
OutputWorkspace='wsg')
MagnetismReflectometryReduction(InputWorkspace='wsg',
NormalizationWorkspace='30794_entry-Off_Off',
SignalPeakPixelRange='181,195',
SubtractSignalBackground=False,
SignalBackgroundPixelRange='49,88',
NormPeakPixelRange='202,216',
SubtractNormBackground=False,
NormBackgroundPixelRange='94,104',
LowResDataAxisPixelRange='69,172',
LowResNormAxisPixelRange='71,175',
TimeAxisRange='11768.3,45065.6',
RoundUpPixel=False,
SpecularPixel=187.80000000000001,
FinalRebin=False,
QMin=0.001,
QStep=-0.02,
TimeAxisStep=400,
ConstQTrim=0.10000000000000001,
OutputWorkspace='r30807')
Scale(InputWorkspace='r30807',
OutputWorkspace='r30807_scaled',
Factor=0.61799999999999999)
AddSampleLog(Workspace='r30807_scaled',
LogName='scaling_factor',
LogText='0.618',
LogType='Number')
# Run:30808
######################################################################
#Python Script Generated by GeneratePythonScript Algorithm
######################################################################
LoadEventNexus(Filename='/SNS/REF_M/IPTS-21391/nexus/REF_M_30796.nxs.h5',
OutputWorkspace='raw_events')
FilterByLogValue(InputWorkspace='raw_events',
OutputWorkspace='30796_entry-Off_Off',
LogName='BL4A:SF:ICP:getDI',
MinimumValue=15,
MaximumValue=15,
TimeTolerance=0.10000000000000001,
LogBoundary='Left')
GroupWorkspaces(InputWorkspaces='30796_entry-Off_Off',
OutputWorkspace='wsg')
LoadEventNexus(Filename='/SNS/REF_M/IPTS-21391/nexus/REF_M_30808.nxs.h5',
OutputWorkspace='raw_events')
FilterByLogValue(InputWorkspace='raw_events',
OutputWorkspace='30808_entry-Off_Off',
LogName='BL4A:SF:ICP:getDI',
MinimumValue=15,
MaximumValue=15,
TimeTolerance=0.10000000000000001,
LogBoundary='Left')
GroupWorkspaces(InputWorkspaces='30808_entry-Off_Off',
OutputWorkspace='wsg')
MagnetismReflectometryReduction(InputWorkspace='wsg',
NormalizationWorkspace='30796_entry-Off_Off',
SignalPeakPixelRange='183,197',
SubtractSignalBackground=False,
SignalBackgroundPixelRange='49,88',
NormPeakPixelRange='202,216',
SubtractNormBackground=False,
NormBackgroundPixelRange='94,104',
LowResDataAxisPixelRange='69,172',
LowResNormAxisPixelRange='82,175',
TimeAxisRange='11733.5,45058.3',
RoundUpPixel=False,
SpecularPixel=189,
FinalRebin=False,
QMin=0.001,
QStep=-0.02,
TimeAxisStep=400,
ConstQTrim=0.10000000000000001,
OutputWorkspace='r30808')
Scale(InputWorkspace='r30808',
OutputWorkspace='r30808',
Factor=0.19100143649212772)
Scale(InputWorkspace='r30808',
OutputWorkspace='r30808_scaled',
Factor=0.72276980360217025)
AddSampleLog(Workspace='r30808_scaled',
LogName='scaling_factor',
LogText='0.722769803602',
LogType='Number') | Scale(InputWorkspace='r30807',
OutputWorkspace='r30807', |
content_world.go | package webkit
// #include "content_world.h"
import "C"
import (
"unsafe"
"github.com/hsiafan/cocoa/foundation"
"github.com/hsiafan/cocoa/objc"
)
// ContentWorld is the interface implemented by WKContentWorld wrappers.
type ContentWorld interface {
	objc.Object

	// Name returns the name of this content world.
	Name() string
}

// WKContentWorld wraps the Objective-C WKContentWorld class via cgo.
type WKContentWorld struct {
	objc.NSObject
}
// MakeContentWorld wraps a raw Objective-C pointer in a WKContentWorld value.
func MakeContentWorld(ptr unsafe.Pointer) WKContentWorld {
	return WKContentWorld{NSObject: objc.MakeObject(ptr)}
}
// AllocContentWorld allocates (without initializing) a WKContentWorld instance.
func AllocContentWorld() WKContentWorld {
	return MakeContentWorld(C.C_WKContentWorld_AllocContentWorld())
}
// Autorelease schedules the receiver for autorelease and returns it.
func (w WKContentWorld) Autorelease() WKContentWorld {
	return MakeContentWorld(C.C_WKContentWorld_Autorelease(w.Ptr()))
}
// Retain increments the receiver's Objective-C retain count and returns it.
func (w WKContentWorld) Retain() WKContentWorld {
	return MakeContentWorld(C.C_WKContentWorld_Retain(w.Ptr()))
}
func | (name string) WKContentWorld {
result_ := C.C_WKContentWorld_ContentWorld_WorldWithName(foundation.NewString(name).Ptr())
return MakeContentWorld(result_)
}
// ContentWorld_DefaultClientWorld returns the default client content world.
func ContentWorld_DefaultClientWorld() WKContentWorld {
	return MakeContentWorld(C.C_WKContentWorld_ContentWorld_DefaultClientWorld())
}
func ContentWorld_PageWorld() WKContentWorld {
result_ := C.C_WKContentWorld_ContentWorld_PageWorld()
return MakeContentWorld(result_)
}
func (w WKContentWorld) Name() string {
result_ := C.C_WKContentWorld_Name(w.Ptr())
return foundation.MakeString(result_).String()
}
| ContentWorld_WorldWithName |
graph.rs | use std::collections::HashMap;
use crate::graph_description::{Edge, EdgeList, GeneratedSubgraphs, Graph, Node};
use crate::node::NodeT;
impl Graph {
pub fn new(timestamp: u64) -> Self {
Graph {
nodes: HashMap::new(),
edges: HashMap::new(),
timestamp,
}
}
pub fn is_empty(&self) -> bool {
self.nodes.is_empty() && self.edges.is_empty()
}
pub fn merge(&mut self, other: &Graph) {
self.edges.extend(other.edges.clone());
for (node_key, other_node) in other.nodes.iter() {
self.nodes
.entry(node_key.clone())
.and_modify(|node| {
node.merge(other_node);
})
.or_insert_with(|| other_node.clone());
}
}
pub fn add_node<N>(&mut self, node: N)
where
N: Into<Node>,
{
let node = node.into();
let key = node.clone_node_key();
self.nodes.insert(key.to_string(), node);
self.edges
.entry(key)
.or_insert_with(|| EdgeList { edges: vec![] });
}
pub fn with_node<N>(mut self, node: N) -> Graph
where
N: Into<Node>,
|
pub fn add_edge(
&mut self,
edge_name: impl Into<String>,
from: impl Into<String>,
to: impl Into<String>,
) {
let from = from.into();
let to = to.into();
let edge_name = edge_name.into();
let edge = Edge {
from: from.clone(),
to,
edge_name,
};
self.edges
.entry(from)
.or_insert_with(|| EdgeList {
edges: Vec::with_capacity(1),
})
.edges
.push(edge);
}
}
impl GeneratedSubgraphs {
pub fn new(subgraphs: Vec<Graph>) -> GeneratedSubgraphs {
GeneratedSubgraphs { subgraphs }
}
}
| {
self.add_node(node);
self
} |
login.ts | import { Component } from '@angular/core';
import { NavController } from 'ionic-angular';
import { HomePage } from '../home/home';
import { AMIPage } from '../a-mi/a-mi';
import { CadastroClientePage } from '../cadastro-cliente/cadastro-cliente';
import { CadastroCuidadorPage } from '../cadastro-cuidador/cadastro-cuidador';
@Component({
selector: 'page-login',
templateUrl: 'login.html'
})
export class | {
constructor(public navCtrl: NavController) {
}
goToHome(params){
if (!params) params = {};
this.navCtrl.push(HomePage);
}goToAMI(params){
if (!params) params = {};
this.navCtrl.push(AMIPage);
}goToCadastroCliente(params){
if (!params) params = {};
this.navCtrl.push(CadastroClientePage);
}goToCadastroCuidador(params){
if (!params) params = {};
this.navCtrl.push(CadastroCuidadorPage);
}
}
| LoginPage |
sock_split.rs | use std::{
io::{self, BufReader, Write},
net::{Shutdown, TcpStream},
sync::Arc,
};
use rustls::Session;
/// Wrapper supporting reads of a shared TcpStream.
pub struct ArcTcpRead(Arc<TcpStream>);
impl io::Read for ArcTcpRead {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
(&*self.0).read(buf)
}
}
impl std::ops::Deref for ArcTcpRead {
type Target = TcpStream;
fn deref(&self) -> &Self::Target {
self.0.deref()
}
}
/// Wrapper around a TCP Stream supporting buffered reads.
pub struct BufStream(BufReader<ArcTcpRead>);
impl io::Read for BufStream {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.0.read(buf)
}
}
impl io::Write for BufStream {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.get_ref().write(buf)
}
fn flush(&mut self) -> io::Result<()> {
self.get_ref().flush()
}
}
impl BufStream {
/// Unwrap into the internal BufReader.
fn into_reader(self) -> BufReader<ArcTcpRead> {
self.0
}
/// Returns a reference to the underlying TcpStream.
fn get_ref(&self) -> &TcpStream {
&*self.0.get_ref().0
}
}
pub enum ReadStream {
Tcp(BufReader<ArcTcpRead>),
Tls(rustls_split::ReadHalf<rustls::ServerSession>),
}
impl io::Read for ReadStream {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
match self {
Self::Tcp(reader) => reader.read(buf),
Self::Tls(read_half) => read_half.read(buf),
}
}
}
impl ReadStream {
pub fn shutdown(&mut self, how: Shutdown) -> io::Result<()> {
match self {
Self::Tcp(stream) => stream.get_ref().shutdown(how),
Self::Tls(write_half) => write_half.shutdown(how),
}
}
}
pub enum WriteStream {
Tcp(Arc<TcpStream>),
Tls(rustls_split::WriteHalf<rustls::ServerSession>),
}
impl WriteStream {
pub fn shutdown(&mut self, how: Shutdown) -> io::Result<()> {
match self {
Self::Tcp(stream) => stream.shutdown(how),
Self::Tls(write_half) => write_half.shutdown(how),
}
}
}
impl io::Write for WriteStream {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
match self {
Self::Tcp(stream) => stream.as_ref().write(buf),
Self::Tls(write_half) => write_half.write(buf),
}
}
fn flush(&mut self) -> io::Result<()> {
match self {
Self::Tcp(stream) => stream.as_ref().flush(),
Self::Tls(write_half) => write_half.flush(),
}
}
}
pub struct TlsBoxed {
stream: BufStream,
session: rustls::ServerSession,
}
impl TlsBoxed {
fn rustls_stream(&mut self) -> rustls::Stream<rustls::ServerSession, BufStream> {
rustls::Stream::new(&mut self.session, &mut self.stream)
}
}
pub enum BidiStream {
Tcp(BufStream),
/// This variant is boxed, because [`rustls::ServerSession`] is quite larger than [`BufStream`].
Tls(Box<TlsBoxed>),
}
impl BidiStream { | pub fn from_tcp(stream: TcpStream) -> Self {
Self::Tcp(BufStream(BufReader::new(ArcTcpRead(Arc::new(stream)))))
}
pub fn shutdown(&mut self, how: Shutdown) -> io::Result<()> {
match self {
Self::Tcp(stream) => stream.get_ref().shutdown(how),
Self::Tls(tls_boxed) => {
if how == Shutdown::Read {
tls_boxed.stream.get_ref().shutdown(how)
} else {
tls_boxed.session.send_close_notify();
let res = tls_boxed.rustls_stream().flush();
tls_boxed.stream.get_ref().shutdown(how)?;
res
}
}
}
}
/// Split the bi-directional stream into two owned read and write halves.
pub fn split(self) -> (ReadStream, WriteStream) {
match self {
Self::Tcp(stream) => {
let reader = stream.into_reader();
let stream: Arc<TcpStream> = reader.get_ref().0.clone();
(ReadStream::Tcp(reader), WriteStream::Tcp(stream))
}
Self::Tls(tls_boxed) => {
let reader = tls_boxed.stream.into_reader();
let buffer_data = reader.buffer().to_owned();
let read_buf_cfg = rustls_split::BufCfg::with_data(buffer_data, 8192);
let write_buf_cfg = rustls_split::BufCfg::with_capacity(8192);
// TODO would be nice to avoid the Arc here
let socket = Arc::try_unwrap(reader.into_inner().0).unwrap();
let (read_half, write_half) =
rustls_split::split(socket, tls_boxed.session, read_buf_cfg, write_buf_cfg);
(ReadStream::Tls(read_half), WriteStream::Tls(write_half))
}
}
}
pub fn start_tls(self, mut session: rustls::ServerSession) -> io::Result<Self> {
match self {
Self::Tcp(mut stream) => {
session.complete_io(&mut stream)?;
assert!(!session.is_handshaking());
Ok(Self::Tls(Box::new(TlsBoxed { stream, session })))
}
Self::Tls { .. } => Err(io::Error::new(
io::ErrorKind::InvalidInput,
"TLS is already started on this stream",
)),
}
}
}
impl io::Read for BidiStream {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
match self {
Self::Tcp(stream) => stream.read(buf),
Self::Tls(tls_boxed) => tls_boxed.rustls_stream().read(buf),
}
}
}
impl io::Write for BidiStream {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
match self {
Self::Tcp(stream) => stream.write(buf),
Self::Tls(tls_boxed) => tls_boxed.rustls_stream().write(buf),
}
}
fn flush(&mut self) -> io::Result<()> {
match self {
Self::Tcp(stream) => stream.flush(),
Self::Tls(tls_boxed) => tls_boxed.rustls_stream().flush(),
}
}
} | |
validate.ts | import oneline from 'oneline'
import {isPlainObject} from 'lodash'
import {randomKey, toString as pathToString} from '@sanity/util/paths'
import {Template} from './Template'
import {TemplateParameter} from './TemplateParameters'
import {getDefaultSchema} from './parts/Schema'
export {validateInitialObjectValue, validateTemplates}
const ALLOWED_REF_PROPS = ['_key', '_ref', '_weak', '_type']
const REQUIRED_TEMPLATE_PROPS: (keyof Template)[] = ['id', 'title', 'schemaType', 'value']
function validateTemplates(templates: Template[]) {
const idMap = new Map()
templates.forEach((template, i) => {
const id = templateId(template, i)
if (typeof (template as {[key: string]: any}).values !== 'undefined' && !template.value) {
throw new Error(`Template ${id} is missing "value" property, but contained "values". Typo?`)
}
const missing = REQUIRED_TEMPLATE_PROPS.filter((prop) => !template[prop])
if (missing.length > 0) {
throw new Error(`Template ${id} is missing required properties: ${missing.join(', ')}`)
}
if (typeof template.value !== 'function' && !isPlainObject(template.value)) {
throw new Error(
`Template ${id} has an invalid "value" property; should be a function or an object`
)
}
if (typeof template.parameters !== 'undefined') {
if (Array.isArray(template.parameters)) {
template.parameters.forEach((param, j) => validateParameter(param, template, j))
} else {
throw new Error(`Template ${id} has an invalid "parameters" property; must be an array`)
}
}
if (idMap.has(template.id)) {
const dupeIndex = idMap.get(template.id)
const dupe = `${quote(templates[dupeIndex].title)} at index ${dupeIndex}`
throw new Error(
`Template "${template.title}" at index ${i} has the same ID ("${template.id}") as template ${dupe}`
)
}
idMap.set(template.id, i)
})
return templates
}
function | (template: Template, i: number) {
return quote(template.id || template.title) || (typeof i === 'number' && `at index ${i}`) || ''
}
function quote(str: string) {
return str && str.length > 0 ? `"${str}"` : str
}
function validateInitialObjectValue<T extends Record<string, unknown>>(
value: T,
template: Template
): T {
const contextError = (msg: string) => `Template "${template.id}" initial value: ${msg}`
if (!isPlainObject(value)) {
throw new Error(contextError(`resolved to a non-object`))
}
if (value._type && template.schemaType !== value._type) {
throw new Error(
contextError(oneline`
includes "_type"-property (${value._type})
that does not match template (${template.schemaType})
`)
)
}
try {
return validateValue(value)
} catch (err) {
err.message = contextError(err.message)
throw err
}
}
function validateValue(value: any, path: (string | number)[] = [], parentIsArray = false): any {
if (Array.isArray(value)) {
return value.map((item, i) => {
if (Array.isArray(item)) {
throw new Error(
`multidimensional arrays are not supported (at path "${pathToString(path)}")`
)
}
return validateValue(item, path.concat(i), true)
})
}
if (!isPlainObject(value)) {
return value
}
// Apply missing keys is the parent is an array
const initial: {[key: string]: any} = parentIsArray && !value._key ? {_key: randomKey()} : {}
// Ensure non-root objects have _type
if (path.length > 0 && !value._type) {
if (value._ref) {
// In the case of references, we know what the type should be, so apply it
initial._type = 'reference'
} else {
// todo: consider if we need to re-instantiate this. It currently makes the valid case of having an initial object value for a field fail
// throw new Error(`missing "_type" property at path "${pathToString(path)}"`)
}
}
if (value._ref) {
validateReference(value, path)
}
// Validate deeply
return Object.keys(value).reduce((acc, key) => {
acc[key] = validateValue(value[key], path.concat([key]))
return acc
}, initial)
}
function validateParameter(parameter: TemplateParameter, template: Template, index: number) {
const schema = getDefaultSchema()
if (!parameter.name) {
throw new Error(
`Template ${template.id} has a parameter at index ${index} that is missing its "name"-property`
)
}
// I know, this is a weird one
if (parameter.name === 'template') {
throw new Error(
`Template parameters cannot be named "template", see parameter #${index} for template ${template.id}`
)
}
if (!schema.get(parameter.type)) {
throw new Error(
`Template parameter "${parameter.name}" has an invalid/unknown type: "${parameter.type}"`
)
}
}
function validateReference(value, path: (string | number)[] = []) {
if (!value._type && value.type) {
throw new Error(
`Reference is missing "_type", but has a "type" property at path "${pathToString(path)}"`
)
}
const disallowed = Object.keys(value).filter((key) => !ALLOWED_REF_PROPS.includes(key))
if (disallowed.length > 0) {
const plural = disallowed.length > 1 ? 'properties' : 'property'
throw new Error(
`Disallowed ${plural} found in reference: ${disallowed
.map(quote)
.join(', ')} at path "${pathToString(path)}"`
)
}
}
| templateId |
s_text_api_settings.py | # This file was auto generated; Do not modify, if you value your sanity!
import ctypes
# can1_options
class can1_options(ctypes.Union):
_pack_ = 2
_fields_ = [
('bExtended', ctypes.c_uint32, 1), # [Bitfield]
('DWord', ctypes.c_uint32),
]
# Extra names go here:
# End of extra names
# can2_options
class can2_options(ctypes.Union):
_pack_ = 2
_fields_ = [
('bExtended', ctypes.c_uint32, 1), # [Bitfield]
('DWord', ctypes.c_uint32),
]
# Extra names go here:
# End of extra names
# can3_options
class can3_options(ctypes.Union):
_pack_ = 2
_fields_ = [
('bExtended', ctypes.c_uint32, 1), # [Bitfield]
('DWord', ctypes.c_uint32),
]
# Extra names go here:
# End of extra names
# can4_options
class can4_options(ctypes.Union):
_pack_ = 2
_fields_ = [
('bExtended', ctypes.c_uint32, 1), # [Bitfield]
('DWord', ctypes.c_uint32),
]
# Extra names go here:
# End of extra names | _fields_ = [
('can1_tx_id', ctypes.c_uint32),
('can1_rx_id', ctypes.c_uint32),
('can1_options', can1_options),
('can2_tx_id', ctypes.c_uint32),
('can2_rx_id', ctypes.c_uint32),
('can2_options', can2_options),
('network_enables', ctypes.c_uint32),
('can3_tx_id', ctypes.c_uint32),
('can3_rx_id', ctypes.c_uint32),
('can3_options', can3_options),
('can4_tx_id', ctypes.c_uint32),
('can4_rx_id', ctypes.c_uint32),
('can4_options', can4_options),
('reserved', ctypes.c_uint32 * 5),
]
# Extra names go here:
STextAPISettings = s_text_api_settings
# End of extra names |
class s_text_api_settings(ctypes.Structure):
_pack_ = 2
_anonymous_ = ("can1_options", "can2_options", "can3_options", "can4_options",) |
inline_query_result_gif.rs | // WARNING: THIS CODE IS AUTOGENERATED.
// DO NOT EDIT!!!
use crate::types::{InlineKeyboardMarkup, InputMessageContent, MessageEntity};
use serde::{Deserialize, Serialize};
/// Represents a link to an animated GIF file. By default, this animated GIF file will be sent by the user with optional caption. Alternatively, you can use input_message_content to send a message with the specified content instead of the animation.
/// <https://core.telegram.org/bots/api#inlinequeryresultgif>
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct | {
/// Type of the result, must be gif
pub r#type: String,
/// Unique identifier for this result, 1-64 bytes
pub id: String,
/// A valid URL for the GIF file. File size must not exceed 1MB
pub gif_url: String,
/// Optional. Width of the GIF
#[serde(skip_serializing_if = "Option::is_none")]
pub gif_width: Option<i64>,
/// Optional. Height of the GIF
#[serde(skip_serializing_if = "Option::is_none")]
pub gif_height: Option<i64>,
/// Optional. Duration of the GIF in seconds
#[serde(skip_serializing_if = "Option::is_none")]
pub gif_duration: Option<i64>,
/// URL of the static (JPEG or GIF) or animated (MPEG4) thumbnail for the result
pub thumb_url: String,
/// Optional. MIME type of the thumbnail, must be one of "image/jpeg", "image/gif", or "video/mp4". Defaults to "image/jpeg"
#[serde(skip_serializing_if = "Option::is_none")]
pub thumb_mime_type: Option<String>,
/// Optional. Title for the result
#[serde(skip_serializing_if = "Option::is_none")]
pub title: Option<String>,
/// Optional. Caption of the GIF file to be sent, 0-1024 characters after entities parsing
#[serde(skip_serializing_if = "Option::is_none")]
pub caption: Option<String>,
/// Optional. Mode for parsing entities in the caption. See formatting options for more details.
#[serde(skip_serializing_if = "Option::is_none")]
pub parse_mode: Option<String>,
/// Optional. List of special entities that appear in the caption, which can be specified instead of parse_mode
#[serde(skip_serializing_if = "Option::is_none")]
pub caption_entities: Option<Vec<MessageEntity>>,
/// Optional. Inline keyboard attached to the message
#[serde(skip_serializing_if = "Option::is_none")]
pub reply_markup: Option<InlineKeyboardMarkup>,
/// Optional. Content of the message to be sent instead of the GIF animation
#[serde(skip_serializing_if = "Option::is_none")]
pub input_message_content: Option<InputMessageContent>,
}
| InlineQueryResultGif |
execution_context_test.py | # Lint as: python3
# Copyright 2019, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import contextlib
from absl.testing import absltest
import numpy as np
import tensorflow as tf
from tensorflow_federated.python.core.api import computation_types
from tensorflow_federated.python.core.api import computations
from tensorflow_federated.python.core.api import intrinsics
from tensorflow_federated.python.core.impl.compiler import type_factory
from tensorflow_federated.python.core.impl.executors import execution_context
from tensorflow_federated.python.core.impl.executors import executor_stacks
tf.compat.v1.enable_v2_behavior()
@contextlib.contextmanager
def _execution_context(num_clients=None):
executor_factory = executor_stacks.local_executor_factory(num_clients)
yield execution_context.ExecutionContext(executor_factory)
class RetryableErrorTest(absltest.TestCase):
def test_is_retryable_error(self):
retryable_error = execution_context.RetryableError()
self.assertTrue(execution_context._is_retryable_error(retryable_error))
self.assertFalse(execution_context._is_retryable_error(TypeError()))
self.assertFalse(execution_context._is_retryable_error(1))
self.assertFalse(execution_context._is_retryable_error('a'))
self.assertFalse(execution_context._is_retryable_error(None))
class ExecutionContextIntegrationTest(absltest.TestCase):
def test_simple_no_arg_tf_computation_with_int_result(self):
@computations.tf_computation
def comp():
return tf.constant(10)
with _execution_context():
result = comp()
self.assertEqual(result, 10)
def test_one_arg_tf_computation_with_int_param_and_result(self):
@computations.tf_computation(tf.int32)
def comp(x):
return tf.add(x, 10)
with _execution_context():
result = comp(3)
self.assertEqual(result, 13)
def test_three_arg_tf_computation_with_int_params_and_result(self):
@computations.tf_computation(tf.int32, tf.int32, tf.int32)
def comp(x, y, z):
return tf.multiply(tf.add(x, y), z)
with _execution_context():
result = comp(3, 4, 5)
self.assertEqual(result, 35)
def test_tf_computation_with_dataset_params_and_int_result(self):
@computations.tf_computation(computation_types.SequenceType(tf.int32))
def comp(ds):
return ds.reduce(np.int32(0), lambda x, y: x + y)
with _execution_context():
ds = tf.data.Dataset.range(10).map(lambda x: tf.cast(x, tf.int32))
result = comp(ds)
self.assertEqual(result, 45)
def test_tf_computation_with_structured_result(self):
@computations.tf_computation
def comp():
return collections.OrderedDict([
('a', tf.constant(10)),
('b', tf.constant(20)),
])
with _execution_context():
result = comp()
self.assertIsInstance(result, collections.OrderedDict)
self.assertDictEqual(result, {'a': 10, 'b': 20})
def test_with_temperature_sensor_example(self):
@computations.tf_computation(
computation_types.SequenceType(tf.float32), tf.float32)
def count_over(ds, t):
return ds.reduce(
np.float32(0), lambda n, x: n + tf.cast(tf.greater(x, t), tf.float32))
@computations.tf_computation(computation_types.SequenceType(tf.float32))
def count_total(ds):
|
@computations.federated_computation(
type_factory.at_clients(computation_types.SequenceType(tf.float32)),
type_factory.at_server(tf.float32))
def comp(temperatures, threshold):
return intrinsics.federated_mean(
intrinsics.federated_map(
count_over,
intrinsics.federated_zip(
[temperatures,
intrinsics.federated_broadcast(threshold)])),
intrinsics.federated_map(count_total, temperatures))
with _execution_context():
to_float = lambda x: tf.cast(x, tf.float32)
temperatures = [
tf.data.Dataset.range(10).map(to_float),
tf.data.Dataset.range(20).map(to_float),
tf.data.Dataset.range(30).map(to_float),
]
threshold = 15.0
result = comp(temperatures, threshold)
self.assertAlmostEqual(result, 8.333, places=3)
num_clients = 3
with _execution_context(num_clients):
to_float = lambda x: tf.cast(x, tf.float32)
temperatures = [
tf.data.Dataset.range(10).map(to_float),
tf.data.Dataset.range(20).map(to_float),
tf.data.Dataset.range(30).map(to_float),
]
threshold = 15.0
result = comp(temperatures, threshold)
self.assertAlmostEqual(result, 8.333, places=3)
def test_changing_cardinalities_across_calls(self):
@computations.federated_computation(type_factory.at_clients(tf.int32))
def comp(x):
return x
five_ints = list(range(5))
ten_ints = list(range(10))
with _execution_context():
five = comp(five_ints)
ten = comp(ten_ints)
self.assertEqual(five, five_ints)
self.assertEqual(ten, ten_ints)
def test_conflicting_cardinalities_within_call(self):
@computations.federated_computation(
[type_factory.at_clients(tf.int32),
type_factory.at_clients(tf.int32)])
def comp(x):
return x
five_ints = list(range(5))
ten_ints = list(range(10))
with _execution_context():
with self.assertRaisesRegex(ValueError, 'Conflicting cardinalities'):
comp([five_ints, ten_ints])
if __name__ == '__main__':
absltest.main()
| return ds.reduce(np.float32(0.0), lambda n, _: n + 1.0) |
line.rs | use super::super::LineState;
pub fn copy_line(state: &mut LineState) {
state.clipboard.push(state.characters.clone());
}
pub fn downcase_line(state: &mut LineState) {
let contents: Vec<char> = state
.characters
.drain(..)
.flat_map(|c| c.to_lowercase())
.collect();
state.characters = contents;
}
pub fn upcase_line(state: &mut LineState) {
let contents: Vec<char> = state
.characters
.drain(..)
.flat_map(|c| c.to_uppercase())
.collect();
state.characters = contents;
}
fn first_non_whitespace(state: &LineState) -> Option<usize> {
state.characters.iter().position(|c| !c.is_whitespace())
}
fn last_non_whitespace(state: &LineState) -> Option<usize> {
let len = state.characters.len();
state
.characters
.iter()
.rev()
.position(|c| !c.is_whitespace())
.map(|i| len - i)
}
fn trim_to(state: &mut LineState, left: Option<usize>, right: Option<usize>) {
if let (Some(left), Some(right)) = (left, right) {
if left != right |
} else {
state.characters.truncate(0);
}
}
pub fn trim_line(state: &mut LineState) {
rtrim_line(state);
ltrim_line(state);
}
pub fn ltrim_line(state: &mut LineState) {
let right = first_non_whitespace(state);
trim_to(state, Some(0), right);
}
pub fn rtrim_line(state: &mut LineState) {
let left = last_non_whitespace(state);
let right = Some(state.characters.len());
trim_to(state, left, right);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn trim_line_cmd() {
let mut state1 = LineState::new(" hello ");
let mut state2 = LineState::new("hejsan ");
let mut state3 = LineState::new(" good bye");
trim_line(&mut state1);
assert_eq!("hello", state1.text());
trim_line(&mut state2);
assert_eq!("hejsan", state2.text());
trim_line(&mut state3);
assert_eq!("good bye", state3.text());
}
}
| {
let _ = state.characters.drain(left..right).last();
} |
test_show_versions.py | import json
import os
import re
import pytest
from pandas.compat import (
IS64,
is_ci_environment,
)
from pandas.util._print_versions import (
_get_dependency_info,
_get_sys_info,
)
import pandas as pd
@pytest.mark.filterwarnings(
# openpyxl
"ignore:defusedxml.lxml is no longer supported:DeprecationWarning"
)
@pytest.mark.filterwarnings(
# html5lib
"ignore:Using or importing the ABCs from:DeprecationWarning"
)
@pytest.mark.filterwarnings(
# fastparquet
"ignore:pandas.core.index is deprecated:FutureWarning"
)
@pytest.mark.filterwarnings(
# pandas_datareader
"ignore:pandas.util.testing is deprecated:FutureWarning"
)
@pytest.mark.filterwarnings(
# https://github.com/pandas-dev/pandas/issues/35252
"ignore:Distutils:UserWarning"
)
@pytest.mark.filterwarnings("ignore:Setuptools is replacing distutils:UserWarning")
def test_show_versions(tmpdir):
# GH39701
as_json = os.path.join(tmpdir, "test_output.json")
pd.show_versions(as_json=as_json)
with open(as_json) as fd:
# check if file output is valid JSON, will raise an exception if not
result = json.load(fd)
# Basic check that each version element is found in output
expected = {
"system": _get_sys_info(),
"dependencies": _get_dependency_info(),
}
assert result == expected
def test_show_versions_console_json(capsys):
# GH39701
pd.show_versions(as_json=True)
stdout = capsys.readouterr().out
# check valid json is printed to the console if as_json is True
result = json.loads(stdout)
# Basic check that each version element is found in output
expected = {
"system": _get_sys_info(),
"dependencies": _get_dependency_info(),
}
assert result == expected
@pytest.mark.xfail(
is_ci_environment() and not IS64, reason="Failing on 32 bit Python CI job"
)
def test_show_versions_console(capsys):
# gh-32041
# gh-32041 | # check header
assert "INSTALLED VERSIONS" in result
# check full commit hash
assert re.search(r"commit\s*:\s[0-9a-f]{40}\n", result)
# check required dependency
# 2020-12-09 npdev has "dirty" in the tag
# 2022-05-25 npdev released with RC wo/ "dirty".
# Just ensure we match [0-9]+\..* since npdev version is variable
assert re.search(r"numpy\s*:\s[0-9]+\..*\n", result)
# check optional dependency
assert re.search(r"pyarrow\s*:\s([0-9\.]+|None)\n", result)
def test_json_output_match(capsys, tmpdir):
# GH39701
pd.show_versions(as_json=True)
result_console = capsys.readouterr().out
out_path = os.path.join(tmpdir, "test_json.json")
pd.show_versions(as_json=out_path)
with open(out_path) as out_fd:
result_file = out_fd.read()
assert result_console == result_file | pd.show_versions(as_json=False)
result = capsys.readouterr().out
|
test_urls.py | from django.test import TestCase
from django.urls import reverse | class TestUrls(TestCase):
def test_report(self):
self.assertEqual("/api/v1/report", reverse("report")) | |
data.py | import enum
from dataclasses import dataclass
from typing import Dict, List, Optional, Tuple
from serde import serde
from . import imported
@serde
@dataclass(unsafe_hash=True)
class Int:
"""
Integer.
"""
i: int
@serde
@dataclass(unsafe_hash=True)
class Str:
"""
String.
"""
s: str
@serde
@dataclass(unsafe_hash=True)
class Float:
"""
Float.
"""
f: float
@serde
@dataclass(unsafe_hash=True)
class Bool:
"""
Boolean.
"""
b: bool
@serde
@dataclass(unsafe_hash=True)
class Pri:
"""
Primitives.
"""
i: int
s: str
f: float
b: bool
@serde
class PriOpt:
"""
Optional Primitives.
"""
i: Optional[int]
s: Optional[str]
f: Optional[float]
b: Optional[bool]
@serde
class PriList:
"""
List containing primitives.
"""
i: List[int]
s: List[str]
f: List[float]
b: List[bool]
@serde
class PriDict:
"""
Dict containing primitives.
"""
i: Dict[int, int]
s: Dict[str, str]
f: Dict[float, float]
b: Dict[bool, bool]
@serde
class PriTuple:
"""
Tuple containing primitives.
"""
i: Tuple[int, int, int]
s: Tuple[str, str, str, str]
f: Tuple[float, float, float, float, float]
b: Tuple[bool, bool, bool, bool, bool, bool]
@serde
@dataclass(unsafe_hash=True)
class NestedInt:
"""
Nested integer.
"""
i: Int
@serde
@dataclass(unsafe_hash=True)
class NestedPri:
"""
Nested primitives.
"""
i: Int
s: Str
f: Float
b: Bool
@serde
class | :
"""
Optional Primitives.
"""
i: Optional[Int]
s: Optional[Str]
f: Optional[Float]
b: Optional[Bool]
@serde
class NestedPriList:
"""
List containing nested primitives.
"""
i: List[Int]
s: List[Str]
f: List[Float]
b: List[Bool]
@serde
class NestedPriDict:
"""
Dict containing nested primitives.
"""
i: Dict[Str, Int]
s: Dict[Str, Str]
f: Dict[Str, Float]
b: Dict[Str, Bool]
@serde
class NestedPriTuple:
"""
Tuple containing nested primitives.
"""
i: Tuple[Int, Int, Int]
s: Tuple[Str, Str, Str, Str]
f: Tuple[Float, Float, Float, Float, Float]
b: Tuple[Bool, Bool, Bool, Bool, Bool, Bool]
@serde
@dataclass(unsafe_hash=True)
class PriDefault:
"""
Primitives.
"""
i: int = 10
s: str = 'foo'
f: float = 100.0
b: bool = True
@serde
class OptDefault:
"""
Optionals.
"""
n: Optional[int] = None
i: Optional[int] = 10
class E(enum.Enum):
S = 'foo'
F = 10.0
B = True
class IE(enum.IntEnum):
V0 = enum.auto()
V1 = enum.auto()
V2 = 10
V3 = 100
@serde
class EnumInClass:
"""
Class having enum fields.
"""
e: IE = IE.V2
o: Optional[E] = E.S
i: imported.IE = imported.IE.V1
ListPri = List[Pri]
DictPri = Dict[str, Pri]
INT = Int(10)
STR = Str('foo')
FLOAT = Float(100.0)
BOOL = Bool(True)
PRI = Pri(10, 'foo', 100.0, True)
PRI_TUPLE = (10, 'foo', 100.0, True)
PRILIST = ([10], ['foo'], [100.0], [True])
NESTED_PRILIST = ([INT], [STR], [FLOAT], [BOOL])
NESTED_PRILIST_TUPLE = ([(10,)], [('foo',)], [(100.0,)], [(True,)])
| NestedPriOpt |
conftest.py | # pylint: disable=unused-import
import os
import docker
import pytest
from dagster_celery_k8s.launcher import CeleryK8sRunLauncher
from dagster_k8s_test_infra.helm import TEST_AWS_CONFIGMAP_NAME
from dagster_k8s_test_infra.integration_utils import image_pull_policy
from dagster_test.test_project import build_and_tag_test_image, get_test_project_docker_image
from dagster_k8s_test_infra.cluster import ( # isort:skip
dagster_instance,
dagster_instance_for_user_deployments_subchart_disabled,
dagster_instance_for_daemon,
define_cluster_provider_fixture,
helm_postgres_url,
helm_postgres_url_for_user_deployments_subchart_disabled,
helm_postgres_url_for_daemon,
)
pytest_plugins = ["dagster_k8s_test_infra.helm"]
cluster_provider = define_cluster_provider_fixture()
IS_BUILDKITE = os.getenv("BUILDKITE") is not None
@pytest.fixture(scope="session")
def dagster_docker_image():
docker_image = get_test_project_docker_image()
if not IS_BUILDKITE:
try:
client = docker.from_env()
client.images.get(docker_image)
print( # pylint: disable=print-call
"Found existing image tagged {image}, skipping image build. To rebuild, first run: "
"docker rmi {image}".format(image=docker_image)
)
except docker.errors.ImageNotFound:
build_and_tag_test_image(docker_image)
return docker_image
# See: https://stackoverflow.com/a/31526934/324449
def pytest_addoption(parser):
# We catch the ValueError to support cases where we are loading multiple test suites, e.g., in
# the VSCode test explorer. When pytest tries to add an option twice, we get, e.g.
#
# ValueError: option names {'--cluster-provider'} already added
# Use kind or some other cluster provider?
| try:
parser.addoption("--cluster-provider", action="store", default="kind")
except ValueError:
pass
# Specify an existing kind cluster name to use
try:
parser.addoption("--kind-cluster", action="store")
except ValueError:
pass
# Keep resources around after tests are done
try:
parser.addoption("--no-cleanup", action="store_true", default=False)
except ValueError:
pass
# Use existing Helm chart/namespace
try:
parser.addoption("--existing-helm-namespace", action="store")
except ValueError:
pass |
|
api_mock.go | // Code generated by MockGen. DO NOT EDIT.
// Source: github.com/web-platform-tests/wpt.fyi/api/manifest (interfaces: API)
// Package mock_manifest is a generated GoMock package.
package mock_manifest
import (
gomock "github.com/golang/mock/gomock"
shared "github.com/web-platform-tests/wpt.fyi/shared"
reflect "reflect"
time "time"
)
// MockAPI is a mock of API interface
type MockAPI struct {
ctrl *gomock.Controller
recorder *MockAPIMockRecorder
}
// MockAPIMockRecorder is the mock recorder for MockAPI
type MockAPIMockRecorder struct {
mock *MockAPI
}
// NewMockAPI creates a new mock instance
func | (ctrl *gomock.Controller) *MockAPI {
mock := &MockAPI{ctrl: ctrl}
mock.recorder = &MockAPIMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use
func (m *MockAPI) EXPECT() *MockAPIMockRecorder {
return m.recorder
}
// GetManifestForSHA mocks base method
func (m *MockAPI) GetManifestForSHA(arg0 string) (string, []byte, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetManifestForSHA", arg0)
ret0, _ := ret[0].(string)
ret1, _ := ret[1].([]byte)
ret2, _ := ret[2].(error)
return ret0, ret1, ret2
}
// GetManifestForSHA indicates an expected call of GetManifestForSHA
func (mr *MockAPIMockRecorder) GetManifestForSHA(arg0 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetManifestForSHA", reflect.TypeOf((*MockAPI)(nil).GetManifestForSHA), arg0)
}
// NewRedis mocks base method
func (m *MockAPI) NewRedis(arg0 time.Duration) shared.ReadWritable {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "NewRedis", arg0)
ret0, _ := ret[0].(shared.ReadWritable)
return ret0
}
// NewRedis indicates an expected call of NewRedis
func (mr *MockAPIMockRecorder) NewRedis(arg0 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewRedis", reflect.TypeOf((*MockAPI)(nil).NewRedis), arg0)
}
| NewMockAPI |
main.go | package main
// Definition for a binary tree node.
type TreeNode struct {
Val int
Left *TreeNode
Right *TreeNode
}
func | (root *TreeNode) {
var prev, a, b *TreeNode
recoverTreeInOrder(root, &prev, &a, &b)
a.Val, b.Val = b.Val, a.Val
}
func recoverTreeInOrder(root *TreeNode, prev, a, b **TreeNode) {
if root == nil {
return
}
recoverTreeInOrder(root.Left, prev, a, b)
if (*prev) != nil {
if (*prev).Val > root.Val {
if (*a) == nil {
*a = (*prev)
*b = root
} else {
*b = root
}
}
}
(*prev) = root
recoverTreeInOrder(root.Right, prev, a, b)
}
| recoverTree |
plugin.go | // Copyright 2019 Authors of Hubble
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package portdistribution
import (
"github.com/cilium/hubble/pkg/metrics/api"
)
type portDistributionPlugin struct{}
func (p *portDistributionPlugin) NewHandler() api.Handler {
return &portDistributionHandler{}
}
func (p *portDistributionPlugin) HelpText() string {
return `port-distribution - Port distribution metrics
Reports metrics related to port distribution
Metrics:
hubble_port_distribution_total Number of packets by destination port number
Options:` +
api.ContextOptionsHelp
}
func init() | {
api.DefaultRegistry().Register("port-distribution", &portDistributionPlugin{})
} |
|
msggetcfheaders.go | // Copyright (c) 2017 The btcsuite developers
// Copyright (c) 2018 The bcext developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package wire
import (
"io"
"github.com/bcext/gcash/chaincfg/chainhash"
)
// MsgGetCFHeaders is a message similar to MsgGetHeaders, but for committed
// filter headers. It allows to set the FilterType field to get headers in the
// chain of basic (0x00) or extended (0x01) headers.
type MsgGetCFHeaders struct {
FilterType FilterType
StartHeight uint32
StopHash chainhash.Hash
}
// BtcDecode decodes r using the bitcoin protocol encoding into the receiver.
// This is part of the Message interface implementation.
func (msg *MsgGetCFHeaders) BtcDecode(r io.Reader, pver uint32) error {
err := readElement(r, &msg.FilterType)
if err != nil {
return err
}
err = readElement(r, &msg.StartHeight)
if err != nil {
return err
}
return readElement(r, &msg.StopHash)
}
// BtcEncode encodes the receiver to w using the bitcoin protocol encoding.
// This is part of the Message interface implementation.
func (msg *MsgGetCFHeaders) BtcEncode(w io.Writer, pver uint32) error {
err := writeElement(w, msg.FilterType)
if err != nil {
return err
}
err = writeElement(w, &msg.StartHeight)
if err != nil {
return err
}
return writeElement(w, &msg.StopHash)
}
// Command returns the protocol command string for the message. This is part
// of the Message interface implementation.
func (msg *MsgGetCFHeaders) Command() string {
return CmdGetCFHeaders
}
// MaxPayloadLength returns the maximum length the payload can be for the | return 1 + 4 + chainhash.HashSize
}
// NewMsgGetCFHeaders returns a new bitcoin getcfheader message that conforms to
// the Message interface using the passed parameters and defaults for the
// remaining fields.
func NewMsgGetCFHeaders(filterType FilterType, startHeight uint32,
stopHash *chainhash.Hash) *MsgGetCFHeaders {
return &MsgGetCFHeaders{
FilterType: filterType,
StartHeight: startHeight,
StopHash: *stopHash,
}
} | // receiver. This is part of the Message interface implementation.
func (msg *MsgGetCFHeaders) MaxPayloadLength(pver uint32) uint32 {
// Filter type + uint32 + block hash |
match_enum.rs | #[allow(dead_code)]
enum Color {
Red,
Blue,
Green,
RGB(u32, u32, u32),
HSV(u32, u32, u32),
HSL(u32, u32, u32),
CMY(u32, u32, u32),
CMYK(u32, u32, u32, u32),
}
fn main() {
let color = Color::RGB(122, 17, 40); |
match color {
Color::Red => println!("The color is Red!"),
Color::Blue => println!("The color is Blue!"),
Color::Green => println!("The color is Green!"),
Color::RGB(r, g, b) =>
println!("Red: {}, green: {}, and blue: {}!", r, g, b),
Color::HSV(h, s, v) =>
println!("Hue: {}, saturation: {}, value:{}!", h, s, v),
Color::HSL(h, s, l) =>
println!("Hue: {}, saturation: {}, lightness: {}!", h, s, l),
Color::CMY(c, m, y) =>
println!("Cyan: {}, magenta: {}, yellow: {}!", c, m, y),
Color::CMYK(c, m, y, k) =>
println!("Cyan: {}, magenta: {}, yellow: {}, key (black): {}!",
c, m, y, k),
}
} |
println!("What color is it?"); |
provider.ts | import BN from 'bn.js';
export interface EthereumProvider {
send(method: 'eth_chainId', params: []): Promise<string>;
send(method: 'net_version', params: []): Promise<string>;
send(method: 'eth_getCode', params: [string, string]): Promise<string>;
send(method: 'eth_getStorageAt', params: [string, string, string]): Promise<string>;
send(method: 'eth_getTransactionByHash', params: [string]): Promise<null | EthereumTransaction>;
send(method: string, params: unknown[]): Promise<unknown>;
}
interface EthereumTransaction {
blockHash: string | null;
}
export async function getNetworkId(provider: EthereumProvider): Promise<string> {
return provider.send('net_version', []);
}
export async function getChainId(provider: EthereumProvider): Promise<number> {
const id = await provider.send('eth_chainId', []);
return new BN(id.replace(/^0x/, ''), 'hex').toNumber();
}
export async function getStorageAt(
provider: EthereumProvider,
address: string,
position: string,
block = 'latest',
): Promise<string> {
const storage = await provider.send('eth_getStorageAt', [address, position, block]);
const padded = storage.replace(/^0x/, '').padStart(64, '0');
return '0x' + padded;
}
export async function getCode(provider: EthereumProvider, address: string, block = 'latest'): Promise<string> {
return provider.send('eth_getCode', [address, block]);
} | const code = await getCode(provider, address, block);
return code !== '0x';
}
export async function getTransactionByHash(
provider: EthereumProvider,
txHash: string,
): Promise<EthereumTransaction | null> {
return provider.send('eth_getTransactionByHash', [txHash]);
}
export const networkNames: { [chainId in number]?: string } = Object.freeze({
1: 'mainnet',
2: 'morden',
3: 'ropsten',
4: 'rinkeby',
5: 'goerli',
42: 'kovan',
});
export async function isDevelopmentNetwork(provider: EthereumProvider): Promise<boolean> {
const chainId = await getChainId(provider);
// 1337 => ganache and geth --dev
// 31337 => hardhat network
return chainId === 1337 || chainId === 31337;
} |
export async function hasCode(provider: EthereumProvider, address: string, block?: string): Promise<boolean> { |
0006_auto_20200822_0116.py | # Generated by Django 2.2.15 on 2020-08-22 01:16
from django.db import migrations, models
class Migration(migrations.Migration): | ]
operations = [
migrations.AlterField(
model_name='track',
name='apple_music_id',
field=models.CharField(max_length=255, null=True, unique=True),
),
migrations.AlterField(
model_name='track',
name='spotify_id',
field=models.CharField(max_length=255, null=True, unique=True),
),
] |
dependencies = [
('main', '0005_auto_20200822_0053'), |
publisher.py | #!/usr/bin/env python
from boutiques.validator import validate_descriptor, ValidationError
from boutiques.logger import raise_error, print_info
from boutiques.zenodoHelper import ZenodoError, ZenodoHelper
from boutiques.util.utils import customSortDescriptorByKey
import simplejson as json
import requests
import os
class | ():
def __init__(self, descriptor_file_name, auth_token,
verbose=False, sandbox=False, no_int=False,
replace=False, id=None):
# Straightforward assignments
self.verbose = verbose
self.sandbox = sandbox
self.descriptor_file_name = descriptor_file_name
self.no_int = no_int
self.zenodo_access_token = auth_token
self.zenodo_helper = ZenodoHelper(sandbox, no_int, verbose)
# remove zenodo prefix of ID to update
try:
self.id_to_update = id.split(".", 1)[1] if id else None
except IndexError:
raise_error(ZenodoError, "Zenodo ID must be prefixed by "
"'zenodo', e.g. zenodo.123456")
# Validate and load descriptor
validate_descriptor(descriptor_file_name)
self.descriptor = json.loads(open(self.descriptor_file_name).read())
# Get relevant descriptor properties
self.url = self.descriptor.get('url')
self.tool_doi = self.descriptor.get('tool-doi')
self.descriptor_url = self.descriptor.get('descriptor-url')
self.online_platforms = self.descriptor.get('online-platform-urls')
# Get tool author and check that it's defined
if self.descriptor.get("author") is None:
raise_error(ZenodoError, "Tool must have an author to be "
"published. Add an 'author' property to your "
"descriptor.")
self.creator = self.descriptor['author']
# Get tool container and check that it's defined
if self.descriptor.get("container-image") is None:
raise_error(ZenodoError, "Tool must have a container image to be "
"published. Add a 'container-image' property to your "
"descriptor.")
# If in replace mode, make sure descriptor has a DOI and get the ID.
# Otherwise, make sure the descriptor does not have a DOI.
if replace:
if self.descriptor.get('doi') is None:
raise_error(ZenodoError, "To publish an updated version of a "
"previously published descriptor, the descriptor "
"must contain a DOI. This DOI will be replaced "
"with a new one.")
else:
self.id_to_update = self.descriptor.get('doi').split(".")[-1]
elif self.descriptor.get('doi') is not None:
raise_error(ZenodoError, "Descriptor already has a DOI. Please "
"remove it from the descriptor before publishing it "
"again, or use the --replace flag to publish an "
"updated version. A new DOI will be generated.")
self.config_file = os.path.join(os.path.expanduser('~'), ".boutiques")
# Fix Zenodo access token
self.zenodo_access_token = self.zenodo_helper \
.verify_zenodo_access_token(self.zenodo_access_token)
# Set Zenodo endpoint
self.zenodo_endpoint = self.zenodo_helper.get_zenodo_endpoint()
def zenodo_upload_descriptor(self, deposition_id):
# If in replace mode, remove the old DOI
if self.descriptor.get('doi'):
del self.descriptor['doi']
with open(self.descriptor_file_name, 'w') as fhandle:
fhandle.write(json.dumps(self.descriptor, indent=4))
data = {'filename': os.path.basename(self.descriptor_file_name)}
files = {'file': open(self.descriptor_file_name, 'rb')}
r = requests.post(self.zenodo_endpoint +
'/api/deposit/depositions/%s/files'
% deposition_id,
params={'access_token': self.zenodo_access_token},
data=data,
files=files)
# Status code is inconsistent with Zenodo documentation
if(r.status_code != 201):
raise_error(ZenodoError, "Cannot upload descriptor", r)
if(self.verbose):
print_info("Descriptor uploaded to Zenodo", r)
def publish(self):
if(not self.no_int):
prompt = ("The descriptor will be published to Zenodo, "
"this cannot be undone. Are you sure? (Y/n) ")
ret = input(prompt)
if ret.upper() != "Y":
return
if self.id_to_update is not None:
publish_update = True
else:
# perform a search to check if descriptor is an updated version
# of an existing one
from boutiques.searcher import Searcher
searcher = Searcher(self.descriptor.get("name"), self.verbose,
self.sandbox, exact_match=True)
r = searcher.zenodo_search()
publish_update = False
for hit in r.json()["hits"]["hits"]:
title = hit["metadata"]["title"]
if title == self.descriptor.get("name"):
self.id_to_update = hit["id"]
break
if self.id_to_update is not None:
if(not self.no_int):
prompt = ("Found an existing record with the same name, "
"would you like to update it? "
"(Y:Update existing / n:Publish new entry with "
"name {}) ".format(self.descriptor.get("name")))
ret = input(prompt)
if ret.upper() == "Y":
publish_update = True
else:
publish_update = True
if publish_update:
deposition_id = self.zenodo_helper.zenodo_deposit_updated_version(
self.create_metadata(), self.zenodo_access_token,
self.id_to_update)
else:
deposition_id = self.zenodo_helper.zenodo_deposit(
self.create_metadata(), self.zenodo_access_token)
self.zenodo_upload_descriptor(deposition_id)
self.doi = self.zenodo_helper.zenodo_publish(
self.zenodo_access_token, deposition_id, "Descriptor")
self.descriptor['doi'] = self.doi
with open(self.descriptor_file_name, "w") as f:
f.write(json.dumps(self.descriptor, indent=4))
if os.path.isfile(self.descriptor_file_name):
return "OK"
return False
def create_metadata(self):
data = {
'metadata': {
'title': self.descriptor['name'],
'upload_type': 'software',
'description': self.descriptor['description'] or "Boutiques "
"descriptor for {0}".format(
self.descriptor['name']),
'creators': [{'name': self.creator}],
'version': self.descriptor['tool-version'],
'keywords': ['Boutiques',
'schema-version:{0}'.
format(self.descriptor['schema-version'])]
}
}
keywords = data['metadata']['keywords']
if self.descriptor.get('tags'):
for key, value in self.descriptor.get('tags').items():
# Tag is of form 'tag-name': true, it is a single-string
if isinstance(value, bool):
keywords.append(key)
# Tag is of form 'tag-name':'tag-value', it is a key-value pair
if isinstance(value, str):
keywords.append(key + ":" + value)
# Tag is of form 'tag-name': ['value1', 'value2'], it is a
# list of key-value pairs
elif isinstance(value, list):
keywords += [key + ":" + item for item in value]
if self.descriptor.get('container-image'):
keywords.append(self.descriptor['container-image']['type'])
if self.descriptor.get('tests'):
keywords.append('tested')
if self.descriptor.get('deprecated-by-doi'):
keywords.append('deprecated')
if isinstance(self.descriptor['deprecated-by-doi'], str):
keywords.append('deprecated-by-doi:' +
self.descriptor['deprecated-by-doi'])
self.addRelatedIdentifiers(
data, self.descriptor['deprecated-by-doi'],
'isPreviousVersionOf')
if self.url is not None:
self.addRelatedIdentifiers(data, self.url, 'hasPart')
if self.online_platforms is not None:
for p in self.online_platforms:
self.addRelatedIdentifiers(data, p, 'hasPart')
if self.tool_doi is not None:
self.addRelatedIdentifiers(data, self.tool_doi, 'hasPart')
if self.descriptor_url is not None:
self.addRelatedIdentifiers(data, self.descriptor_url, 'hasPart')
return data
def addRelatedIdentifiers(self, data, identifier, relation):
if data['metadata'].get('related_identifiers') is None:
data['metadata']['related_identifiers'] = []
data['metadata']['related_identifiers'].append({
'identifier': identifier,
'relation': relation
})
| Publisher |
ApiServiceVehicle.js | import axios from 'axios';
const Vehicle_API_BASE_URL = 'http://localhost:8081/api/vehicle';
const Vehicles_API_BASE_URL = 'http://localhost:8081/api/vehicles';
const MESSAGE_API_BASE_URL = 'http://localhost:8081/api/message';
const headers = {
'Accept-Language': 'ar',
};
/* 'Content-Type': 'application/json',
'X-Auth-Token': '97e0d315477f435489cf04904c9d0e6co', */
class | {
fetchVehicles() {
return axios.get(Vehicles_API_BASE_URL );
}
fetchVehicleById(id) {
try {
// Load async data from an inexistent endpoint.
let vehcilerData = axios.get(Vehicle_API_BASE_URL + '/' + id);
return vehcilerData;
} catch (e) {
console.log(`😱 Axios request failed: ${e}`);
}
}
deleteVehicle(vehicleId) {
return axios.delete(Vehicle_API_BASE_URL + '/' + vehicleId);
}
addVehicle(vehicle) {
return axios.post(""+Vehicle_API_BASE_URL, vehicle);
}
editVehicle(vehicle) {
return axios.put(Vehicle_API_BASE_URL + '/' + vehicle.id, vehicle);
}
fetchMessageByName(messageName) {
return axios.get(MESSAGE_API_BASE_URL + '/' + messageName, {headers});
}
fetchTime() {
return axios.get(MESSAGE_API_BASE_URL + '/api/time', {headers});
}
}
export default new ApiServiceVehicle(); | ApiServiceVehicle |
stdio.go | // +build !cloudabi
| package fmt
import (
"os"
)
// Printf formats according to a format specifier and writes to standard output.
// It returns the number of bytes written and any write error encountered.
func Printf(format string, a ...interface{}) (n int, err error) {
return Fprintf(os.Stdout, format, a...)
}
// Print formats using the default formats for its operands and writes to standard output.
// Spaces are added between operands when neither is a string.
// It returns the number of bytes written and any write error encountered.
func Print(a ...interface{}) (n int, err error) {
return Fprint(os.Stdout, a...)
}
// Println formats using the default formats for its operands and writes to standard output.
// Spaces are always added between operands and a newline is appended.
// It returns the number of bytes written and any write error encountered.
func Println(a ...interface{}) (n int, err error) {
return Fprintln(os.Stdout, a...)
}
// Scan scans text read from standard input, storing successive
// space-separated values into successive arguments. Newlines count
// as space. It returns the number of items successfully scanned.
// If that is less than the number of arguments, err will report why.
func Scan(a ...interface{}) (n int, err error) {
return Fscan(os.Stdin, a...)
}
// Scanln is similar to Scan, but stops scanning at a newline and
// after the final item there must be a newline or EOF.
func Scanln(a ...interface{}) (n int, err error) {
return Fscanln(os.Stdin, a...)
}
// Scanf scans text read from standard input, storing successive
// space-separated values into successive arguments as determined by
// the format. It returns the number of items successfully scanned.
// If that is less than the number of arguments, err will report why.
// Newlines in the input must match newlines in the format.
// The one exception: the verb %c always scans the next rune in the
// input, even if it is a space (or tab etc.) or newline.
func Scanf(format string, a ...interface{}) (n int, err error) {
return Fscanf(os.Stdin, format, a...)
} | |
my-accounts.container.js | import ViewContact from './my-accounts.component'
import { compose } from 'recompose'
import { connect } from 'react-redux' |
const mapStateToProps = (state) => {
const myAccounts = accountsWithSendEtherInfoSelector(state)
return {
myAccounts,
}
}
export default compose(withRouter, connect(mapStateToProps))(ViewContact) | import { withRouter } from 'react-router-dom'
import { accountsWithSendEtherInfoSelector } from '../../../../selectors/selectors' |
main.rs | fn main() {
let s = String::from("halo!"); | println!("{}", ms);
let cms = ms.clone();
println!("ms is {} and cms is {}", ms, cms);
let mms = ms;
// from now, ms is moved and shall not be used
println!("mms is {}", mms);
// but for variable in stack, no need to clone
let x = "halo";
let y = x;
println!("x is {} and y is {}", x, y);
println!("s is {}", s);
take_ownership(s);
// the following code will raise a "use of moved value" Error
// println!("s is {}", s);
let n = 8;
copy_it(n);
println!("n is {}", n);
// get ownership
let gs = give_ownership();
println!("got gs as {}", gs);
let feed = String::from("whao");
let tgs = take_and_give_ownership(feed);
println!("tgs is {}", tgs);
// what if we need to return another value and keep use of the variable
// we can return a tuple
let mores = String::from("how long");
let (mores, len) = take_and_give_more(mores);
println!("{}'s length is {}", mores, len);
// but is this not so convenient,we can use *reference*
// in this way we cannot edit it
take_reference(&mores);
println!("after take reference, mores is {}", mores);
// how about we want to edit it?
let mut mus = String::from("I can be borrowed");
borrow_mutable(&mut mus);
println!("after borrow and mut, mus is {}", mus);
let mut onlyone = String::from("hello");
{
// we can borrow it from lower scope,while the higher has no borrow now
let borrowagain = &mut onlyone;
}
// and borrow in higher scope cause in this scope it's not borrowed yet
let notonlyone = &mut onlyone;
// we can only borrow mutable in scope once, the followed code will not work
// let butonlytwo = &mut onlyone;
// {
// let scopeborrow = &mut onlyone;
// }
// in fact, once we have a borrow, either mut or unmut,
// we cannot borrow mut once again
}
fn take_ownership(some_string: String) {
println!("{}", some_string);
}
fn give_ownership() -> String {
let s = String::from("got it");
s
}
fn take_and_give_ownership(some_string: String) -> String {
some_string
}
fn take_and_give_more(some_string: String) -> (String, usize) {
let len = some_string.len();
(some_string, len)
}
fn take_reference(some_string: &String) { // s is a reference to a String
// some_string.push_str(", waoo"); we cant editable a borrow variable
println!("got reference: {}", some_string);
} // Here, s goes out of scope. But because it does not have ownership of what
// it refers to, nothing happens.
fn borrow_mutable(some_string: &mut String) {
some_string.push_str(", waoo");
}
fn copy_it(some_num: i32) {
println!("{}", some_num);
}
// fn cant_return_ref() -> &String {
// let s = String::from("ok");
// &s
// } after the function call, s is gone, the refer may cause a dangling pointer, it's *forbidden*
fn can_return() -> String {
let s = String::from("ok");
s
} | println!("{}", s);
let mut ms = String::from("halo!");
ms.push_str(" walde~"); |
bezier.rs | #[cfg(feature = "dim3")]
use super::TriMesh;
use crate::math::Point;
use na::{self, RealField};
use std::iter;
use std::ptr;
// De-Casteljau algorithm.
// Evaluates the bezier curve with control points `control_points`.
#[doc(hidden)]
pub fn bezier_curve_at<N: RealField>(
control_points: &[Point<N>],
t: N,
cache: &mut Vec<Point<N>>,
) -> Point<N>
{
if control_points.len() > cache.len() {
let diff = control_points.len() - cache.len();
cache.extend(iter::repeat(Point::origin()).take(diff))
}
let cache = &mut cache[..];
let _1: N = na::convert(1.0);
let t_1 = _1 - t;
// XXX: not good if the objects are not POD.
unsafe {
ptr::copy_nonoverlapping(
control_points.as_ptr(),
cache.as_mut_ptr(),
control_points.len(),
);
}
for i in 1usize..control_points.len() {
for j in 0usize..control_points.len() - i {
cache[j] = cache[j] * t_1 + cache[j + 1].coords * t;
}
}
cache[0].clone()
}
// Evaluates the bezier curve with control points `control_points`.
#[cfg(feature = "dim3")]
#[doc(hidden)]
pub fn bezier_surface_at<N: RealField>(
control_points: &[Point<N>],
nupoints: usize,
nvpoints: usize,
u: N,
v: N,
ucache: &mut Vec<Point<N>>,
vcache: &mut Vec<Point<N>>,
) -> Point<N>
where
N: RealField,
{
if vcache.len() < nvpoints {
let diff = nvpoints - vcache.len();
vcache.extend(iter::repeat(Point::origin()).take(diff));
}
// FIXME: start with u or v, depending on which dimension has more control points.
let vcache = &mut vcache[..];
for i in 0..nvpoints {
let start = i * nupoints;
let end = start + nupoints;
vcache[i] = bezier_curve_at(&control_points[start..end], u, ucache);
}
bezier_curve_at(&vcache[0..nvpoints], v, ucache)
}
/// Given a set of control points, generates a (non-rational) Bezier curve.
pub fn bezier_curve<N: RealField>(control_points: &[Point<N>], nsubdivs: usize) -> Vec<Point<N>> |
/// Given a set of control points, generates a (non-rational) Bezier surface.
#[cfg(feature = "dim3")]
pub fn bezier_surface<N: RealField>(
control_points: &[Point<N>],
nupoints: usize,
nvpoints: usize,
usubdivs: usize,
vsubdivs: usize,
) -> TriMesh<N>
where
N: RealField,
{
assert!(nupoints * nvpoints == control_points.len());
let mut surface = super::unit_quad(usubdivs, vsubdivs);
{
let uvs = &surface.uvs.as_ref().unwrap()[..];
let coords = &mut surface.coords[..];
let mut ucache = Vec::new();
let mut vcache = Vec::new();
for j in 0..vsubdivs + 1 {
for i in 0..usubdivs + 1 {
let id = i + j * (usubdivs + 1);
coords[id] = bezier_surface_at(
control_points,
nupoints,
nvpoints,
uvs[id].x,
uvs[id].y,
&mut ucache,
&mut vcache,
)
}
}
// XXX: compute the normals manually.
surface.normals = None;
}
surface
}
| {
let mut coords = Vec::with_capacity(nsubdivs);
let mut cache = Vec::new();
let tstep = na::convert(1.0 / (nsubdivs as f64));
let mut t = na::zero::<N>();
while t <= na::one() {
coords.push(bezier_curve_at(control_points, t, &mut cache));
t = t + tstep;
}
coords
} |
miner_test.go | package api
import (
"io/ioutil"
"testing"
"time"
"unsafe"
"github.com/turtledex/TurtleDexCore/crypto"
"github.com/turtledex/TurtleDexCore/types"
)
// TestMinerGET checks the GET call to the /miner endpoint.
func TestMinerGET(t *testing.T) {
if testing.Short() {
t.SkipNow()
}
t.Parallel()
st, err := createServerTester(t.Name())
if err != nil {
t.Fatal(err)
}
defer st.server.panicClose()
// Get the api returned fields of the miner.
var mg MinerGET
err = st.getAPI("/miner", &mg)
if err != nil {
t.Fatal(err)
}
// Verify the correctness of the results.
blocksMined, staleBlocksMined := st.server.api.miner.BlocksMined()
if mg.BlocksMined != blocksMined {
t.Error("blocks mined did not succeed")
}
if mg.StaleBlocksMined != staleBlocksMined {
t.Error("stale blocks mined is incorrect")
}
if mg.CPUHashrate != st.server.api.miner.CPUHashrate() {
t.Error("mismatched cpu hashrate")
}
if mg.CPUMining != st.server.api.miner.CPUMining() {
t.Error("mismatched cpu miner status")
}
}
// TestMinerStartStop checks that the miner start and miner stop api endpoints
// toggle the cpu miner.
func TestMinerStartStop(t *testing.T) {
if testing.Short() {
t.SkipNow()
}
t.Parallel()
st, err := createServerTester(t.Name())
if err != nil {
t.Fatal(err)
}
defer st.server.panicClose()
// Start the cpu miner, give time for the first hashrate readings to
// appear.
err = st.stdGetAPI("/miner/start")
if err != nil {
t.Fatal(err)
}
time.Sleep(100 * time.Millisecond)
if !st.server.api.miner.CPUMining() {
t.Error("cpu miner is reporting that it is not on")
}
// Check the numbers through the status api call.
var mg MinerGET
err = st.getAPI("/miner", &mg)
if err != nil {
t.Fatal(err)
}
if !mg.CPUMining {
t.Error("cpu is not reporting through the api that it is mining")
}
// Stop the cpu miner and wait for the stop call to go through.
err = st.stdGetAPI("/miner/stop")
if err != nil {
t.Fatal(err)
}
time.Sleep(100 * time.Millisecond)
if st.server.api.miner.CPUMining() {
t.Error("cpu miner is reporting that it is on after being stopped")
}
// Check the numbers through the status api call.
err = st.getAPI("/miner", &mg)
if err != nil {
t.Fatal(err)
}
if mg.CPUMining |
}
// TestMinerHeader checks that the header GET and POST calls are
// useful tools for mining blocks.
func TestMinerHeader(t *testing.T) {
if testing.Short() {
t.SkipNow()
}
t.Parallel()
st, err := createServerTester(t.Name())
if err != nil {
t.Fatal(err)
}
defer st.server.panicClose()
startingHeight := st.cs.Height()
// Get a header that can be used for mining.
resp, err := HttpGET("http://" + st.server.listener.Addr().String() + "/miner/header")
if err != nil {
t.Fatal(err)
}
defer func() {
if err := resp.Body.Close(); err != nil {
t.Fatal(err)
}
}()
targetAndHeader, err := ioutil.ReadAll(resp.Body)
if err != nil {
t.Fatal(err)
}
// Twiddle the header bits until a block has been found.
//
// Note: this test treats the target as hardcoded, if the testing target is
// changed, this test will also need to be changed.
if types.RootTarget[0] != 128 {
t.Fatal("test will fail because the testing constants have been unexpectedly changed")
}
var header [80]byte
copy(header[:], targetAndHeader[32:])
headerHash := crypto.HashObject(header)
for headerHash[0] >= types.RootTarget[0] {
*(*uint64)(unsafe.Pointer(&header[32])) += types.ASICHardforkFactor
headerHash = crypto.HashObject(header)
}
// Submit the solved header through the api and check that the height of
// the blockchain increases.
resp, err = HttpPOST("http://"+st.server.listener.Addr().String()+"/miner/header", string(header[:]))
if err != nil {
t.Fatal(err)
}
defer func() {
if err := resp.Body.Close(); err != nil {
t.Fatal(err)
}
}()
time.Sleep(500 * time.Millisecond)
if st.cs.Height() != startingHeight+1 {
t.Errorf("block height did not increase after trying to mine a block through the api, started at %v and ended at %v", startingHeight, st.cs.Height())
}
}
| {
t.Error("cpu is not reporting through the api that it is mining")
} |
command_webhook_store.go | // Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package sqlstore
import (
"database/sql"
sq "github.com/mattermost/squirrel"
"github.com/pkg/errors"
"github.com/mattermost/mattermost-server/v6/model"
"github.com/mattermost/mattermost-server/v6/shared/mlog"
"github.com/mattermost/mattermost-server/v6/store"
)
type SqlCommandWebhookStore struct {
*SqlStore
}
func | (sqlStore *SqlStore) store.CommandWebhookStore {
return &SqlCommandWebhookStore{sqlStore}
}
func (s SqlCommandWebhookStore) Save(webhook *model.CommandWebhook) (*model.CommandWebhook, error) {
if webhook.Id != "" {
return nil, store.NewErrInvalidInput("CommandWebhook", "id", webhook.Id)
}
webhook.PreSave()
if err := webhook.IsValid(); err != nil {
return nil, err
}
if _, err := s.GetMasterX().NamedExec(`INSERT INTO CommandWebhooks
(Id,CreateAt,CommandId,UserId,ChannelId,RootId,UseCount)
Values
(:Id, :CreateAt, :CommandId, :UserId, :ChannelId, :RootId, :UseCount)`, webhook); err != nil {
return nil, errors.Wrapf(err, "save: id=%s", webhook.Id)
}
return webhook, nil
}
func (s SqlCommandWebhookStore) Get(id string) (*model.CommandWebhook, error) {
var webhook model.CommandWebhook
exptime := model.GetMillis() - model.CommandWebhookLifetime
query := s.getQueryBuilder().
Select("*").
From("CommandWebhooks").
Where(sq.Eq{"Id": id}).
Where(sq.Gt{"CreateAt": exptime})
queryString, args, err := query.ToSql()
if err != nil {
return nil, errors.Wrap(err, "get_tosql")
}
if err := s.GetReplicaX().Get(&webhook, queryString, args...); err != nil {
if err == sql.ErrNoRows {
return nil, store.NewErrNotFound("CommandWebhook", id)
}
return nil, errors.Wrapf(err, "get: id=%s", id)
}
return &webhook, nil
}
func (s SqlCommandWebhookStore) TryUse(id string, limit int) error {
query := s.getQueryBuilder().
Update("CommandWebhooks").
Set("UseCount", sq.Expr("UseCount + 1")).
Where(sq.Eq{"Id": id}).
Where(sq.Lt{"UseCount": limit})
queryString, args, err := query.ToSql()
if err != nil {
return errors.Wrap(err, "tryuse_tosql")
}
if sqlResult, err := s.GetMasterX().Exec(queryString, args...); err != nil {
return errors.Wrapf(err, "tryuse: id=%s limit=%d", id, limit)
} else if rows, _ := sqlResult.RowsAffected(); rows == 0 {
return store.NewErrInvalidInput("CommandWebhook", "id", id)
}
return nil
}
func (s SqlCommandWebhookStore) Cleanup() {
mlog.Debug("Cleaning up command webhook store.")
exptime := model.GetMillis() - model.CommandWebhookLifetime
query := s.getQueryBuilder().
Delete("CommandWebhooks").
Where(sq.Lt{"CreateAt": exptime})
queryString, args, err := query.ToSql()
if err != nil {
mlog.Error("Failed to build query when trying to perform a cleanup in command webhook store.")
return
}
if _, err := s.GetMasterX().Exec(queryString, args...); err != nil {
mlog.Error("Unable to cleanup command webhook store.")
}
}
| newSqlCommandWebhookStore |
post_update_user_name_controller_gen.go | // Package user ...
// generated version: devel
package user
import (
"github.com/go-generalize/api_gen/server_generator/sample/props"
"github.com/labstack/echo/v4"
)
// PostUpdateUserNameController ...
type PostUpdateUserNameController struct {
*props.ControllerProps
}
// NewPostUpdateUserNameController ...
func NewPostUpdateUserNameController(cp *props.ControllerProps) *PostUpdateUserNameController {
p := &PostUpdateUserNameController{
ControllerProps: cp,
}
return p
}
// PostUpdateUserName ...
// @Summary WIP
// @Description WIP
// @Accept json
// @Produce json
// @Param Name body string WIP:${isRequire} WIP:${description}
// @Success 200 {object} PostUpdateUserNameResponse
// @Failure 400 {object} wrapper.APIError
// @Failure 500 {object} wrapper.APIError | // @Router /service/user/update_user_name [POST]
func (p *PostUpdateUserNameController) PostUpdateUserName(
c echo.Context, req *PostUpdateUserNameRequest,
) (res *PostUpdateUserNameResponse, err error) {
// API Error Usage: github.com/go-generalize/api_gen/server_generator/sample/wrapper
//
// return nil, wrapper.NewAPIError(http.StatusBadRequest)
//
// return nil, wrapper.NewAPIError(http.StatusBadRequest).SetError(err)
//
// body := map[string]interface{}{
// "code": http.StatusBadRequest,
// "message": "invalid request parameter.",
// }
// return nil, wrapper.NewAPIError(http.StatusBadRequest, body).SetError(err)
panic("require implements.") // FIXME require implements.
} | |
cloud_pool.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class CloudPool(Model):
"""
A pool in the Azure Batch service.
:param id: A string that uniquely identifies the pool within the account.
The id can contain any combination of alphanumeric characters including
hyphens and underscores, and cannot contain more than 64 characters.
:type id: str
:param display_name: The display name for the pool.
:type display_name: str
:param url: The URL of the pool.
:type url: str
:param e_tag: The ETag of the pool.
:type e_tag: str
:param last_modified: The last modified time of the pool.
:type last_modified: datetime
:param creation_time: The creation time of the pool.
:type creation_time: datetime
:param state: The current state of the pool. Possible values include:
'active', 'deleting', 'upgrading'
:type state: str or :class:`PoolState <azure.batch.models.PoolState>`
:param state_transition_time: The time at which the pool entered its
current state.
:type state_transition_time: datetime
:param allocation_state: Whether the pool is resizing. Possible values
include: 'steady', 'resizing', 'stopping'
:type allocation_state: str or :class:`AllocationState
<azure.batch.models.AllocationState>`
:param allocation_state_transition_time: The time at which the pool
entered its current allocation state.
:type allocation_state_transition_time: datetime
:param vm_size: The size of virtual machines in the pool. All virtual
machines in a pool are the same size.
:type vm_size: str
:param cloud_service_configuration: The cloud service configuration for
the pool. This property and VirtualMachineConfiguration are mutually
exclusive and one of the properties must be specified.
:type cloud_service_configuration: :class:`CloudServiceConfiguration
<azure.batch.models.CloudServiceConfiguration>`
:param virtual_machine_configuration: The virtual machine configuration
for the pool. This property and CloudServiceConfiguration are mutually
exclusive and one of the properties must be specified.
:type virtual_machine_configuration: :class:`VirtualMachineConfiguration
<azure.batch.models.VirtualMachineConfiguration>`
:param resize_timeout: The timeout for allocation of compute nodes to the
pool. In a Get Pool operation, this is the timeout for the most recent
resize operation. The default value is 10 minutes.
:type resize_timeout: timedelta
:param resize_error: Details of any error encountered while performing
the last resize on the pool. This property is set only if an error
occurred during the last pool resize, and only when the pool
AllocationState is Steady.
:type resize_error: :class:`ResizeError <azure.batch.models.ResizeError>`
:param current_dedicated: The number of compute nodes currently in the
pool.
:type current_dedicated: int
:param target_dedicated: The desired number of compute nodes in the pool.
This property must have the default value if EnableAutoScale is true. It
is required if EnableAutoScale is false.
:type target_dedicated: int
:param enable_auto_scale: Whether the pool size should automatically
adjust over time. If true, the AutoScaleFormula property must be set. If
false, the TargetDedicated property must be set.
:type enable_auto_scale: bool
:param auto_scale_formula: A formula for the desired number of compute
nodes in the pool.
:type auto_scale_formula: str
:param auto_scale_evaluation_interval: A time interval for the desired
AutoScale evaluation period in the pool.
:type auto_scale_evaluation_interval: timedelta
:param auto_scale_run: The results and errors from the last execution of
the autoscale formula.
:type auto_scale_run: :class:`AutoScaleRun
<azure.batch.models.AutoScaleRun>`
:param enable_inter_node_communication: Whether the pool permits direct
communication between nodes.
:type enable_inter_node_communication: bool
:param start_task: A task specified to run on each compute node as it
joins the pool.
:type start_task: :class:`StartTask <azure.batch.models.StartTask>`
:param certificate_references: The list of certificates to be installed
on each compute node in the pool.
:type certificate_references: list of :class:`CertificateReference
<azure.batch.models.CertificateReference>`
:param application_package_references: The list of application packages
to be installed on each compute node in the pool.
:type application_package_references: list of
:class:`ApplicationPackageReference
<azure.batch.models.ApplicationPackageReference>`
:param max_tasks_per_node: The maximum number of tasks that can run
concurrently on a single compute node in the pool.
:type max_tasks_per_node: int
:param task_scheduling_policy: How the Batch service distributes tasks
between compute nodes in the pool.
:type task_scheduling_policy: :class:`TaskSchedulingPolicy
<azure.batch.models.TaskSchedulingPolicy>`
:param metadata: A list of name-value pairs associated with the pool as
metadata.
:type metadata: list of :class:`MetadataItem
<azure.batch.models.MetadataItem>`
:param stats: Utilization and resource usage statistics for the entire
lifetime of the pool.
:type stats: :class:`PoolStatistics <azure.batch.models.PoolStatistics>`
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'e_tag': {'key': 'eTag', 'type': 'str'},
'last_modified': {'key': 'lastModified', 'type': 'iso-8601'},
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'state': {'key': 'state', 'type': 'PoolState'},
'state_transition_time': {'key': 'stateTransitionTime', 'type': 'iso-8601'},
'allocation_state': {'key': 'allocationState', 'type': 'AllocationState'},
'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'cloud_service_configuration': {'key': 'cloudServiceConfiguration', 'type': 'CloudServiceConfiguration'},
'virtual_machine_configuration': {'key': 'virtualMachineConfiguration', 'type': 'VirtualMachineConfiguration'},
'resize_timeout': {'key': 'resizeTimeout', 'type': 'duration'},
'resize_error': {'key': 'resizeError', 'type': 'ResizeError'},
'current_dedicated': {'key': 'currentDedicated', 'type': 'int'},
'target_dedicated': {'key': 'targetDedicated', 'type': 'int'},
'enable_auto_scale': {'key': 'enableAutoScale', 'type': 'bool'},
'auto_scale_formula': {'key': 'autoScaleFormula', 'type': 'str'},
'auto_scale_evaluation_interval': {'key': 'autoScaleEvaluationInterval', 'type': 'duration'},
'auto_scale_run': {'key': 'autoScaleRun', 'type': 'AutoScaleRun'},
'enable_inter_node_communication': {'key': 'enableInterNodeCommunication', 'type': 'bool'},
'start_task': {'key': 'startTask', 'type': 'StartTask'},
'certificate_references': {'key': 'certificateReferences', 'type': '[CertificateReference]'},
'application_package_references': {'key': 'applicationPackageReferences', 'type': '[ApplicationPackageReference]'},
'max_tasks_per_node': {'key': 'maxTasksPerNode', 'type': 'int'},
'task_scheduling_policy': {'key': 'taskSchedulingPolicy', 'type': 'TaskSchedulingPolicy'},
'metadata': {'key': 'metadata', 'type': '[MetadataItem]'},
'stats': {'key': 'stats', 'type': 'PoolStatistics'},
}
def __init__(self, id=None, display_name=None, url=None, e_tag=None, last_modified=None, creation_time=None, state=None, state_transition_time=None, allocation_state=None, allocation_state_transition_time=None, vm_size=None, cloud_service_configuration=None, virtual_machine_configuration=None, resize_timeout=None, resize_error=None, current_dedicated=None, target_dedicated=None, enable_auto_scale=None, auto_scale_formula=None, auto_scale_evaluation_interval=None, auto_scale_run=None, enable_inter_node_communication=None, start_task=None, certificate_references=None, application_package_references=None, max_tasks_per_node=None, task_scheduling_policy=None, metadata=None, stats=None):
| self.id = id
self.display_name = display_name
self.url = url
self.e_tag = e_tag
self.last_modified = last_modified
self.creation_time = creation_time
self.state = state
self.state_transition_time = state_transition_time
self.allocation_state = allocation_state
self.allocation_state_transition_time = allocation_state_transition_time
self.vm_size = vm_size
self.cloud_service_configuration = cloud_service_configuration
self.virtual_machine_configuration = virtual_machine_configuration
self.resize_timeout = resize_timeout
self.resize_error = resize_error
self.current_dedicated = current_dedicated
self.target_dedicated = target_dedicated
self.enable_auto_scale = enable_auto_scale
self.auto_scale_formula = auto_scale_formula
self.auto_scale_evaluation_interval = auto_scale_evaluation_interval
self.auto_scale_run = auto_scale_run
self.enable_inter_node_communication = enable_inter_node_communication
self.start_task = start_task
self.certificate_references = certificate_references
self.application_package_references = application_package_references
self.max_tasks_per_node = max_tasks_per_node
self.task_scheduling_policy = task_scheduling_policy
self.metadata = metadata
self.stats = stats |
|
0001_initial.py | # Generated by Django 2.2.18 on 2021-03-21 15:42
import django.contrib.auth.models
from django.db import migrations, models
import django.utils.timezone
import uuid
class Migration(migrations.Migration):
| initial = True
dependencies = [
('auth', '0011_update_proxy_permissions'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
(
'is_superuser',
models.BooleanField(
default=False,
help_text='Designates that this user has all permissions without explicitly assigning them.',
verbose_name='superuser status',
),
),
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
(
'is_staff',
models.BooleanField(
default=False,
help_text='Designates whether the user can log into this admin site.',
verbose_name='staff status',
),
),
(
'is_active',
models.BooleanField(
default=True,
help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.',
verbose_name='active',
),
),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('username', models.CharField(max_length=45, unique=True, verbose_name='用户名')),
('password', models.CharField(blank=True, max_length=255, null=True, verbose_name='登录密码')),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('avatar_url', models.CharField(blank=True, max_length=255, null=True, verbose_name='头像url')),
(
'role',
models.CharField(
choices=[('GUEST', '游客'), ('USER', '普通用户'), ('ADMINISTRATOR', '超级管理员')],
default='USER',
max_length=45,
verbose_name='角色',
),
),
(
'status',
models.CharField(
choices=[('OK', '正常'), ('DISABLED', '禁用')], default='OK', max_length=45, verbose_name='状态'
),
),
('size_limit', models.BigIntegerField(default=104857600, verbose_name='单文件大小限制(B)')),
('total_size_limit', models.BigIntegerField(default=-1, verbose_name='总文件大小限制(B)')),
('total_size', models.BigIntegerField(verbose_name='文件总容量(B)')),
('last_time', models.DateTimeField(auto_now=True, verbose_name='上次登录的时间')),
('last_ip', models.CharField(blank=True, max_length=128, null=True, verbose_name='上次登录的IP')),
(
'groups',
models.ManyToManyField(
blank=True,
help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.',
related_name='user_set',
related_query_name='user',
to='auth.Group',
verbose_name='groups',
),
),
(
'user_permissions',
models.ManyToManyField(
blank=True,
help_text='Specific permissions for this user.',
related_name='user_set',
related_query_name='user',
to='auth.Permission',
verbose_name='user permissions',
),
),
],
options={'verbose_name': 'user', 'verbose_name_plural': 'users', 'abstract': False,},
managers=[('objects', django.contrib.auth.models.UserManager()),],
),
]
|
|
min-cost-climbing-stairs.py | from typing import List |
class Solution:
def minCostClimbingStairs(self, cost: List[int]) -> int:
dp = [0x7FFFFFFF for _ in range(len(cost)+2)]
dp[0] = dp[1] = 0
for i, v in enumerate(cost):
v += dp[i]
dp[i+1] = min(dp[i+1], v)
dp[i+2] = min(dp[i+2], v)
return dp[len(cost)] | |
log.go | package tunnel
import (
"fmt"
C "github.com/Dreamacro/clash/constant"
log "github.com/sirupsen/logrus"
)
type Log struct {
LogLevel C.LogLevel
Payload string
}
func (l *Log) Type() string {
return l.LogLevel.String()
}
func print(data Log) {
switch data.LogLevel {
case C.INFO:
log.Infoln(data.Payload)
case C.WARNING:
log.Warnln(data.Payload)
case C.ERROR:
log.Errorln(data.Payload)
case C.DEBUG:
log.Debugln(data.Payload)
}
}
func (t *Tunnel) subscribeLogs() {
sub, err := t.observable.Subscribe()
if err != nil {
log.Fatalf("Can't subscribe tunnel log: %s", err.Error())
}
for elm := range sub {
data := elm.(Log)
if data.LogLevel <= t.logLevel {
print(data)
}
}
}
func | (logLevel C.LogLevel, format string, v ...interface{}) Log {
return Log{
LogLevel: logLevel,
Payload: fmt.Sprintf(format, v...),
}
}
| newLog |
push.go | // Copyright 2015 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package push provides functions to push metrics to a Pushgateway. It uses a
// builder approach. Create a Pusher with New and then add the various options
// by using its methods, finally calling Add or Push, like this:
//
// // Easy case:
// push.New("http://example.org/metrics", "my_job").Gatherer(myRegistry).Push()
//
// // Complex case:
// push.New("http://example.org/metrics", "my_job").
// Collector(myCollector1).
// Collector(myCollector2).
// Grouping("zone", "xy").
// Client(&myHTTPClient).
// BasicAuth("top", "secret").
// Add()
//
// See the examples section for more detailed examples.
//
// See the documentation of the Pushgateway to understand the meaning of
// the grouping key and the differences between Push and Add:
// https://github.com/prometheus/pushgateway
package push
import (
"bytes"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"strings"
"gx/ipfs/QmTWEDbLX2d4NiMgPks6J2crRz47BamBtP16WiFuTL6Ydm/common/expfmt"
"gx/ipfs/QmTWEDbLX2d4NiMgPks6J2crRz47BamBtP16WiFuTL6Ydm/common/model"
"gx/ipfs/QmTQuFQWHAWy4wMH6ZyPfGiawA5u9T8rs79FENoV8yXaoS/client_golang/prometheus"
)
const contentTypeHeader = "Content-Type"
// Pusher manages a push to the Pushgateway. Use New to create one, configure it
// with its methods, and finally use the Add or Push method to push.
type Pusher struct {
error error
url, job string
grouping map[string]string
gatherers prometheus.Gatherers
registerer prometheus.Registerer
client *http.Client
useBasicAuth bool
username, password string
}
// New creates a new Pusher to push to the provided URL with the provided job
// name. You can use just host:port or ip:port as url, in which case “http://”
// is added automatically. Alternatively, include the schema in the
// URL. However, do not include the “/metrics/jobs/…” part.
//
// Note that until https://github.com/prometheus/pushgateway/issues/97 is
// resolved, a “/” character in the job name is prohibited.
func New(url, job string) *Pusher { | reg = prometheus.NewRegistry()
err error
)
if !strings.Contains(url, "://") {
url = "http://" + url
}
if strings.HasSuffix(url, "/") {
url = url[:len(url)-1]
}
if strings.Contains(job, "/") {
err = fmt.Errorf("job contains '/': %s", job)
}
return &Pusher{
error: err,
url: url,
job: job,
grouping: map[string]string{},
gatherers: prometheus.Gatherers{reg},
registerer: reg,
client: &http.Client{},
}
}
// Push collects/gathers all metrics from all Collectors and Gatherers added to
// this Pusher. Then, it pushes them to the Pushgateway configured while
// creating this Pusher, using the configured job name and any added grouping
// labels as grouping key. All previously pushed metrics with the same job and
// other grouping labels will be replaced with the metrics pushed by this
// call. (It uses HTTP method “PUT” to push to the Pushgateway.)
//
// Push returns the first error encountered by any method call (including this
// one) in the lifetime of the Pusher.
func (p *Pusher) Push() error {
return p.push("PUT")
}
// Add works like push, but only previously pushed metrics with the same name
// (and the same job and other grouping labels) will be replaced. (It uses HTTP
// method “POST” to push to the Pushgateway.)
func (p *Pusher) Add() error {
return p.push("POST")
}
// Gatherer adds a Gatherer to the Pusher, from which metrics will be gathered
// to push them to the Pushgateway. The gathered metrics must not contain a job
// label of their own.
//
// For convenience, this method returns a pointer to the Pusher itself.
func (p *Pusher) Gatherer(g prometheus.Gatherer) *Pusher {
p.gatherers = append(p.gatherers, g)
return p
}
// Collector adds a Collector to the Pusher, from which metrics will be
// collected to push them to the Pushgateway. The collected metrics must not
// contain a job label of their own.
//
// For convenience, this method returns a pointer to the Pusher itself.
func (p *Pusher) Collector(c prometheus.Collector) *Pusher {
if p.error == nil {
p.error = p.registerer.Register(c)
}
return p
}
// Grouping adds a label pair to the grouping key of the Pusher, replacing any
// previously added label pair with the same label name. Note that setting any
// labels in the grouping key that are already contained in the metrics to push
// will lead to an error.
//
// For convenience, this method returns a pointer to the Pusher itself.
//
// Note that until https://github.com/prometheus/pushgateway/issues/97 is
// resolved, this method does not allow a “/” character in the label value.
func (p *Pusher) Grouping(name, value string) *Pusher {
if p.error == nil {
if !model.LabelName(name).IsValid() {
p.error = fmt.Errorf("grouping label has invalid name: %s", name)
return p
}
if strings.Contains(value, "/") {
p.error = fmt.Errorf("value of grouping label %s contains '/': %s", name, value)
return p
}
p.grouping[name] = value
}
return p
}
// Client sets a custom HTTP client for the Pusher. For convenience, this method
// returns a pointer to the Pusher itself.
func (p *Pusher) Client(c *http.Client) *Pusher {
p.client = c
return p
}
// BasicAuth configures the Pusher to use HTTP Basic Authentication with the
// provided username and password. For convenience, this method returns a
// pointer to the Pusher itself.
func (p *Pusher) BasicAuth(username, password string) *Pusher {
p.useBasicAuth = true
p.username = username
p.password = password
return p
}
func (p *Pusher) push(method string) error {
if p.error != nil {
return p.error
}
urlComponents := []string{url.QueryEscape(p.job)}
for ln, lv := range p.grouping {
urlComponents = append(urlComponents, ln, lv)
}
pushURL := fmt.Sprintf("%s/metrics/job/%s", p.url, strings.Join(urlComponents, "/"))
mfs, err := p.gatherers.Gather()
if err != nil {
return err
}
buf := &bytes.Buffer{}
enc := expfmt.NewEncoder(buf, expfmt.FmtProtoDelim)
// Check for pre-existing grouping labels:
for _, mf := range mfs {
for _, m := range mf.GetMetric() {
for _, l := range m.GetLabel() {
if l.GetName() == "job" {
return fmt.Errorf("pushed metric %s (%s) already contains a job label", mf.GetName(), m)
}
if _, ok := p.grouping[l.GetName()]; ok {
return fmt.Errorf(
"pushed metric %s (%s) already contains grouping label %s",
mf.GetName(), m, l.GetName(),
)
}
}
}
enc.Encode(mf)
}
req, err := http.NewRequest(method, pushURL, buf)
if err != nil {
return err
}
if p.useBasicAuth {
req.SetBasicAuth(p.username, p.password)
}
req.Header.Set(contentTypeHeader, string(expfmt.FmtProtoDelim))
resp, err := p.client.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != 202 {
body, _ := ioutil.ReadAll(resp.Body) // Ignore any further error as this is for an error message only.
return fmt.Errorf("unexpected status code %d while pushing to %s: %s", resp.StatusCode, pushURL, body)
}
return nil
} | var ( |
lavalle_rrts.py | #!/usr/bin/env python
# rrt.py
# This program generates a simple rapidly
# exploring random tree (RRT) in a rectangular region.
#
# Written by Steve LaValle
# May 2011
import sys, random, math, pygame
from pygame.locals import *
from math import sqrt,cos,sin,atan2
import heapq
import numpy as np
#constants
XDIM = 500
YDIM = 500
WINSIZE = np.array([XDIM, YDIM])
MAX_STEP_SIZE = 12
NUMNODES = 5000
NUM_OBSTACLES = 30
OBSTACLE_WIDTH = 80
OBSTACLE_HEIGHT = 80
RAND_SEARCH_PROB = 0.25
GOAL_TOL = 1e-3
start = WINSIZE/2
goal1 = np.zeros((1,2))
goal2 = WINSIZE.reshape((1,2))
def step_from_to(p1,p2):
|
def main():
#initialize and prepare screen
pygame.init()
screen = pygame.display.set_mode(WINSIZE)
pygame.display.set_caption('RRT S. LaValle May 2011')
white = 255, 240, 200
black = 20, 20, 40
screen.fill(black)
obstacles = []
for _ in range(NUM_OBSTACLES):
rand_rect = np.random.rand(4)*np.array([XDIM,YDIM,OBSTACLE_WIDTH,OBSTACLE_HEIGHT]) + np.ones(4)*MAX_STEP_SIZE
if (rand_rect[:2] < start).all() and (rand_rect[:2]+rand_rect[2:] > start).all():
print('skip!')
continue
if (rand_rect[:2] < goal1).all() and (rand_rect[:2]+rand_rect[2:] > goal1).all():
print('skip!')
continue
if (rand_rect[:2] < goal2).all() and (rand_rect[:2]+rand_rect[2:] > goal2).all():
print('skip!')
continue
obstacles.append(rand_rect)
for idx,o in enumerate(obstacles):
weight = idx/(len(obstacles)-1)
color = (240-240*weight,128,40+(255-40)*weight)
screen.fill(color,o)
nodes = np.array([start])[:np.newaxis]
connections = np.array([0])
print(nodes.shape,connections.shape)
for goal in [goal1,goal2]:
searching = True
prev_node = None
for i in range(NUMNODES):
if searching:
# get a random configuration
#valid = False
#while not valid:
if prev_node is None:
rand = np.random.rand(1,2)*WINSIZE if np.random.rand() > RAND_SEARCH_PROB else goal
else:
rand = prev_node
#valid = True
#for o in obstacles:
#if (o[:2] < rand[0]).all() and (o[:2]+o[2:] > rand[0]).all():
#valid = False
#break
dists = np.linalg.norm(nodes-rand,axis=1)
#print(dists)
closest_idx = np.argmin(dists)
closest = nodes[closest_idx]
new_node = step_from_to(closest,rand)
valid_new_node = True
for o in obstacles:
if (o[:2] < new_node[0]).all() and (o[:2]+o[2:] > new_node[0]).all():
valid_new_node = False
break
if valid_new_node:
if (rand == goal).all() and np.linalg.norm(new_node - goal) < dists.min():
prev_node = new_node
#print('new')
else:
prev_node = None
#print('cancel')
if np.linalg.norm(new_node - goal) > GOAL_TOL:
#print(goal,new_node)
nodes = np.append(nodes,new_node,0)
connections = np.append(connections,closest_idx)
#print(np.linalg.norm(new_node - goal),nodes.shape,connections.shape)
pygame.draw.line(screen,white,np.squeeze(closest),np.squeeze(new_node))
else:
print(new_node,goal)
path_node = closest_idx
while path_node != 0:
print(path_node,end=',',flush=True)
path_node = connections[path_node]
print(0)
searching = False
break
else:
prev_node = None
pygame.display.update()
#print i, " ", nodes
for e in pygame.event.get():
if e.type == QUIT or (e.type == KEYUP and e.key == K_ESCAPE):
sys.exit("Leaving because you requested it.")
while True:
for e in pygame.event.get():
if e.type == QUIT or (e.type == KEYUP and e.key == K_ESCAPE):
sys.exit("Leaving because you requested it.")
# if python says run, then we should run
if __name__ == '__main__':
main()
| if np.linalg.norm(p1-p2) < MAX_STEP_SIZE:
return p2
else:
diff = p2-p1
return p1 + MAX_STEP_SIZE*diff/np.linalg.norm(diff) |
optim.py | import numpy as np
"""
This file implements various first-order update rules that are commonly used
for training neural networks. Each update rule accepts current weights and the
gradient of the loss with respect to those weights and produces the next set of
weights. Each update rule has the same interface:
def update(w, dw, config=None):
Inputs:
- w: A numpy array giving the current weights.
- dw: A numpy array of the same shape as w giving the gradient of the
loss with respect to w.
- config: A dictionary containing hyperparameter values such as learning
rate, momentum, etc. If the update rule requires caching values over many
iterations, then config will also hold these cached values.
Returns:
- next_w: The next point after the update.
- config: The config dictionary to be passed to the next iteration of the
update rule.
NOTE: For most update rules, the default learning rate will probably not
perform well; however the default values of the other hyperparameters should
work well for a variety of different problems.
For efficiency, update rules may perform in-place updates, mutating w and
setting next_w equal to w.
"""
def sgd(w, dw, config=None):
"""
Performs vanilla stochastic gradient descent.
config format:
- learning_rate: Scalar learning rate.
"""
if config is None:
config = {}
config.setdefault("learning_rate", 1e-2)
w -= config["learning_rate"] * dw
return w, config
def sgd_momentum(w, dw, config=None):
"""
Performs stochastic gradient descent with momentum.
config format:
- learning_rate: Scalar learning rate.
- momentum: Scalar between 0 and 1 giving the momentum value.
Setting momentum = 0 reduces to sgd.
- velocity: A numpy array of the same shape as w and dw used to store a
moving average of the gradients.
"""
if config is None:
config = {}
config.setdefault("learning_rate", 1e-2)
config.setdefault("momentum", 0.9)
v = config.get("velocity", np.zeros_like(w))
next_w=None
###########################################################################
# TODO: Implement the momentum update formula. Store the updated value in #
# the next_w variable. You should also use and update the velocity v. #
###########################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
v= config["momentum"]*v - config["learning_rate"]*dw
next_w=w+v
pass
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
###########################################################################
# END OF YOUR CODE #
###########################################################################
config["velocity"] = v
return next_w, config
def rmsprop(w, dw, config=None):
"""
Uses the RMSProp update rule, which uses a moving average of squared
gradient values to set adaptive per-parameter learning rates.
config format:
- learning_rate: Scalar learning rate.
- decay_rate: Scalar between 0 and 1 giving the decay rate for the squared
gradient cache.
- epsilon: Small scalar used for smoothing to avoid dividing by zero.
- cache: Moving average of second moments of gradients.
"""
if config is None:
config = {}
config.setdefault("learning_rate", 1e-2)
config.setdefault("decay_rate", 0.99)
config.setdefault("epsilon", 1e-8)
config.setdefault("cache", np.zeros_like(w))
next_w = None
###########################################################################
# TODO: Implement the RMSprop update formula, storing the next value of w #
# in the next_w variable. Don't forget to update cache value stored in #
# config['cache']. #
###########################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
cache=config["cache"]
cache=config["decay_rate"]*cache + (1-config["decay_rate"])*dw**2
w+=(-config["learning_rate"]*dw)/(np.sqrt(cache)+config["epsilon"])
next_w=w
config["cache"]=cache
pass
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
###########################################################################
# END OF YOUR CODE #
###########################################################################
return next_w, config
def | (w, dw, config=None):
"""
Uses the Adam update rule, which incorporates moving averages of both the
gradient and its square and a bias correction term.
config format:
- learning_rate: Scalar learning rate.
- beta1: Decay rate for moving average of first moment of gradient.
- beta2: Decay rate for moving average of second moment of gradient.
- epsilon: Small scalar used for smoothing to avoid dividing by zero.
- m: Moving average of gradient.
- v: Moving average of squared gradient.
- t: Iteration number.
"""
if config is None:
config = {}
config.setdefault("learning_rate", 1e-3)
config.setdefault("beta1", 0.9)
config.setdefault("beta2", 0.999)
config.setdefault("epsilon", 1e-8)
config.setdefault("m", np.zeros_like(w))
config.setdefault("v", np.zeros_like(w))
config.setdefault("t", 0)
next_w = None
###########################################################################
# TODO: Implement the Adam update formula, storing the next value of w in #
# the next_w variable. Don't forget to update the m, v, and t variables #
# stored in config. #
# #
# NOTE: In order to match the reference output, please modify t _before_ #
# using it in any calculations. #
###########################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
lr=config["learning_rate"]
b1,b2,ep=config["beta1"],config["beta2"],config["epsilon"]
m=config["m"]
v=config["v"]
t=config["t"]
t+=1
m=b1*m+(1-b1)*dw
mt=m/(1-b1**t)
v=b2*v+(1-b2)*dw**2
vt=v/(1-b2**t)
w-=(lr*mt)/(np.sqrt(vt)+ep)
config["m"],config["v"],config["t"]=m,v,t
next_w=w
pass
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
###########################################################################
# END OF YOUR CODE #
###########################################################################
return next_w, config
| adam |
time_test.go | package utils
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
)
// TestParseDurationString_ShouldParseDurationString checks the happy path for
// a single hour-unit duration string.
func TestParseDurationString_ShouldParseDurationString(t *testing.T) {
	duration, err := ParseDurationString("1h")
	assert.NoError(t, err)
	assert.Equal(t, 60*time.Minute, duration)
}
// TestParseDurationString_ShouldParseBlankString checks that an empty string
// parses as a zero duration rather than an error.
func TestParseDurationString_ShouldParseBlankString(t *testing.T) {
	duration, err := ParseDurationString("")
	assert.NoError(t, err)
	assert.Equal(t, time.Second*0, duration)
}
// TestParseDurationString_ShouldParseDurationStringAllUnits checks every
// supported unit suffix: y(ear), M(onth), w(eek), d(ay), h(our), s(econd).
func TestParseDurationString_ShouldParseDurationStringAllUnits(t *testing.T) {
	duration, err := ParseDurationString("1y")
	assert.NoError(t, err)
	assert.Equal(t, time.Hour*24*365, duration)
	duration, err = ParseDurationString("1M")
	assert.NoError(t, err)
	assert.Equal(t, time.Hour*24*30, duration)
	duration, err = ParseDurationString("1w")
	assert.NoError(t, err)
	assert.Equal(t, time.Hour*24*7, duration)
	duration, err = ParseDurationString("1d")
	assert.NoError(t, err)
	assert.Equal(t, time.Hour*24, duration)
	duration, err = ParseDurationString("1h")
	assert.NoError(t, err)
	assert.Equal(t, time.Hour, duration)
	duration, err = ParseDurationString("1s")
	assert.NoError(t, err)
	assert.Equal(t, time.Second, duration)
}
// TestParseDurationString_ShouldParseSecondsString checks that a bare number
// (no unit suffix) is interpreted as seconds.
func TestParseDurationString_ShouldParseSecondsString(t *testing.T) {
	duration, err := ParseDurationString("100")
	assert.NoError(t, err)
	assert.Equal(t, 100*time.Second, duration)
}
// TestParseDurationString_ShouldNotParseDurationStringWithOutOfOrderQuantitiesAndUnits
// checks that a unit preceding its quantity is rejected with a zero result.
func TestParseDurationString_ShouldNotParseDurationStringWithOutOfOrderQuantitiesAndUnits(t *testing.T) {
	duration, err := ParseDurationString("h1")
	assert.EqualError(t, err, "could not parse 'h1' as a duration")
	assert.Equal(t, time.Duration(0), duration)
}
// TestParseDurationString_ShouldNotParseBadDurationString checks the error
// message produced for an unknown unit suffix.
func TestParseDurationString_ShouldNotParseBadDurationString(t *testing.T) {
	duration, err := ParseDurationString("10x")
	assert.EqualError(t, err, "could not parse the units portion of '10x' in duration string '10x': the unit 'x' is not valid")
	assert.Equal(t, time.Duration(0), duration)
}
// TestParseDurationString_ShouldParseDurationStringWithMultiValueUnits checks
// multi-character unit suffixes such as "ms".
func TestParseDurationString_ShouldParseDurationStringWithMultiValueUnits(t *testing.T) {
	duration, err := ParseDurationString("10ms")
	assert.NoError(t, err)
	assert.Equal(t, time.Duration(10)*time.Millisecond, duration)
}
// TestParseDurationString_ShouldParseDurationStringWithLeadingZero checks that
// leading zeros in the quantity are accepted.
func TestParseDurationString_ShouldParseDurationStringWithLeadingZero(t *testing.T) {
	duration, err := ParseDurationString("005h")
	assert.NoError(t, err)
	assert.Equal(t, time.Duration(5)*time.Hour, duration)
}
// TestParseDurationString_ShouldParseMultiUnitValues checks that several
// quantity/unit pairs in one string are summed together.
func TestParseDurationString_ShouldParseMultiUnitValues(t *testing.T) {
	duration, err := ParseDurationString("1d3w10ms")
	assert.NoError(t, err)
	assert.Equal(t,
		(time.Hour*time.Duration(24))+
			(time.Hour*time.Duration(24)*time.Duration(7)*time.Duration(3))+
			(time.Millisecond*time.Duration(10)), duration)
}
// TestParseDurationString_ShouldParseDuplicateUnitValues checks that repeating
// the same unit accumulates rather than overwrites.
func TestParseDurationString_ShouldParseDuplicateUnitValues(t *testing.T) {
	duration, err := ParseDurationString("1d4d2d")
	assert.NoError(t, err)
	assert.Equal(t,
		(time.Hour*time.Duration(24))+
			(time.Hour*time.Duration(24)*time.Duration(4))+
			(time.Hour*time.Duration(24)*time.Duration(2)), duration)
}
func TestStandardizeDurationString_ShouldParseStringWithSpaces(t *testing.T) |
// TestShouldTimeIntervalsMakeSense sanity-checks the package's exported
// interval constants against their expected arithmetic relationships.
func TestShouldTimeIntervalsMakeSense(t *testing.T) {
	assert.Equal(t, Hour, time.Minute*60)
	assert.Equal(t, Day, Hour*24)
	assert.Equal(t, Week, Day*7)
	assert.Equal(t, Year, Day*365)
	assert.Equal(t, Month, Year/12)
}
| {
result, err := StandardizeDurationString("1d 1h 20m")
assert.NoError(t, err)
assert.Equal(t, result, "24h1h20m")
} |
darknet_zed.py | #!python3
"""
Python 3 wrapper for identifying objects in images
Requires DLL compilation
Original *nix 2.7: https://github.com/pjreddie/darknet/blob/0f110834f4e18b30d5f101bf8f1724c34b7b83db/python/darknet.py
Windows Python 2.7 version: https://github.com/AlexeyAB/darknet/blob/fc496d52bf22a0bb257300d3c79be9cd80e722cb/build/darknet/x64/darknet.py
@author: Philip Kahn, Aymeric Dujardin
@date: 20180911
"""
# pylint: disable=R, W0401, W0614, W0703
import os
import sys
import time
import logging
import random
from random import randint
import math
import statistics
import getopt
from ctypes import *
import numpy as np
import cv2
import pyzed.sl as sl
# Get the top-level logger object
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def sample(probs):
    """Draw one index at random, weighted by the (unnormalised) weights in
    *probs*. Falls back to the last index if rounding leaves r positive."""
    total = sum(probs)
    normalized = [p / total for p in probs]
    r = random.uniform(0, 1)
    for idx, p in enumerate(normalized):
        r -= p
        if r <= 0:
            return idx
    return len(normalized) - 1
def c_array(ctype, values):
    """Copy *values* into a newly allocated ctypes array of element type
    *ctype* and return it."""
    arr = (ctype * len(values))()
    arr[:] = values
    return arr
class BOX(Structure):
    # Mirrors darknet's C `box` struct: (x, y) is the box centre (see the
    # `bounds[0] - bounds[2]/2` maths in main()), w/h the full extents.
    _fields_ = [("x", c_float),
                ("y", c_float),
                ("w", c_float),
                ("h", c_float)]
class DETECTION(Structure):
    # Mirrors darknet's C `detection` struct; `prob` holds one score per class.
    _fields_ = [("bbox", BOX),
                ("classes", c_int),
                ("prob", POINTER(c_float)),
                ("mask", POINTER(c_float)),
                ("objectness", c_float),
                ("sort_class", c_int)]
class IMAGE(Structure):
    # Mirrors darknet's C `image` struct: w x h pixels, c channels, and a
    # pointer to float data the caller must keep alive (see array_to_image).
    _fields_ = [("w", c_int),
                ("h", c_int),
                ("c", c_int),
                ("data", POINTER(c_float))]
class METADATA(Structure):
    # Mirrors darknet's C `metadata` struct: class count and class-name table.
    _fields_ = [("classes", c_int),
                ("names", POINTER(c_char_p))]
# ---------------------------------------------------------------------------
# Load the darknet shared library and declare the C function prototypes.
# Previous hard-coded load paths, kept for reference:
#lib = CDLL("/home/pjreddie/documents/darknet/libdarknet.so", RTLD_GLOBAL)
#lib = CDLL("darknet.so", RTLD_GLOBAL)
hasGPU = True
if os.name == "nt":
    # Windows: choose between the GPU and CPU-only DLL based on env flags.
    cwd = os.path.dirname(__file__)
    os.environ['PATH'] = cwd + ';' + os.environ['PATH']
    winGPUdll = os.path.join(cwd, "yolo_cpp_dll.dll")
    winNoGPUdll = os.path.join(cwd, "yolo_cpp_dll_nogpu.dll")
    envKeys = list()
    for k, v in os.environ.items():
        envKeys.append(k)
    try:
        try:
            tmp = os.environ["FORCE_CPU"].lower()
            if tmp in ["1", "true", "yes", "on"]:
                raise ValueError("ForceCPU")
            else:
                log.info("Flag value '"+tmp+"' not forcing CPU mode")
        except KeyError:
            # We never set the flag
            if 'CUDA_VISIBLE_DEVICES' in envKeys:
                if int(os.environ['CUDA_VISIBLE_DEVICES']) < 0:
                    raise ValueError("ForceCPU")
            try:
                global DARKNET_FORCE_CPU
                if DARKNET_FORCE_CPU:
                    raise ValueError("ForceCPU")
            except NameError:
                pass
            # log.info(os.environ.keys())
            # log.warning("FORCE_CPU flag undefined, proceeding with GPU")
        if not os.path.exists(winGPUdll):
            raise ValueError("NoDLL")
        lib = CDLL(winGPUdll, RTLD_GLOBAL)
    except (KeyError, ValueError):
        # CPU fallback; if the CPU DLL is missing, try the GPU one anyway.
        hasGPU = False
        if os.path.exists(winNoGPUdll):
            lib = CDLL(winNoGPUdll, RTLD_GLOBAL)
            log.warning("Notice: CPU-only mode")
        else:
            # Try the other way, in case no_gpu was
            # compile but not renamed
            lib = CDLL(winGPUdll, RTLD_GLOBAL)
            log.warning("Environment variables indicated a CPU run, but we didn't find `" +
                        winNoGPUdll+"`. Trying a GPU run anyway.")
else:
    lib = CDLL("../libdarknet/libdarknet.so", RTLD_GLOBAL)
# ctypes prototypes: argtypes/restype must match the darknet C API exactly.
lib.network_width.argtypes = [c_void_p]
lib.network_width.restype = c_int
lib.network_height.argtypes = [c_void_p]
lib.network_height.restype = c_int
predict = lib.network_predict
predict.argtypes = [c_void_p, POINTER(c_float)]
predict.restype = POINTER(c_float)
if hasGPU:
    set_gpu = lib.cuda_set_device
    set_gpu.argtypes = [c_int]
make_image = lib.make_image
make_image.argtypes = [c_int, c_int, c_int]
make_image.restype = IMAGE
get_network_boxes = lib.get_network_boxes
get_network_boxes.argtypes = [c_void_p, c_int, c_int, c_float, c_float, POINTER(
    c_int), c_int, POINTER(c_int), c_int]
get_network_boxes.restype = POINTER(DETECTION)
make_network_boxes = lib.make_network_boxes
make_network_boxes.argtypes = [c_void_p]
make_network_boxes.restype = POINTER(DETECTION)
free_detections = lib.free_detections
free_detections.argtypes = [POINTER(DETECTION), c_int]
free_ptrs = lib.free_ptrs
free_ptrs.argtypes = [POINTER(c_void_p), c_int]
network_predict = lib.network_predict
network_predict.argtypes = [c_void_p, POINTER(c_float)]
reset_rnn = lib.reset_rnn
reset_rnn.argtypes = [c_void_p]
load_net = lib.load_network
load_net.argtypes = [c_char_p, c_char_p, c_int]
load_net.restype = c_void_p
load_net_custom = lib.load_network_custom
load_net_custom.argtypes = [c_char_p, c_char_p, c_int, c_int]
load_net_custom.restype = c_void_p
do_nms_obj = lib.do_nms_obj
do_nms_obj.argtypes = [POINTER(DETECTION), c_int, c_int, c_float]
do_nms_sort = lib.do_nms_sort
do_nms_sort.argtypes = [POINTER(DETECTION), c_int, c_int, c_float]
free_image = lib.free_image
free_image.argtypes = [IMAGE]
letterbox_image = lib.letterbox_image
letterbox_image.argtypes = [IMAGE, c_int, c_int]
letterbox_image.restype = IMAGE
load_meta = lib.get_metadata
lib.get_metadata.argtypes = [c_char_p]
lib.get_metadata.restype = METADATA
load_image = lib.load_image_color
load_image.argtypes = [c_char_p, c_int, c_int]
load_image.restype = IMAGE
rgbgr_image = lib.rgbgr_image
rgbgr_image.argtypes = [IMAGE]
predict_image = lib.network_predict_image
predict_image.argtypes = [c_void_p, IMAGE]
predict_image.restype = POINTER(c_float)
def array_to_image(arr):
    """Convert an HxWxC uint8 image array into a darknet IMAGE.

    Returns both the IMAGE and the float32 backing array: the caller must
    keep a reference to the array while the IMAGE is in use, otherwise
    Python frees the memory the IMAGE's data pointer still points at.
    """
    # numpy is already imported at module level; the previous local
    # `import numpy as np` was redundant and has been removed.
    arr = arr.transpose(2, 0, 1)  # HWC -> CHW, darknet's layout
    c = arr.shape[0]
    h = arr.shape[1]
    w = arr.shape[2]
    arr = np.ascontiguousarray(arr.flat, dtype=np.float32) / 255.0
    data = arr.ctypes.data_as(POINTER(c_float))
    im = IMAGE(w, h, c, data)
    return im, arr
def classify(net, meta, im):
    """Run the classifier on *im* and return (name, score) pairs sorted by
    descending score; names come from altNames when it is populated."""
    scores = predict_image(net, im)
    results = []
    for class_idx in range(meta.classes):
        if altNames is None:
            label = meta.names[class_idx]
        else:
            label = altNames[class_idx]
        results.append((label, scores[class_idx]))
    results.sort(key=lambda pair: pair[1], reverse=True)
    return results
def detect(net, meta, image, thresh=.5, hier_thresh=.5, nms=.45, debug=False):
    """
    Performs the detection on a BGR image (as produced by OpenCV/the ZED).

    Returns a list of (name, probability, (x, y, w, h), class_index) tuples
    sorted by descending probability, with box coordinates in the original
    image's pixel space; (x, y) is the box centre.
    """
    # Darknet expects RGB input at the network's own resolution.
    custom_image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    custom_image = cv2.resize(custom_image, (lib.network_width(
        net), lib.network_height(net)), interpolation=cv2.INTER_LINEAR)
    # Keep `arr` alive for as long as `im` is used (im points into arr).
    im, arr = array_to_image(custom_image)
    num = c_int(0)
    pnum = pointer(num)
    predict_image(net, im)
    dets = get_network_boxes(
        net, image.shape[1], image.shape[0], thresh, hier_thresh, None, 0, pnum, 0)
    num = pnum[0]
    if nms:
        do_nms_sort(dets, num, meta.classes, nms)
    res = []
    if debug:
        log.debug("about to range")
    for j in range(num):
        for i in range(meta.classes):
            # Only keep classes that survived thresholding/NMS (prob > 0).
            if dets[j].prob[i] > 0:
                b = dets[j].bbox
                if altNames is None:
                    name_tag = meta.names[i]
                else:
                    name_tag = altNames[i]
                res.append((name_tag, dets[j].prob[i], (b.x, b.y, b.w, b.h), i))
    res = sorted(res, key=lambda x: -x[1])
    # Free the C-side detections; `res` only holds copied Python values.
    free_detections(dets, num)
    return res
# Module-level Darknet state: loaded once in main() and reused on every call.
netMain = None
metaMain = None
altNames = None
def get_object_depth(depth, bounds):
    '''
    Calculates the median x, y, z position of a small patch of the point
    cloud around the detection centre, in the camera frame.

    Arguments:
        depth: Point cloud data of whole frame (H x W x >=3 array).
        bounds: Bounding box for object in pixels.
            bounds[0]: x-center
            bounds[1]: y-center
            bounds[2]: width of bounding box.
            bounds[3]: height of bounding box.

    Return:
        x, y, z: Location of object in meters, or (-1, -1, -1) when the
        patch contained no valid depth samples.
    '''
    area_div = 2  # half-size of the sampled patch around the centre pixel
    x_vect = []
    y_vect = []
    z_vect = []
    for j in range(int(bounds[0] - area_div), int(bounds[0] + area_div)):
        for i in range(int(bounds[1] - area_div), int(bounds[1] + area_div)):
            z = depth[i, j, 2]
            # Skip invalid samples (NaN/inf where depth could not be matched).
            if not np.isnan(z) and not np.isinf(z):
                x_vect.append(depth[i, j, 0])
                y_vect.append(depth[i, j, 1])
                z_vect.append(z)
    try:
        x_median = statistics.median(x_vect)
        y_median = statistics.median(y_vect)
        z_median = statistics.median(z_vect)
    except statistics.StatisticsError:
        # Empty vectors (no valid samples): signal "unknown" with sentinels.
        # Narrowed from a bare `except Exception: ... pass` which also had a
        # redundant trailing `pass`.
        x_median = -1
        y_median = -1
        z_median = -1
    return x_median, y_median, z_median
def generate_color(meta_path):
    '''
    Generate reproducible random colors for the number of classes mentioned
    in the data file.

    Arguments:
        meta_path: Path to .data file whose first line contains `classes=<n>`.

    Return:
        color_array: list of (R, G, B) tuples, one per class.
    '''
    # Use a private seeded RNG so the colors stay stable across runs without
    # re-seeding the module-wide random state used elsewhere (e.g. sample()).
    # random.Random(42) yields the same sequence random.seed(42) did.
    rng = random.Random(42)
    with open(meta_path, 'r') as f:
        content = f.readlines()
    class_num = int(content[0].split("=")[1])
    return [(rng.randint(0, 255), rng.randint(0, 255), rng.randint(0, 255))
            for _ in range(class_num)]
def main(argv):
    """Entry point: parse CLI options, open the ZED camera (or an SVO file),
    load the Darknet network once, then run the grab/detect/annotate/display
    loop until the user presses 'q'."""
    thresh = 0.25
    darknet_path="../libdarknet/"
    config_path = darknet_path + "cfg/yolov3-tiny.cfg"
    weight_path = "yolov3-tiny.weights"
    meta_path = "coco.data"
    svo_path = None
    zed_id = 0
    help_str = 'darknet_zed.py -c <config> -w <weight> -m <meta> -t <threshold> -s <svo_file> -z <zed_id>'
    try:
        opts, args = getopt.getopt(
            argv, "hc:w:m:t:s:z:", ["config=", "weight=", "meta=", "threshold=", "svo_file=", "zed_id="])
    except getopt.GetoptError:
        log.exception(help_str)
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            log.info(help_str)
            sys.exit()
        elif opt in ("-c", "--config"):
            config_path = arg
        elif opt in ("-w", "--weight"):
            weight_path = arg
        elif opt in ("-m", "--meta"):
            meta_path = arg
        elif opt in ("-t", "--threshold"):
            thresh = float(arg)
        elif opt in ("-s", "--svo_file"):
            svo_path = arg
        elif opt in ("-z", "--zed_id"):
            zed_id = int(arg)
    input_type = sl.InputType()
    if svo_path is not None:
        log.info("SVO file : " + svo_path)
        input_type.set_from_svo_file(svo_path)
    else:
        # Launch camera by id
        input_type.set_from_camera_id(zed_id)
    init = sl.InitParameters(input_t=input_type)
    init.coordinate_units = sl.UNIT.METER
    cam = sl.Camera()
    if not cam.is_opened():
        log.info("Opening ZED Camera...")
    status = cam.open(init)
    if status != sl.ERROR_CODE.SUCCESS:
        log.error(repr(status))
        exit()
    runtime = sl.RuntimeParameters()
    # Use STANDARD sensing mode
    runtime.sensing_mode = sl.SENSING_MODE.STANDARD
    mat = sl.Mat()
    point_cloud_mat = sl.Mat()
    # Import the global variables. This lets us instance Darknet once,
    # then just call performDetect() again without instancing again
    global metaMain, netMain, altNames  # pylint: disable=W0603
    assert 0 < thresh < 1, "Threshold should be a float between zero and one (non-inclusive)"
    if not os.path.exists(config_path):
        raise ValueError("Invalid config path `" +
                         os.path.abspath(config_path)+"`")
    if not os.path.exists(weight_path):
        raise ValueError("Invalid weight path `" +
                         os.path.abspath(weight_path)+"`")
    if not os.path.exists(meta_path):
        raise ValueError("Invalid data file path `" +
                         os.path.abspath(meta_path)+"`")
    if netMain is None:
        netMain = load_net_custom(config_path.encode(
            "ascii"), weight_path.encode("ascii"), 0, 1)  # batch size = 1
    if metaMain is None:
        metaMain = load_meta(meta_path.encode("ascii"))
    if altNames is None:
        # In Python 3, the metafile default access craps out on Windows (but not Linux)
        # Read the names file and create a list to feed to detect
        try:
            with open(meta_path) as meta_fh:
                meta_contents = meta_fh.read()
            import re
            match = re.search("names *= *(.*)$", meta_contents,
                              re.IGNORECASE | re.MULTILINE)
            if match:
                result = match.group(1)
            else:
                result = None
            try:
                if os.path.exists(result):
                    with open(result) as names_fh:
                        names_list = names_fh.read().strip().split("\n")
                        altNames = [x.strip() for x in names_list]
            except TypeError:
                pass
        except Exception:
            pass
    color_array = generate_color(meta_path)
    log.info("Running...")
    key = ''
    while key != 113:  # for 'q' key
        start_time = time.time()  # start time of the loop
        err = cam.grab(runtime)
        if err == sl.ERROR_CODE.SUCCESS:
            cam.retrieve_image(mat, sl.VIEW.LEFT)
            image = mat.get_data()
            cam.retrieve_measure(
                point_cloud_mat, sl.MEASURE.XYZRGBA)
            depth = point_cloud_mat.get_data()
            # Do the detection
            detections = detect(netMain, metaMain, image, thresh)
            # chr(27)+"[2J" is the ANSI clear-screen escape sequence.
            log.info(chr(27) + "[2J"+"**** " + str(len(detections)) + " Results ****")
            for detection in detections:
                label = detection[0]
                confidence = detection[1]
                pstring = label+": "+str(np.rint(100 * confidence))+"%"
                log.info(pstring)
                bounds = detection[2]
                y_extent = int(bounds[3])
                x_extent = int(bounds[2])
                # Coordinates are around the center
                x_coord = int(bounds[0] - bounds[2]/2)
                y_coord = int(bounds[1] - bounds[3]/2)
                #boundingBox = [[x_coord, y_coord], [x_coord, y_coord + y_extent], [x_coord + x_extent, y_coord + y_extent], [x_coord + x_extent, y_coord]]
                thickness = 1
                x, y, z = get_object_depth(depth, bounds)
                distance = math.sqrt(x * x + y * y + z * z)
                distance = "{:.2f}".format(distance)
                # Filled label background, label text, then the box outline.
                cv2.rectangle(image, (x_coord - thickness, y_coord - thickness),
                              (x_coord + x_extent + thickness, y_coord + (18 + thickness*4)),
                              color_array[detection[3]], -1)
                cv2.putText(image, label + " " + (str(distance) + " m"),
                            (x_coord + (thickness * 4), y_coord + (10 + thickness * 4)),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)
                cv2.rectangle(image, (x_coord - thickness, y_coord - thickness),
                              (x_coord + x_extent + thickness, y_coord + y_extent + thickness),
                              color_array[detection[3]], int(thickness*2))
            cv2.imshow("ZED", image)
            key = cv2.waitKey(5)
            log.info("FPS: {}".format(1.0 / (time.time() - start_time)))
        else:
            key = cv2.waitKey(5)
    cv2.destroyAllWindows()
    cam.close()
    log.info("\nFINISH")
if __name__ == "__main__":
    # Forward the CLI args (minus the program name) to the option parser.
    main(sys.argv[1:])
command_template.js | //code
message.channel.send('Pong.');
},
}; | module.exports = {
name: '', //This is the user input parameter ie. {perfix}ping = !ping , response pong
description: '', //Allows for a dynamic description of the command
execute(message, args) { |
|
main.go | package main
import (
"github.com/abiosoft/ishell"
"github.com/metrosystems-cpe/DDOM/config"
"github.com/metrosystems-cpe/DDOM/menu"
"github.com/metrosystems-cpe/DDOM/utils"
)
// Shared interactive-shell instance and application configuration,
// initialised in init() and used by main().
var (
	shell     *ishell.Shell
	appConfig utils.AppConfig
)
// init creates the shell, wires up command history, loads the org config
// from file and stores the app config on the shell context.
func init() {
	shell = ishell.New()
	shell.SetHistoryPath("./history")
	appConfig.OrgCfg = config.LoadfromFile()
	shell.Set("appConfig", &appConfig)
}
func | () {
menu.ComputeCommands(shell)
menu.Run(shell)
}
| main |
permissions-info.tsx | import React, {FC, useState} from 'react';
import {Text, StyleSheet, View, Image} from 'react-native';
import {useTranslation} from 'react-i18next';
import {StackNavigationProp} from '@react-navigation/stack';
import {useExposure} from 'react-native-exposure-notification-service';
import Button from '../../atoms/button';
import Spacing from '../../atoms/spacing';
import Markdown from '../../atoms/markdown';
import {text} from '../../../theme';
import {ScreenNames} from '../../../navigation';
import {useSettings} from '../../../providers/settings';
import {useApplication} from '../../../providers/context';
import * as SecureStore from 'expo-secure-store';
const IconBell = require('../../../assets/images/icon-bell/icon-bell.png');
const IconBt = require('../../../assets/images/icon-bt/icon-bt.png');
interface PermissionInfoProps {
navigation: StackNavigationProp<any>;
// Reconstructed in canonical order — the component header appeared after the
// `export default` in the scrambled source.
const PermissionsInfo: FC<PermissionInfoProps> = ({navigation}) => {
  const {t} = useTranslation();
  const {reload} = useSettings();
  const [disabled, setDisabled] = useState(false);
  const {askPermissions} = useExposure();
  const application = useApplication();

  // Ask for exposure-notification permissions, record consent, then move the
  // user on to the dashboard; re-enable the button if anything fails.
  const handlePermissions = async () => {
    setDisabled(true);
    SecureStore.setItemAsync('analyticsConsent', String(true), {});
    try {
      await askPermissions();
      reload();
      await application.setContext({completedExposureOnboarding: true});
      setTimeout(() => navigation.replace(ScreenNames.dashboard), 1000);
    } catch (e) {
      setDisabled(false);
      console.log("Error opening app's settings", e);
    }
  };

  return (
    <View style={styles.container}>
      <Spacing s={24} />
      <View style={styles.column}>
        <View style={styles.row}>
          <View style={styles.iconWrapper}>
            <Image source={IconBt} accessibilityIgnoresInvertColors={false} />
          </View>
          <View style={styles.column}>
            <Markdown markdownStyles={markdownStyles}>
              {t('onboarding:permissionsInfo:view:item1')}
            </Markdown>
          </View>
        </View>
        <Spacing s={28} />
        <View style={styles.row}>
          <View style={styles.iconWrapper}>
            <Image source={IconBell} accessibilityIgnoresInvertColors={false} />
          </View>
          <View style={styles.column}>
            <Text style={[styles.viewText, styles.bold]}>
              {t('onboarding:permissionsInfo:view:item2')}
            </Text>
          </View>
        </View>
      </View>
      <Spacing s={56} />
      <Button
        disabled={disabled}
        onPress={handlePermissions}
        hint={t('onboarding:permissionsInfo:accessibility:nextHint')}
        label={t('onboarding:permissionsInfo:accessibility:nextLabel')}>
        {t('common:next:label')}
      </Button>
      <Spacing s={20} />
    </View>
  );
};

const markdownStyles = StyleSheet.create({
  h1: {
    lineHeight: 20
  }
});

const styles = StyleSheet.create({
  container: {flex: 1},
  column: {
    flex: 1,
    flexDirection: 'column'
  },
  row: {flexDirection: 'row'},
  iconWrapper: {
    width: 80,
    paddingLeft: 15,
    paddingTop: 2
  },
  viewText: {
    ...text.default
  },
  bold: {
    ...text.defaultBold
  }
});

export default PermissionsInfo;
pinchrotateinteraction.js | goog.provide('ol.interaction.PinchRotate');
goog.require('goog.asserts');
goog.require('ol');
goog.require('ol.functions');
goog.require('ol.ViewHint');
goog.require('ol.interaction.Interaction');
goog.require('ol.interaction.Pointer');
/**
 * @classdesc
 * Allows the user to rotate the map by twisting with two fingers
 * on a touch screen.
 *
 * @constructor
 * @extends {ol.interaction.Pointer}
 * @param {olx.interaction.PinchRotateOptions=} opt_options Options.
 * @api stable
 */
ol.interaction.PinchRotate = function(opt_options) {
  ol.interaction.Pointer.call(this, {
    handleDownEvent: ol.interaction.PinchRotate.handleDownEvent_,
    handleDragEvent: ol.interaction.PinchRotate.handleDragEvent_,
    handleUpEvent: ol.interaction.PinchRotate.handleUpEvent_
  });

  var options = opt_options || {};

  /**
   * Rotation anchor (map coordinate under the touch centroid).
   * @private
   * @type {ol.Coordinate}
   */
  this.anchor_ = null;

  /**
   * Angle between the two touches on the previous drag event.
   * @private
   * @type {number|undefined}
   */
  this.lastAngle_ = undefined;

  /**
   * True once the accumulated delta has exceeded the threshold.
   * @private
   * @type {boolean}
   */
  this.rotating_ = false;

  /**
   * Accumulated rotation since the down event, in radians.
   * @private
   * @type {number}
   */
  this.rotationDelta_ = 0.0;

  /**
   * Minimum accumulated rotation (radians) before rotating starts.
   * @private
   * @type {number}
   */
  this.threshold_ = options.threshold !== undefined ? options.threshold : 0.3;

  /**
   * Animation duration for the final snap, in milliseconds.
   * @private
   * @type {number}
   */
  this.duration_ = options.duration !== undefined ? options.duration : 250;

};
ol.inherits(ol.interaction.PinchRotate, ol.interaction.Pointer);
/**
 * Accumulates the angle change between the two touches; once past the
 * threshold, rotates the view around the touch centroid.
 * @param {ol.MapBrowserPointerEvent} mapBrowserEvent Event.
 * @this {ol.interaction.PinchRotate}
 * @private
 */
ol.interaction.PinchRotate.handleDragEvent_ = function(mapBrowserEvent) {
  goog.asserts.assert(this.targetPointers.length >= 2,
      'length of this.targetPointers should be greater than or equal to 2');
  var rotationDelta = 0.0;

  var touch0 = this.targetPointers[0];
  var touch1 = this.targetPointers[1];

  // angle between touches
  var angle = Math.atan2(
      touch1.clientY - touch0.clientY,
      touch1.clientX - touch0.clientX);

  if (this.lastAngle_ !== undefined) {
    var delta = angle - this.lastAngle_;
    this.rotationDelta_ += delta;
    if (!this.rotating_ &&
        Math.abs(this.rotationDelta_) > this.threshold_) {
      this.rotating_ = true;
    }
    rotationDelta = delta;
  }
  this.lastAngle_ = angle;

  var map = mapBrowserEvent.map;

  // rotate anchor point.
  // FIXME: should be the intersection point between the lines:
  // touch0,touch1 and previousTouch0,previousTouch1
  var viewportPosition = map.getViewport().getBoundingClientRect();
  var centroid = ol.interaction.Pointer.centroid(this.targetPointers);
  centroid[0] -= viewportPosition.left;
  centroid[1] -= viewportPosition.top;
  this.anchor_ = map.getCoordinateFromPixel(centroid);

  // rotate
  if (this.rotating_) {
    var view = map.getView();
    var rotation = view.getRotation();
    map.render();
    ol.interaction.Interaction.rotateWithoutConstraints(map, view,
        rotation + rotationDelta, this.anchor_);
  }
};
/**
 * Ends the pinch when fewer than two pointers remain, snapping the view's
 * rotation to its constraint; keeps the sequence alive otherwise.
 * @param {ol.MapBrowserPointerEvent} mapBrowserEvent Event.
 * @return {boolean} Stop drag sequence?
 * @this {ol.interaction.PinchRotate}
 * @private
 */
ol.interaction.PinchRotate.handleUpEvent_ = function(mapBrowserEvent) {
  if (this.targetPointers.length >= 2) {
    // Still pinching with two or more pointers: keep the drag sequence alive.
    return true;
  }
  var map = mapBrowserEvent.map;
  var view = map.getView();
  view.setHint(ol.ViewHint.INTERACTING, -1);
  if (this.rotating_) {
    var rotation = view.getRotation();
    ol.interaction.Interaction.rotate(
        map, view, rotation, this.anchor_, this.duration_);
  }
  return false;
};
/**
 * Starts a pinch sequence when at least two pointers are down, resetting
 * the per-gesture state and flagging the view as interacting.
 * @param {ol.MapBrowserPointerEvent} mapBrowserEvent Event.
 * @return {boolean} Start drag sequence?
 * @this {ol.interaction.PinchRotate}
 * @private
 */
ol.interaction.PinchRotate.handleDownEvent_ = function(mapBrowserEvent) {
  if (this.targetPointers.length < 2) {
    return false;
  }
  var map = mapBrowserEvent.map;
  // Reset the per-gesture rotation state.
  this.anchor_ = null;
  this.lastAngle_ = undefined;
  this.rotating_ = false;
  this.rotationDelta_ = 0.0;
  if (!this.handlingDownUpSequence) {
    map.getView().setHint(ol.ViewHint.INTERACTING, 1);
  }
  map.render();
  return true;
};
/**
 * Never stops event propagation for this interaction.
 * @inheritDoc
 */
ol.interaction.PinchRotate.prototype.shouldStopEvent = ol.functions.FALSE;
operations.rs | #![doc = "generated by AutoRust"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
#![allow(clippy::redundant_clone)]
use super::models;
/// Service client: base endpoint, token credential, OAuth scopes and the
/// HTTP pipeline used to execute requests.
#[derive(Clone)]
pub struct Client {
    endpoint: String,
    credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
    scopes: Vec<String>,
    pipeline: azure_core::Pipeline,
}
/// Builder for [`Client`]; endpoint and scopes fall back to defaults in `build`.
#[derive(Clone)]
pub struct ClientBuilder {
    credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
    endpoint: Option<String>,
    scopes: Option<Vec<String>>,
}
/// Default endpoint: the Azure public-cloud resource-manager endpoint.
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
    /// Start a builder with the given credential; endpoint and scopes are optional.
    pub fn new(credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>) -> Self {
        Self {
            credential,
            endpoint: None,
            scopes: None,
        }
    }
    /// Override the service endpoint.
    pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
        self.endpoint = Some(endpoint.into());
        self
    }
    /// Override the token scopes requested for authentication.
    pub fn scopes(mut self, scopes: &[&str]) -> Self {
        self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect());
        self
    }
    /// Build the client, defaulting the endpoint to [`DEFAULT_ENDPOINT`] and
    /// the scope to `"{endpoint}/"`.
    pub fn build(self) -> Client {
        let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned());
        let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]);
        Client::new(endpoint, self.credential, scopes)
    }
}
impl Client {
    /// Base URL used to build request paths.
    pub(crate) fn endpoint(&self) -> &str {
        self.endpoint.as_str()
    }
    /// Credential used to obtain bearer tokens.
    pub(crate) fn token_credential(&self) -> &dyn azure_core::auth::TokenCredential {
        self.credential.as_ref()
    }
    /// Scopes requested when fetching tokens.
    pub(crate) fn scopes(&self) -> Vec<&str> {
        self.scopes.iter().map(String::as_str).collect()
    }
    /// Send a request through the client's pipeline.
    pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> azure_core::error::Result<azure_core::Response> {
        let mut context = azure_core::Context::default();
        let mut request = request.into();
        self.pipeline.send(&mut context, &mut request).await
    }
    /// Construct a client directly; prefer [`ClientBuilder`] for defaults.
    pub fn new(
        endpoint: impl Into<String>,
        credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
        scopes: Vec<String>,
    ) -> Self {
        let endpoint = endpoint.into();
        let pipeline = azure_core::Pipeline::new(
            option_env!("CARGO_PKG_NAME"),
            option_env!("CARGO_PKG_VERSION"),
            azure_core::ClientOptions::default(),
            Vec::new(),
            Vec::new(),
        );
        Self {
            endpoint,
            credential,
            scopes,
            pipeline,
        }
    }
    // Accessors for the per-operation-group sub-clients.
    pub fn compute(&self) -> compute::Client {
        compute::Client(self.clone())
    }
    pub fn operations(&self) -> operations::Client {
        operations::Client(self.clone())
    }
    pub fn private_endpoint_connections(&self) -> private_endpoint_connections::Client {
        private_endpoint_connections::Client(self.clone())
    }
    pub fn private_link_resources(&self) -> private_link_resources::Client {
        private_link_resources::Client(self.clone())
    }
    pub fn quotas(&self) -> quotas::Client {
        quotas::Client(self.clone())
    }
    pub fn usages(&self) -> usages::Client {
        usages::Client(self.clone())
    }
    pub fn virtual_machine_sizes(&self) -> virtual_machine_sizes::Client {
        virtual_machine_sizes::Client(self.clone())
    }
    pub fn workspace_connections(&self) -> workspace_connections::Client {
        workspace_connections::Client(self.clone())
    }
    pub fn workspace_features(&self) -> workspace_features::Client {
        workspace_features::Client(self.clone())
    }
    pub fn workspace_skus(&self) -> workspace_skus::Client {
        workspace_skus::Client(self.clone())
    }
    pub fn workspaces(&self) -> workspaces::Client {
        workspaces::Client(self.clone())
    }
}
pub mod operations {
    use super::models;
    /// Sub-client for the provider-level `operations` operation group.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// List all available Microsoft.MachineLearningServices operations.
        pub fn list(&self) -> list::Builder {
            list::Builder { client: self.0.clone() }
        }
    }
    pub mod list {
        use super::models;
        use azure_core::error::ResultExt;
        type Response = models::OperationListResult;
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
        }
        impl Builder {
            #[doc = "only the first response will be fetched as the continuation token is not part of the response schema"]
            pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
                Box::pin({
                    let this = self.clone();
                    async move {
                        // Build the GET request for the operations list.
                        let url_str = &format!("{}/providers/Microsoft.MachineLearningServices/operations", this.client.endpoint(),);
                        let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
                        let mut req_builder = http::request::Builder::new();
                        req_builder = req_builder.method(http::Method::GET);
                        // Acquire a bearer token for the configured scopes.
                        let credential = this.client.token_credential();
                        let token_response = credential
                            .get_token(&this.client.scopes().join(" "))
                            .await
                            .context(azure_core::error::ErrorKind::Other, "get bearer token")?;
                        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                        url.query_pairs_mut().append_pair("api-version", "2021-07-01");
                        let req_body = azure_core::EMPTY_BODY;
                        req_builder = req_builder.uri(url.as_str());
                        let req = req_builder
                            .body(req_body)
                            .context(azure_core::error::ErrorKind::Other, "build request")?;
                        let rsp = this
                            .client
                            .send(req)
                            .await
                            .context(azure_core::error::ErrorKind::Io, "execute request")?;
                        // Deserialize on 200; anything else becomes an HttpResponse error.
                        let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                        match rsp_status {
                            http::StatusCode::OK => {
                                let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
                                let rsp_value: models::OperationListResult = serde_json::from_slice(&rsp_body)?;
                                Ok(rsp_value)
                            }
                            status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
                                status: status_code.as_u16(),
                                error_code: None,
                            })),
                        }
                    }
                })
            }
        }
    }
}
pub mod workspaces {
use super::models;
    #[doc = "Client for workspace operations; wraps the service client and exposes one request-builder constructor per operation."]
    pub struct Client(pub(crate) super::Client);
    impl Client {
        #[doc = "Returns a builder that GETs the specified workspace."]
        pub fn get(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
            }
        }
        #[doc = "Returns a builder that PUTs (creates or updates) the specified workspace with the given parameters."]
        pub fn create_or_update(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            parameters: impl Into<models::Workspace>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                parameters: parameters.into(),
            }
        }
        #[doc = "Returns a builder that PATCHes the specified workspace with the given update parameters."]
        pub fn update(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            parameters: impl Into<models::WorkspaceUpdateParameters>,
        ) -> update::Builder {
            update::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                parameters: parameters.into(),
            }
        }
        #[doc = "Returns a builder that DELETEs the specified workspace."]
        pub fn delete(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
            }
        }
        #[doc = "Returns a pageable builder that lists workspaces in a resource group; an optional `$skip` token can be set on the builder."]
        pub fn list_by_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> list_by_resource_group::Builder {
            list_by_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                skip: None,
            }
        }
        #[doc = "Diagnose workspace setup issue."]
        pub fn diagnose(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
        ) -> diagnose::Builder {
            diagnose::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                parameters: None,
            }
        }
        #[doc = "Returns a builder that POSTs to `listKeys` for the specified workspace."]
        pub fn list_keys(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
        ) -> list_keys::Builder {
            list_keys::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
            }
        }
        #[doc = "Returns a builder that POSTs to `resyncKeys` for the specified workspace."]
        pub fn resync_keys(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
        ) -> resync_keys::Builder {
            resync_keys::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
            }
        }
        #[doc = "Returns a pageable builder that lists workspaces across the subscription; an optional `$skip` token can be set on the builder."]
        pub fn list_by_subscription(&self, subscription_id: impl Into<String>) -> list_by_subscription::Builder {
            list_by_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                skip: None,
            }
        }
        #[doc = "Returns a builder that POSTs to `listNotebookAccessToken` for the specified workspace."]
        pub fn list_notebook_access_token(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
        ) -> list_notebook_access_token::Builder {
            list_notebook_access_token::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
            }
        }
        #[doc = "Returns a builder that POSTs to `prepareNotebook` for the specified workspace."]
        pub fn prepare_notebook(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
        ) -> prepare_notebook::Builder {
            prepare_notebook::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
            }
        }
        #[doc = "Returns a builder that POSTs to `listStorageAccountKeys` for the specified workspace."]
        pub fn list_storage_account_keys(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
        ) -> list_storage_account_keys::Builder {
            list_storage_account_keys::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
            }
        }
        #[doc = "Returns a builder that POSTs to `listNotebookKeys` for the specified workspace."]
        pub fn list_notebook_keys(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
        ) -> list_notebook_keys::Builder {
            list_notebook_keys::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
            }
        }
        #[doc = "Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) programmatically."]
        pub fn list_outbound_network_dependencies_endpoints(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
        ) -> list_outbound_network_dependencies_endpoints::Builder {
            list_outbound_network_dependencies_endpoints::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
            }
        }
    }
pub mod get {
use super::models;
use azure_core::error::ResultExt;
type Response = models::Workspace;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::Workspace = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
use azure_core::error::ResultExt;
#[derive(Debug)]
pub enum Response {
Ok200(models::Workspace),
Accepted202,
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) parameters: models::Workspace,
}
impl Builder {
#[doc = "only the first response will be fetched as long running operations are not supported yet"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&this.parameters)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::Workspace = serde_json::from_slice(&rsp_body)?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod update {
use super::models;
use azure_core::error::ResultExt;
type Response = models::Workspace;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) parameters: models::WorkspaceUpdateParameters,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&this.parameters)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::Workspace = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod delete {
use super::models;
use azure_core::error::ResultExt;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
}
impl Builder {
#[doc = "only the first response will be fetched as long running operations are not supported yet"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod list_by_resource_group {
use super::models;
use azure_core::error::ResultExt;
type Response = models::WorkspaceListResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) skip: Option<String>,
}
impl Builder {
pub fn skip(mut self, skip: impl Into<String>) -> Self {
self.skip = Some(skip.into());
self
}
pub fn into_stream(self) -> azure_core::Pageable<Response, azure_core::error::Error> {
let make_request = move |continuation: Option<azure_core::prelude::Continuation>| {
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::Other, "build request")?;
let mut req_builder = http::request::Builder::new();
let rsp = match continuation {
Some(token) => {
url.set_path("");
url = url
.join(&token.into_raw())
.context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let has_api_version_already = url.query_pairs().any(|(k, _)| k == "api-version");
if !has_api_version_already {
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
}
req_builder = req_builder.uri(url.as_str());
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
let req_body = azure_core::EMPTY_BODY;
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
None => {
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
if let Some(skip) = &this.skip {
url.query_pairs_mut().append_pair("$skip", skip);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
};
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::WorkspaceListResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
};
azure_core::Pageable::new(make_request)
}
}
}
pub mod diagnose {
use super::models;
use azure_core::error::ResultExt;
#[derive(Debug)]
pub enum Response {
Accepted202,
Ok200(models::DiagnoseResponseResult),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) parameters: Option<models::DiagnoseWorkspaceParameters>,
}
impl Builder {
pub fn parameters(mut self, parameters: impl Into<models::DiagnoseWorkspaceParameters>) -> Self {
self.parameters = Some(parameters.into());
self
}
#[doc = "only the first response will be fetched as long running operations are not supported yet"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/diagnose",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = if let Some(parameters) = &this.parameters {
req_builder = req_builder.header("content-type", "application/json");
azure_core::to_json(parameters)?
} else {
azure_core::EMPTY_BODY
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::DiagnoseResponseResult = serde_json::from_slice(&rsp_body)?;
Ok(Response::Ok200(rsp_value))
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod list_keys {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ListWorkspaceKeysResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/listKeys",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ListWorkspaceKeysResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod resync_keys {
use super::models;
use azure_core::error::ResultExt;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
}
impl Builder {
#[doc = "only the first response will be fetched as long running operations are not supported yet"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/resyncKeys",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod list_by_subscription {
use super::models;
use azure_core::error::ResultExt;
type Response = models::WorkspaceListResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) skip: Option<String>,
}
impl Builder {
pub fn skip(mut self, skip: impl Into<String>) -> Self {
self.skip = Some(skip.into());
self
}
pub fn into_stream(self) -> azure_core::Pageable<Response, azure_core::error::Error> {
let make_request = move |continuation: Option<azure_core::prelude::Continuation>| {
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/workspaces",
this.client.endpoint(),
&this.subscription_id
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::Other, "build request")?;
let mut req_builder = http::request::Builder::new();
let rsp = match continuation {
Some(token) => {
url.set_path("");
url = url
.join(&token.into_raw())
.context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let has_api_version_already = url.query_pairs().any(|(k, _)| k == "api-version");
if !has_api_version_already {
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
}
req_builder = req_builder.uri(url.as_str());
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
let req_body = azure_core::EMPTY_BODY;
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
None => {
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
if let Some(skip) = &this.skip {
url.query_pairs_mut().append_pair("$skip", skip);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
};
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::WorkspaceListResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
};
azure_core::Pageable::new(make_request)
}
}
}
pub mod list_notebook_access_token {
use super::models;
use azure_core::error::ResultExt;
type Response = models::NotebookAccessTokenResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/listNotebookAccessToken" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::NotebookAccessTokenResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod prepare_notebook {
use super::models;
use azure_core::error::ResultExt;
#[derive(Debug)]
pub enum Response {
Ok200(models::NotebookResourceInfo),
Accepted202,
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
}
impl Builder {
#[doc = "only the first response will be fetched as long running operations are not supported yet"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/prepareNotebook" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::NotebookResourceInfo = serde_json::from_slice(&rsp_body)?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod list_storage_account_keys {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ListStorageAccountKeysResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/listStorageAccountKeys" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential | .get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ListStorageAccountKeysResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod list_notebook_keys {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ListNotebookKeysResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/listNotebookKeys" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ListNotebookKeysResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod list_outbound_network_dependencies_endpoints {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ExternalFqdnResponse;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/outboundNetworkDependenciesEndpoints" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ExternalFqdnResponse = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
}
pub mod usages {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(&self, subscription_id: impl Into<String>, location: impl Into<String>) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
location: location.into(),
}
}
}
pub mod list {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ListUsagesResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) location: String,
}
impl Builder {
pub fn into_stream(self) -> azure_core::Pageable<Response, azure_core::error::Error> {
let make_request = move |continuation: Option<azure_core::prelude::Continuation>| {
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/locations/{}/usages",
this.client.endpoint(),
&this.subscription_id,
&this.location
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::Other, "build request")?;
let mut req_builder = http::request::Builder::new();
let rsp = match continuation {
Some(token) => {
url.set_path("");
url = url
.join(&token.into_raw())
.context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let has_api_version_already = url.query_pairs().any(|(k, _)| k == "api-version");
if !has_api_version_already {
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
}
req_builder = req_builder.uri(url.as_str());
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
let req_body = azure_core::EMPTY_BODY;
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
None => {
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
};
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ListUsagesResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
};
azure_core::Pageable::new(make_request)
}
}
}
}
pub mod virtual_machine_sizes {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(&self, location: impl Into<String>, subscription_id: impl Into<String>) -> list::Builder {
list::Builder {
client: self.0.clone(),
location: location.into(),
subscription_id: subscription_id.into(),
}
}
}
pub mod list {
use super::models;
use azure_core::error::ResultExt;
type Response = models::VirtualMachineSizeListResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) location: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/locations/{}/vmSizes",
this.client.endpoint(),
&this.subscription_id,
&this.location
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::VirtualMachineSizeListResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
}
pub mod quotas {
    use super::models;
    #[doc = "Operations client for the `quotas` resource provider group."]
    pub struct Client(pub(crate) super::Client);
    impl Client {
        #[doc = "Update quota for each VM family in workspace."]
        pub fn update(
            &self,
            location: impl Into<String>,
            parameters: impl Into<models::QuotaUpdateParameters>,
            subscription_id: impl Into<String>,
        ) -> update::Builder {
            update::Builder {
                client: self.0.clone(),
                location: location.into(),
                parameters: parameters.into(),
                subscription_id: subscription_id.into(),
            }
        }
        #[doc = "Gets the currently assigned Workspace Quotas based on VMFamily."]
        pub fn list(&self, subscription_id: impl Into<String>, location: impl Into<String>) -> list::Builder {
            list::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                location: location.into(),
            }
        }
    }
    pub mod update {
        use super::models;
        use azure_core::error::ResultExt;
        type Response = models::UpdateWorkspaceQuotasResult;
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) location: String,
            pub(crate) parameters: models::QuotaUpdateParameters,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            #[doc = "POSTs the quota-update parameters as JSON and deserializes the HTTP 200 response; any other status is returned as an `HttpResponse` error."]
            pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
                Box::pin({
                    let this = self.clone();
                    async move {
                        let url_str = &format!(
                            "{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/locations/{}/updateQuotas",
                            this.client.endpoint(),
                            &this.subscription_id,
                            &this.location
                        );
                        let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
                        let mut req_builder = http::request::Builder::new();
                        req_builder = req_builder.method(http::Method::POST);
                        let credential = this.client.token_credential();
                        let token_response = credential
                            .get_token(&this.client.scopes().join(" "))
                            .await
                            .context(azure_core::error::ErrorKind::Other, "get bearer token")?;
                        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                        url.query_pairs_mut().append_pair("api-version", "2021-07-01");
                        req_builder = req_builder.header("content-type", "application/json");
                        let req_body = azure_core::to_json(&this.parameters)?;
                        req_builder = req_builder.uri(url.as_str());
                        let req = req_builder
                            .body(req_body)
                            .context(azure_core::error::ErrorKind::Other, "build request")?;
                        let rsp = this
                            .client
                            .send(req)
                            .await
                            .context(azure_core::error::ErrorKind::Io, "execute request")?;
                        // Headers are not inspected here; underscore avoids an unused-variable warning.
                        let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
                        match rsp_status {
                            http::StatusCode::OK => {
                                let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
                                let rsp_value: models::UpdateWorkspaceQuotasResult = serde_json::from_slice(&rsp_body)?;
                                Ok(rsp_value)
                            }
                            status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
                                status: status_code.as_u16(),
                                error_code: None,
                            })),
                        }
                    }
                })
            }
        }
    }
    pub mod list {
        use super::models;
        use azure_core::error::ResultExt;
        type Response = models::ListWorkspaceQuotas;
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) location: String,
        }
        impl Builder {
            #[doc = "Streams pages of workspace quotas, following service-supplied continuation links; each page deserializes an HTTP 200 body."]
            pub fn into_stream(self) -> azure_core::Pageable<Response, azure_core::error::Error> {
                let make_request = move |continuation: Option<azure_core::prelude::Continuation>| {
                    let this = self.clone();
                    async move {
                        let url_str = &format!(
                            "{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/locations/{}/quotas",
                            this.client.endpoint(),
                            &this.subscription_id,
                            &this.location
                        );
                        // Use the DataConversion/"parse url" context for URL parsing, consistent with sibling builders.
                        let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
                        let mut req_builder = http::request::Builder::new();
                        let rsp = match continuation {
                            Some(token) => {
                                // The continuation token is an absolute next-link; rebase the URL onto it.
                                url.set_path("");
                                url = url
                                    .join(&token.into_raw())
                                    .context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
                                let has_api_version_already = url.query_pairs().any(|(k, _)| k == "api-version");
                                if !has_api_version_already {
                                    url.query_pairs_mut().append_pair("api-version", "2021-07-01");
                                }
                                req_builder = req_builder.uri(url.as_str());
                                req_builder = req_builder.method(http::Method::GET);
                                let credential = this.client.token_credential();
                                let token_response = credential
                                    .get_token(&this.client.scopes().join(" "))
                                    .await
                                    .context(azure_core::error::ErrorKind::Other, "get bearer token")?;
                                req_builder =
                                    req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                                let req_body = azure_core::EMPTY_BODY;
                                let req = req_builder
                                    .body(req_body)
                                    .context(azure_core::error::ErrorKind::Other, "build request")?;
                                this.client
                                    .send(req)
                                    .await
                                    .context(azure_core::error::ErrorKind::Io, "execute request")?
                            }
                            None => {
                                req_builder = req_builder.method(http::Method::GET);
                                let credential = this.client.token_credential();
                                let token_response = credential
                                    .get_token(&this.client.scopes().join(" "))
                                    .await
                                    .context(azure_core::error::ErrorKind::Other, "get bearer token")?;
                                req_builder =
                                    req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                                url.query_pairs_mut().append_pair("api-version", "2021-07-01");
                                let req_body = azure_core::EMPTY_BODY;
                                req_builder = req_builder.uri(url.as_str());
                                let req = req_builder
                                    .body(req_body)
                                    .context(azure_core::error::ErrorKind::Other, "build request")?;
                                this.client
                                    .send(req)
                                    .await
                                    .context(azure_core::error::ErrorKind::Io, "execute request")?
                            }
                        };
                        // Headers are not inspected here; underscore avoids an unused-variable warning.
                        let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
                        match rsp_status {
                            http::StatusCode::OK => {
                                let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
                                let rsp_value: models::ListWorkspaceQuotas = serde_json::from_slice(&rsp_body)?;
                                Ok(rsp_value)
                            }
                            status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
                                status: status_code.as_u16(),
                                error_code: None,
                            })),
                        }
                    }
                };
                azure_core::Pageable::new(make_request)
            }
        }
    }
}
pub mod compute {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
skip: None,
}
}
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
compute_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
compute_name: compute_name.into(),
}
}
pub fn create_or_update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
compute_name: impl Into<String>,
parameters: impl Into<models::ComputeResource>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
compute_name: compute_name.into(),
parameters: parameters.into(),
}
}
pub fn update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
compute_name: impl Into<String>,
parameters: impl Into<models::ClusterUpdateParameters>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
compute_name: compute_name.into(),
parameters: parameters.into(),
}
}
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
compute_name: impl Into<String>,
underlying_resource_action: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
compute_name: compute_name.into(),
underlying_resource_action: underlying_resource_action.into(),
}
}
pub fn list_nodes(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
compute_name: impl Into<String>,
) -> list_nodes::Builder {
list_nodes::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
compute_name: compute_name.into(),
}
}
pub fn list_keys(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
compute_name: impl Into<String>,
) -> list_keys::Builder {
list_keys::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
compute_name: compute_name.into(),
}
}
pub fn start(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
compute_name: impl Into<String>,
) -> start::Builder {
start::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
compute_name: compute_name.into(),
}
}
pub fn stop(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
compute_name: impl Into<String>,
) -> stop::Builder {
stop::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
compute_name: compute_name.into(),
}
}
pub fn restart(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
compute_name: impl Into<String>,
) -> restart::Builder {
restart::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
compute_name: compute_name.into(),
}
}
}
pub mod list {
use super::models;
use azure_core::error::ResultExt;
type Response = models::PaginatedComputeResourcesList;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) skip: Option<String>,
}
impl Builder {
pub fn skip(mut self, skip: impl Into<String>) -> Self {
self.skip = Some(skip.into());
self
}
pub fn into_stream(self) -> azure_core::Pageable<Response, azure_core::error::Error> {
let make_request = move |continuation: Option<azure_core::prelude::Continuation>| {
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::Other, "build request")?;
let mut req_builder = http::request::Builder::new();
let rsp = match continuation {
Some(token) => {
url.set_path("");
url = url
.join(&token.into_raw())
.context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let has_api_version_already = url.query_pairs().any(|(k, _)| k == "api-version");
if !has_api_version_already {
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
}
req_builder = req_builder.uri(url.as_str());
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
let req_body = azure_core::EMPTY_BODY;
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
None => {
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
if let Some(skip) = &this.skip {
url.query_pairs_mut().append_pair("$skip", skip);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
};
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::PaginatedComputeResourcesList = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
};
azure_core::Pageable::new(make_request)
}
}
}
pub mod get {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ComputeResource;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) compute_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name,
&this.compute_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ComputeResource = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
use azure_core::error::ResultExt;
#[derive(Debug)]
pub enum Response {
Ok200(models::ComputeResource),
Created201(models::ComputeResource),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) compute_name: String,
pub(crate) parameters: models::ComputeResource,
}
impl Builder {
#[doc = "only the first response will be fetched as long running operations are not supported yet"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name,
&this.compute_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&this.parameters)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ComputeResource = serde_json::from_slice(&rsp_body)?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ComputeResource = serde_json::from_slice(&rsp_body)?;
Ok(Response::Created201(rsp_value))
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod update {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ComputeResource;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) compute_name: String,
pub(crate) parameters: models::ClusterUpdateParameters,
}
impl Builder {
#[doc = "only the first response will be fetched as long running operations are not supported yet"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name,
&this.compute_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&this.parameters)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ComputeResource = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod delete {
use super::models;
use azure_core::error::ResultExt;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) compute_name: String,
pub(crate) underlying_resource_action: String,
}
impl Builder {
#[doc = "only the first response will be fetched as long running operations are not supported yet"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name,
&this.compute_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let underlying_resource_action = &this.underlying_resource_action;
url.query_pairs_mut()
.append_pair("underlyingResourceAction", underlying_resource_action);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod list_nodes {
use super::models;
use azure_core::error::ResultExt;
type Response = models::AmlComputeNodesInformation;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) compute_name: String,
}
impl Builder {
pub fn into_stream(self) -> azure_core::Pageable<Response, azure_core::error::Error> {
let make_request = move |continuation: Option<azure_core::prelude::Continuation>| {
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}/listNodes" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name , & this . compute_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::Other, "build request")?;
let mut req_builder = http::request::Builder::new();
let rsp = match continuation {
Some(token) => {
url.set_path("");
url = url
.join(&token.into_raw())
.context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let has_api_version_already = url.query_pairs().any(|(k, _)| k == "api-version");
if !has_api_version_already {
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
}
req_builder = req_builder.uri(url.as_str());
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
let req_body = azure_core::EMPTY_BODY;
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
None => {
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
};
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::AmlComputeNodesInformation = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
};
azure_core::Pageable::new(make_request)
}
}
}
pub mod list_keys {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ComputeSecrets;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) compute_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}/listKeys" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name , & this . compute_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ComputeSecrets = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod start {
use super::models;
use azure_core::error::ResultExt;
type Response = ();
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) compute_name: String,
}
impl Builder {
#[doc = "only the first response will be fetched as long running operations are not supported yet"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}/start" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name , & this . compute_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::ACCEPTED => Ok(()),
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod stop {
use super::models;
use azure_core::error::ResultExt;
type Response = ();
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) compute_name: String,
}
impl Builder {
#[doc = "only the first response will be fetched as long running operations are not supported yet"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}/stop" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name , & this . compute_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::ACCEPTED => Ok(()),
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod restart {
use super::models;
use azure_core::error::ResultExt;
type Response = ();
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) compute_name: String,
}
impl Builder {
#[doc = "only the first response will be fetched as long running operations are not supported yet"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}/restart" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name , & this . compute_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::ACCEPTED => Ok(()),
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
}
pub mod private_endpoint_connections {
use super::models;
    /// Sub-client for workspace private-endpoint-connection operations;
    /// wraps the service client it was created from.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// List all private endpoint connections in the workspace.
        pub fn list(
            &self,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> list::Builder {
            list::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
        /// Get a single private endpoint connection by name.
        pub fn get(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            private_endpoint_connection_name: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                private_endpoint_connection_name: private_endpoint_connection_name.into(),
            }
        }
        /// Create or update a private endpoint connection with the given properties.
        pub fn create_or_update(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            private_endpoint_connection_name: impl Into<String>,
            properties: impl Into<models::PrivateEndpointConnection>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                private_endpoint_connection_name: private_endpoint_connection_name.into(),
                properties: properties.into(),
            }
        }
        /// Delete a private endpoint connection by name.
        pub fn delete(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            private_endpoint_connection_name: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                private_endpoint_connection_name: private_endpoint_connection_name.into(),
            }
        }
    }
pub mod list {
use super::models;
use azure_core::error::ResultExt;
type Response = models::PrivateEndpointConnectionListResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
#[doc = "only the first response will be fetched as the continuation token is not part of the response schema"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateEndpointConnections" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::PrivateEndpointConnectionListResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod get {
use super::models;
use azure_core::error::ResultExt;
type Response = models::PrivateEndpointConnection;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) private_endpoint_connection_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateEndpointConnections/{}" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name , & this . private_endpoint_connection_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::PrivateEndpointConnection = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
use azure_core::error::ResultExt;
type Response = models::PrivateEndpointConnection;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) private_endpoint_connection_name: String,
pub(crate) properties: models::PrivateEndpointConnection,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateEndpointConnections/{}" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name , & this . private_endpoint_connection_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&this.properties)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::PrivateEndpointConnection = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod delete {
use super::models;
use azure_core::error::ResultExt;
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) private_endpoint_connection_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateEndpointConnections/{}" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name , & this . private_endpoint_connection_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
}
pub mod private_link_resources {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
}
}
}
pub mod list {
use super::models;
use azure_core::error::ResultExt;
type Response = models::PrivateLinkResourceListResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateLinkResources" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::PrivateLinkResourceListResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
}
pub mod workspace_connections {
use super::models;
    /// Sub-client for workspace connection operations;
    /// wraps the service client it was created from.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// List workspace connections; `target`/`category` filters start unset
        /// and can be supplied via the returned builder.
        pub fn list(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
        ) -> list::Builder {
            list::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                target: None,
                category: None,
            }
        }
        /// Get a single workspace connection by name.
        pub fn get(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            connection_name: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                connection_name: connection_name.into(),
            }
        }
        /// Create a workspace connection from the given parameters.
        pub fn create(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            connection_name: impl Into<String>,
            parameters: impl Into<models::WorkspaceConnection>,
        ) -> create::Builder {
            create::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                connection_name: connection_name.into(),
                parameters: parameters.into(),
            }
        }
        /// Delete a workspace connection by name.
        pub fn delete(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            connection_name: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                connection_name: connection_name.into(),
            }
        }
    }
pub mod list {
use super::models;
use azure_core::error::ResultExt;
type Response = models::PaginatedWorkspaceConnectionsList;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) target: Option<String>,
pub(crate) category: Option<String>,
}
impl Builder {
pub fn target(mut self, target: impl Into<String>) -> Self {
self.target = Some(target.into());
self
}
pub fn category(mut self, category: impl Into<String>) -> Self {
self.category = Some(category.into());
self
}
#[doc = "only the first response will be fetched as the continuation token is not part of the response schema"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/connections",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
if let Some(target) = &this.target {
url.query_pairs_mut().append_pair("target", target);
}
if let Some(category) = &this.category {
url.query_pairs_mut().append_pair("category", category);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::PaginatedWorkspaceConnectionsList = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod get {
use super::models;
use azure_core::error::ResultExt;
type Response = models::WorkspaceConnection;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) connection_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/connections/{}" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name , & this . connection_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::WorkspaceConnection = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod create {
use super::models;
use azure_core::error::ResultExt;
type Response = models::WorkspaceConnection;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) connection_name: String,
pub(crate) parameters: models::WorkspaceConnection,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/connections/{}" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name , & this . connection_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&this.parameters)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::WorkspaceConnection = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod delete {
use super::models;
use azure_core::error::ResultExt;
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) connection_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/connections/{}" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . workspace_name , & this . connection_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
}
pub mod workspace_features {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
}
}
}
pub mod list {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ListAmlUserFeatureResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_stream(self) -> azure_core::Pageable<Response, azure_core::error::Error> {
let make_request = move |continuation: Option<azure_core::prelude::Continuation>| {
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/features",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.workspace_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::Other, "build request")?;
let mut req_builder = http::request::Builder::new();
let rsp = match continuation {
Some(token) => {
url.set_path("");
url = url
.join(&token.into_raw())
.context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let has_api_version_already = url.query_pairs().any(|(k, _)| k == "api-version");
if !has_api_version_already {
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
}
req_builder = req_builder.uri(url.as_str());
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
let req_body = azure_core::EMPTY_BODY;
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
None => {
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
};
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ListAmlUserFeatureResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
};
azure_core::Pageable::new(make_request)
}
}
}
}
pub mod workspace_skus {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
}
}
}
pub mod list {
use super::models;
use azure_core::error::ResultExt;
type Response = models::SkuListResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_stream(self) -> azure_core::Pageable<Response, azure_core::error::Error> {
let make_request = move |continuation: Option<azure_core::prelude::Continuation>| {
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/workspaces/skus",
this.client.endpoint(),
&this.subscription_id
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::Other, "build request")?;
let mut req_builder = http::request::Builder::new();
let rsp = match continuation {
Some(token) => {
url.set_path("");
url = url
.join(&token.into_raw())
.context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let has_api_version_already = url.query_pairs().any(|(k, _)| k == "api-version");
if !has_api_version_already {
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
}
req_builder = req_builder.uri(url.as_str());
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
let req_body = azure_core::EMPTY_BODY;
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
None => {
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
this.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?
}
};
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::SkuListResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
};
azure_core::Pageable::new(make_request)
}
}
}
} | |
util_suite_test.go | // Copyright 2019 Copyright (c) 2019 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package util_test
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"testing" | )
func TestUtil(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "Utils Test Suite")
} | |
circle-manager.js | 'use strict';
Object.defineProperty(exports, "__esModule", { | var _core = require('@angular/core');
var _Observable = require('rxjs/Observable');
var _googleMapsApiWrapper = require('../google-maps-api-wrapper');
var CircleManager = function () {
function CircleManager(_apiWrapper, _zone) {
this._apiWrapper = _apiWrapper;
this._zone = _zone;
this._circles = new Map();
}
CircleManager.prototype.addCircle = function (circle) {
this._circles.set(circle, this._apiWrapper.createCircle({
center: { lat: circle.latitude, lng: circle.longitude },
clickable: circle.clickable,
draggable: circle.draggable,
editable: circle.editable,
fillColor: circle.fillColor,
fillOpacity: circle.fillOpacity,
radius: circle.radius,
strokeColor: circle.strokeColor,
strokeOpacity: circle.strokeOpacity,
strokePosition: circle.strokePosition,
strokeWeight: circle.strokeWeight,
visible: circle.visible,
zIndex: circle.zIndex
}));
};
/**
* Removes the given circle from the map.
*/
CircleManager.prototype.removeCircle = function (circle) {
var _this = this;
return this._circles.get(circle).then(function (c) {
c.setMap(null);
_this._circles.delete(circle);
});
};
CircleManager.prototype.setOptions = function (circle, options) {
return this._circles.get(circle).then(function (c) {
return c.setOptions(options);
});
};
CircleManager.prototype.getBounds = function (circle) {
return this._circles.get(circle).then(function (c) {
return c.getBounds();
});
};
CircleManager.prototype.getCenter = function (circle) {
return this._circles.get(circle).then(function (c) {
return c.getCenter();
});
};
CircleManager.prototype.getRadius = function (circle) {
return this._circles.get(circle).then(function (c) {
return c.getRadius();
});
};
CircleManager.prototype.setCenter = function (circle) {
return this._circles.get(circle).then(function (c) {
return c.setCenter({ lat: circle.latitude, lng: circle.longitude });
});
};
CircleManager.prototype.setEditable = function (circle) {
return this._circles.get(circle).then(function (c) {
return c.setEditable(circle.editable);
});
};
CircleManager.prototype.setDraggable = function (circle) {
return this._circles.get(circle).then(function (c) {
return c.setDraggable(circle.draggable);
});
};
CircleManager.prototype.setVisible = function (circle) {
return this._circles.get(circle).then(function (c) {
return c.setVisible(circle.visible);
});
};
CircleManager.prototype.setRadius = function (circle) {
return this._circles.get(circle).then(function (c) {
return c.setRadius(circle.radius);
});
};
CircleManager.prototype.createEventObservable = function (eventName, circle) {
var _this = this;
return _Observable.Observable.create(function (observer) {
var listener = null;
_this._circles.get(circle).then(function (c) {
listener = c.addListener(eventName, function (e) {
return _this._zone.run(function () {
return observer.next(e);
});
});
});
return function () {
if (listener !== null) {
listener.remove();
}
};
});
};
return CircleManager;
}();
exports.CircleManager = CircleManager;
CircleManager.decorators = [{ type: _core.Injectable }];
/** @nocollapse */
CircleManager.ctorParameters = function () {
return [{ type: _googleMapsApiWrapper.GoogleMapsAPIWrapper }, { type: _core.NgZone }];
};
//# sourceMappingURL=circle-manager.js.map | value: true
});
exports.CircleManager = undefined;
|
radio.js | import React from 'react';
const ReactDOM = require('react-dom');
let id = 0;
class Radio extends React.PureComponent {
constructor(props) {
super(props);
this.onChange = this.onChange.bind(this);
this.id = id++;
}
onChange(e) {
this.props.updateProps({ value: e.target.value });
}
render() {
const {
idyll,
hasError,
updateProps,
options,
value,
...props
} = this.props;
return (
<div
{...props}
onClick={this.props.onClick || (e => e.stopPropagation())}
>
{options.map(d => {
if (typeof d === 'string') {
return (
<label key={d}>
<input
type="radio"
checked={d === value}
onChange={this.onChange}
value={d}
name={this.id}
/>
{d}
</label>
);
}
return (
<label key={d.value}>
<input
type="radio"
checked={d.value === value}
onChange={this.onChange}
value={d.value}
name={this.id}
/>
{d.label || d.value}
</label>
);
})}
</div>
);
}
}
Radio.defaultProps = {
options: []
};
Radio._idyll = {
name: 'Radio',
tagType: 'closed',
props: [
{
name: 'value', | description: 'The value of the "checked" radio button'
},
{
name: 'options',
type: 'array',
example: '`["option1", "option2"]`',
description:
'an array representing the different buttons. Can be an array of strings like `["val1", "val2"]` or an array of objects `[{ value: "val1", label: "Value 1" }, { value: "val2", label: "Value 2" }]`.'
}
]
};
export default Radio; | type: 'string',
example: 'x', |
toboggan_trajectory.py | # Pseudo code:
# assume original map is narrow (has more rows than columns)
# transform map to array
# no. of steps downwards = no. of rows
# no. of map copies = ceil((no. of steps downwards - 1) * 3 / no. of columns)
# start at (i, j) = (0, 0)
# move across to (i + 3, j + 1)
# if element == '#', increment num_trees
# Let's try to do this without using numpy ;-p
# NB: If using numpy, could make use of concatenate, hstack, etc.
# to stack (repeat) copies of original map to the right.
# But without numpy, we'll try to use zip instead...
file = open("input.txt", "r")
map_original = [] # will be a 2D array containing original map
num_rows = 0
for line in file:
num_rows += 1
map_original.append(list(line.strip()))
map_full = map_original # map_full will be a 2D array containing full (repeated) map
# number of map copies needed horizontally
num_copies = int((num_rows - 1) * 3 / len(map_original[0])) + 1 # if using numpy, use np.ceil instead of +1
for i in range(num_copies):
# append map_full with copy of map_original
map_full = [(map_full + map_original) for map_full, map_original in zip(map_full, map_original)]
# start at position (0, 0)
column = 0
row = 0
num_trees = 0
while row < (num_rows - 1):
column += 3
row += 1
if map_full[row][column] == "#":
|
print("num_trees: ", num_trees)
| num_trees += 1 |
model_audio.go | package model
import (
"encoding/json"
"errors"
"github.com/huaweicloud/huaweicloud-sdk-go-v3/core/converter"
"strings"
)
type Audio struct {
// 输出策略。 取值如下: - discard - transcode >- 当视频参数中的“output_policy”为\"discard\",且音频参数中的“output_policy”为“transcode”时,表示只输出音频。 >- 当视频参数中的“output_policy”为\"transcode\",且音频参数中的“output_policy”为“discard”时,表示只输出视频。 >- 同时为\"discard\"时不合法。 >- 同时为“transcode”时,表示输出音视频。
OutputPolicy *AudioOutputPolicy `json:"output_policy,omitempty"`
// 音频编码格式。 取值如下: - 1:AAC格式。 - 2:HEAAC1格式 。 - 3:HEAAC2格式。 - 4:MP3格式 。
Codec int32 `json:"codec"`
// 音频采样率。 取值如下: - 1:AUDIO_SAMPLE_AUTO - 2:AUDIO_SAMPLE_22050(22050Hz) - 3:AUDIO_SAMPLE_32000(32000Hz) - 4:AUDIO_SAMPLE_44100(44100Hz) - 5:AUDIO_SAMPLE_48000(48000Hz) - 6:AUDIO_SAMPLE_96000(96000Hz)
SampleRate int32 `json:"sample_rate"`
// 音频码率。 取值范围:0或[8,1000]。 单位:kbit/s。
Bitrate *int32 `json:"bitrate,omitempty"`
// 声道数。 取值如下: - 1:AUDIO_CHANNELS_1 - 2:AUDIO_CHANNELS_2 - 6:AUDIO_CHANNELS_5_1
Channels int32 `json:"channels"`
}
func (o Audio) String() string {
data, err := json.Marshal(o)
if err != nil {
return "Audio struct{}"
}
| return strings.Join([]string{"Audio", string(data)}, " ")
}
type AudioOutputPolicy struct {
value string
}
type AudioOutputPolicyEnum struct {
TRANSCODE AudioOutputPolicy
DISCARD AudioOutputPolicy
COPY AudioOutputPolicy
}
func GetAudioOutputPolicyEnum() AudioOutputPolicyEnum {
return AudioOutputPolicyEnum{
TRANSCODE: AudioOutputPolicy{
value: "transcode",
},
DISCARD: AudioOutputPolicy{
value: "discard",
},
COPY: AudioOutputPolicy{
value: "copy",
},
}
}
func (c AudioOutputPolicy) MarshalJSON() ([]byte, error) {
return json.Marshal(c.value)
}
func (c *AudioOutputPolicy) UnmarshalJSON(b []byte) error {
myConverter := converter.StringConverterFactory("string")
if myConverter != nil {
val, err := myConverter.CovertStringToInterface(strings.Trim(string(b[:]), "\""))
if err == nil {
c.value = val.(string)
return nil
}
return err
} else {
return errors.New("convert enum data to string error")
}
} | |
unio_sv.ts | <TS language="sv" version="2.0">
<context>
<name>AddressBookPage</name>
<message>
<source>Create a new address</source>
<translation>Skapa en ny adress</translation>
</message>
<message>
<source>&New</source>
<translation>&Ny</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopiera den valda adressen till systemurklippet</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Kopiera</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Radera den valda adressen från listan</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Radera</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exportera datan från fliken till en fil</translation>
</message>
<message>
<source>&Export</source>
<translation>&Exportera</translation>
</message>
<message>
<source>C&lose</source>
<translation>S&täng</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Välj en adress att skicka mynt till</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Välj adressen att motta mynt från</translation>
</message>
<message>
<source>C&hoose</source>
<translation>V&älj</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Avsändaradresser</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Mottagaradresser</translation>
</message>
<message>
<source>These are your Unio addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>De här är dina Unio-adresser för att skicka betalningar. Kontrollera alltid mängden och mottagaradressen innan du skickar mynt.</translation>
</message>
<message>
<source>These are your Unio addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>De här är dina Unio-adresser för betalningsmottagningar. Det rekommenderas att använda en ny mottagaradress för varje transaktion.</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&Kopiera adress</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>Kopiera &etikett</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Redigera</translation>
</message>
<message>
<source>Export Address List</source>
<translation>Exportera adresslistan</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Kommaseparerad fil (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Exporteringen misslyckades</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Etikett</translation>
</message>
<message>
<source>Address</source>
<translation>Adress</translation>
</message>
<message>
<source>(no label)</source>
<translation>(Ingen etikett)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Lösenfrasdialog</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Ange lösenfras</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Ny lösenfras</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Upprepa ny lösenfras</translation>
</message>
<message>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation>Arbetar för att inaktivera de triviala sändpengarna när OS-kontot är komprometterat. Ger ingen reell säkerhet.</translation>
</message>
<message>
<source>For anonymization only</source>
<translation>Endast för anonymisering</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Kryptera plånbok</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Denna handling kräver din plånboks lösenfras för att låsa upp plånboken.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Lås upp plånbok</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Denna handling kräver din plånboks lösenfras för att dekryptera plånboken.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Dekryptera plånbok</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Ändra lösenfras</translation>
</message>
<message>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Skriv in den gamla och den nya lösenfrasen för plånboken.</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Bekräfta plånbokskryptering</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR UNIO</b>!</source>
<translation>Varning: Om du krypterar din plånbok och förlorar din lösenfras kommer du att <b>FÖRLORA ALLA DINA UNIO</b>!</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Är du säker på att du vill kryptera din plånbok?</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>Plånbok krypterad</translation>
</message>
<message>
<source>Unio will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your unios from being stolen by malware infecting your computer.</source>
<translation>Unio kommer nu att färdigställa krypteringsprocessen. Kom ihåg att krypteringen av din plånbok inte kan skydda dig helt och hållet från att dina Unio stjäls av skadeprogram som har infekterat din dator.</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>VIKTIGT: Alla tidigare säkerhetskopior du har gjort av plånboksfilen bör ersättas med den nygenererade krypterade plånboksfilen. Av säkerhetsskäl kommer tidigare säkerhetskopior av den okrypterade plånboksfilen bli oanvändbara så fort du använder den nya krypterade plånboken.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Plånbokskrypteringen misslyckades</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Plånbokskrypteringen misslyckades på grund av ett internt fel. Din plånbok krypterades inte.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>Den angivna lösenfrasen överensstämmer inte.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>Plånboksupplåsning misslyckades</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Den inmatade lösenfrasen för plånboksdekrypteringen var felaktig.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>Plånboksdekryptering misslyckades.</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>Plånbokens lösenfras ändrades framgångsrikt.</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>Varning: Caps Lock är aktiverat!</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Unio Core</source>
<translation>Unio Core
</translation>
</message>
<message>
<source>Wallet</source>
<translation>Plånbok</translation>
</message>
<message>
<source>Node</source>
<translation>Nod</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Översikt</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Visa allmän plånboksöversikt</translation>
</message>
<message>
<source>&Send</source>
<translation>&Skicka</translation>
</message>
<message>
<source>Send coins to a Unio address</source>
<translation>Skicka mynt till en Unio-adress</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Motta</translation>
</message>
<message>
<source>Request payments (generates QR codes and unio: URIs)</source>
<translation>Begär betalningar (genererar QR-koder och Unio:-URI:s)</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transaktioner</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Bläddra i transaktionshistoriken</translation>
</message>
<message>
<source>E&xit</source>
<translation>A&vsluta</translation>
</message>
<message>
<source>Quit application</source>
<translation>Avsluta applikationen</translation>
</message>
<message>
<source>&About Unio Core</source>
<translation>&Om Unio Core</translation>
</message>
<message>
<source>About &Qt</source>
<translation>Om &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Visa information om Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Alternativ...</translation>
</message>
<message>
<source>Modify configuration options for Unio</source>
<translation>Anpassa konfigurationsalternatv för Unio</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Visa/göm</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Visa eller göm huvudfönstret</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>&Kryptera plånbok...</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Kryptera de privata nycklarna vilka tillhör din plånbok</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&Säkerhetskopiera plånbok...</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Säkerhetskopiera plånboken till en annan plats</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>&Ändra lösenfras...</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Ändra lösenfrasen som används för plånbokskryptering</translation>
</message>
<message>
<source>&Unlock Wallet...</source>
<translation>&Lås upp plånbok...</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Lås upp plånbok</translation>
</message>
<message>
<source>&Lock Wallet</source>
<translation>&Lås plånbok</translation>
</message>
<message>
<source>Sign &message...</source>
<translation>Signera &meddelande...</translation>
</message>
<message>
<source>Sign messages with your Unio addresses to prove you own them</source>
<translation>Signera meddelanden med dina Unio-adresser för att bevisa att du äger dem</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>&Bekräfta meddelande...</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified Unio addresses</source>
<translation>Bekräfta meddelanden för att garantera att de signerades med de angivna Unio-adresserna</translation>
</message>
<message>
<source>&Information</source>
<translation>&Information</translation>
</message>
<message>
<source>Show diagnostic information</source>
<translation>Visa diagnostisk information</translation>
</message>
<message>
<source>&Debug console</source>
<translation>&Avsökningskonsol</translation>
</message>
<message>
<source>Open debugging console</source>
<translation>Öppna avsökningskonsol</translation>
</message>
<message>
<source>&Network Monitor</source>
<translation>&Nätverksövervakare</translation>
</message>
<message>
<source>Show network monitor</source>
<translation>Visa nätverksövervakare</translation>
</message>
<message>
<source>Open &Configuration File</source>
<translation>Öppna &Konfigurationsfil</translation>
</message>
<message>
<source>Open configuration file</source>
<translation>Öppna konfigurationsfil</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>&Avsändaradresser...</translation>
</message>
<message>
<source>Show the list of used sending addresses and labels</source>
<translation>Visa listan för redan använda avsändaradresser och etiketter</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>&Mottagaradresser...</translation>
</message>
<message>
<source>Show the list of used receiving addresses and labels</source>
<translation>Visa listan för redan använda mottagaradresser och etiketter</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>Öppna &URI...</translation>
</message>
<message>
<source>Open a unio: URI or payment request</source>
<translation>Öppna en Unio-URI eller betalningsbegäran</translation>
</message>
<message>
<source>&Command-line options</source>
<translation>&Kommandoradalternativ</translation>
</message>
<message>
<source>Show the Unio Core help message to get a list with possible Unio command-line options</source>
<translation>Visa Unio Core-hjälpmeddelandet för att få en lista med möjliga Unio-kommandoradalternativ</translation>
</message>
<message>
<source>&File</source>
<translation>&Fil</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Inställningar</translation>
</message>
<message>
<source>&Tools</source>
<translation>&Verktyg</translation>
</message>
<message>
<source>&Help</source>
<translation>&Hjälp</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Verktygsfält för tabbar</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Synkroniserar med nätverk...</translation>
</message>
<message>
<source>Importing blocks from disk...</source>
<translation>Importerar block från disk...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Återindexerar block på disk...</translation>
</message>
<message>
<source>No block source available...</source>
<translation>Ingen tillgänglig blockkälla...</translation>
</message>
<message>
<source>Up to date</source>
<translation>Aktuell</translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 och %2</translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 bakom</translation>
</message>
<message>
<source>Catching up...</source>
<translation>Knappar in...</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>Senast mottagna block genererades för %1 sedan.</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>Transaktioner efter denna kommer ännu inte vara synliga.</translation>
</message>
<message>
<source>Error</source>
<translation>Fel</translation>
</message>
<message>
<source>Warning</source>
<translation>Varning</translation>
</message>
<message>
<source>Information</source>
<translation>Information</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Skickad transaktion</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Inkommande transaktion</translation>
</message>
<message>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Datum: %1
Mängd: %2
Typ: %3
Adress: %4
</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Plånboken är <b>krypterad</b> och för närvarande <b>olåst</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b> for anonimization only</source>
<translation>Plånboken är <b>krypterad</b> och för närvarande <b>olåst</b> endast för anonymisering</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Plånboken är <b>krypterad</b> och för närvarande <b>låst</b></translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<source>Network Alert</source>
<translation>Nätverkslarm</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Quantity:</source>
<translation>Antal:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Mängd:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Prioritet:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Avgift:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Efter avgift:</translation>
</message>
<message>
<source>Change:</source>
<translation>Växel:</translation>
</message>
<message>
<source>(un)select all</source>
<translation>(av)markera alla</translation>
</message>
<message>
<source>Tree mode</source>
<translation>Trädmetod</translation>
</message>
<message>
<source>List mode</source>
<translation>Listmetod</translation>
</message>
<message>
<source>(1 locked)</source>
<translation>(1 låst)</translation>
</message>
<message>
<source>Amount</source>
<translation>Mängd</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Bekräftelser</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Bekräftad</translation>
</message>
<message>
<source>Priority</source>
<translation>Prioritet</translation>
</message>
<message>
<source>Copy address</source>
<translation>Kopiera adress</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopiera etikett</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopiera mängd</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Kopiera transaktions-id</translation>
</message>
<message>
<source>Lock unspent</source>
<translation>Lås ospenderat</translation>
</message>
<message>
<source>Unlock unspent</source>
<translation>Lås upp ospenderat</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Kopiera antal</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Kopiera avgift</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Kopiera efter avgift</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Kopiera bytes</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Kopiera prioritet</translation>
</message>
<message>
<source>Copy change</source>
<translation>Kopiera växel</translation>
</message>
<message>
<source>highest</source>
<translation>högst</translation>
</message>
<message>
<source>higher</source>
<translation>högre</translation>
</message>
<message>
<source>high</source>
<translation>hög</translation>
</message>
<message>
<source>medium-high</source>
<translation>medelhög</translation>
</message>
<message>
<source>n/a</source>
<translation>E/t</translation>
</message>
<message>
<source>medium</source>
<translation>medel</translation>
</message>
<message>
<source>low-medium</source>
<translation>medellåg</translation>
</message>
<message>
<source>low</source>
<translation>låg</translation>
</message>
<message>
<source>lower</source>
<translation>lägre</translation>
</message>
<message>
<source>lowest</source>
<translation>lägst</translation>
</message>
<message>
<source>(%1 locked)</source>
<translation>(%1 låst)</translation>
</message>
<message>
<source>none</source>
<translation>inga</translation>
</message>
<message>
<source>yes</source>
<translation>ja</translation>
</message>
<message>
<source>no</source>
<translation>nej</translation>
</message>
<message>
<source>This label turns red, if the transaction size is greater than 1000 bytes.</source>
<translation>Denna etikett blir röd om transaktionsstorleken är större än 1000 bytes.</translation>
</message>
<message>
<source>This means a fee of at least %1 per kB is required.</source>
<translation>Detta innebär att en avgift om åtminstone %1 krävs per kB.</translation>
</message>
<message>
<source>Can vary +/- 1 byte per input.</source>
<translation>Kan variera +/- 1 byte per indata.</translation>
</message>
<message>
<source>Transactions with higher priority are more likely to get included into a block.</source>
<translation>Transaktioner med högre prioritet är mer benägna att inkluderas i ett block.</translation>
</message>
<message>
<source>This label turns red, if the priority is smaller than "medium".</source>
<translation>Denna etikett blir röd om prioriteten är mindre än "medel".</translation>
</message>
<message>
<source>This label turns red, if any recipient receives an amount smaller than %1.</source>
<translation>Denna etikett blir röd om en mottagare mottar en mängd mindre än %1.</translation>
</message>
<message>
<source>(no label)</source>
<translation>(ingen etikett)</translation>
</message>
<message>
<source>change from %1 (%2)</source>
<translation>växel från %1 (%2)</translation>
</message>
<message>
<source>(change)</source>
<translation>(växel)</translation>
</message>
</context>
<context>
<name>DarksendConfig</name>
<message>
<source>Configure Darksend</source>
<translation>Konfigurera Darksend</translation>
</message>
<message>
<source>Basic Privacy</source>
<translation>Grundläggande integritet</translation>
</message>
<message>
<source>High Privacy</source>
<translation>Hög integritet</translation>
</message>
<message>
<source>Maximum Privacy</source>
<translation>Maximal integritet</translation>
</message>
<message>
<source>Please select a privacy level.</source>
<translation>Vänligen välj en integritetsnivå.</translation>
</message>
<message>
<source>Use 2 separate masternodes to mix funds up to 1000 UNIO</source>
<translation>Använd 2 enskilda masternoder för att mixa medel upp till 1000 UNIO</translation>
</message>
<message>
<source>Use 8 separate masternodes to mix funds up to 1000 UNIO</source>
<translation>Använd 8 enskilda masternoder för att mixa medel upp till 1000 UNIO</translation>
</message>
<message>
<source>Use 16 separate masternodes</source>
<translation>Använd 16 enskilda masternoder</translation>
</message>
<message>
<source>This option is the quickest and will cost about ~0.025 UNIO to anonymize 1000 UNIO</source>
<translation>Detta alternativ är det snabbaste och kommer att kosta omkring ~0,025 UNIO för att anonymisera 1000 UNIO</translation>
</message>
<message>
<source>This option is moderately fast and will cost about 0.05 UNIO to anonymize 1000 UNIO</source>
<translation>Detta alternativ är relativt snabbt och kommer att kosta omkring 0,05 UNIO för att anonymisera 1000 UNIO</translation>
</message>
<message>
<source>0.1 UNIO per 1000 UNIO you anonymize.</source>
<translation>0,1 UNIO per 1000 UNIO du anonymiserar.</translation>
</message>
<message>
<source>This is the slowest and most secure option. Using maximum anonymity will cost</source>
<translation>Detta är det långsammaste och det säkraste alternativet. Användning av maximal anonymitet kommer att kosta</translation>
</message>
<message>
<source>Darksend Configuration</source>
<translation>Darksend-konfiguration</translation>
</message>
<message>
<source>Darksend was successfully set to basic (%1 and 2 rounds). You can change this at any time by opening Unio's configuration screen.</source>
<translation>Darksend ställdes framgångsrikt om till grundläggande (%1 och 2 omgångar). Du kan ändra detta när som helst genom att öppna Unio:s konfigurationsskärm.</translation>
</message>
<message>
<source>Darksend was successfully set to high (%1 and 8 rounds). You can change this at any time by opening Unio's configuration screen.</source>
<translation>Darksend ställdes framgångsrikt in på hög (%1 och 8 omgångar). Du kan ändra detta när som helst genom att öppna Unio:s konfigurationsskärm.</translation>
</message>
<message>
<source>Darksend was successfully set to maximum (%1 and 16 rounds). You can change this at any time by opening Unio's configuration screen.</source>
<translation>Darksend ställdes framgångsrikt in på maximal (%1 och 16 omgångar). Du kan ändra detta när som helst genom att öppna Unio:s konfigurationsskärm.</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Redigera Adress</translation>
</message>
<message>
<source>&Label</source>
<translation>&Etikett</translation>
</message>
<message>
<source>The label associated with this address list entry</source>
<translation>Den associerade etiketten med den här adresslistans inmatning</translation>
</message>
<message>
<source>&Address</source>
<translation>&Adress</translation>
</message>
<message>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation>Den associerade adressen med den här adresslistans post. Detta kan endast ändras för avsändaradresser.</translation>
</message>
<message>
<source>New receiving address</source>
<translation>Ny mottagaradress</translation>
</message>
<message>
<source>New sending address</source>
<translation>Ny avsändaradress</translation>
</message>
<message>
<source>Edit receiving address</source>
<translation>Redigera mottagaradress</translation>
</message>
<message>
<source>Edit sending address</source>
<translation>Redigera avsändaradress</translation>
</message>
<message>
<source>The entered address "%1" is not a valid Unio address.</source>
<translation>Den angivna adressen "%1" är inte en giltig Unio-adress.</translation>
</message>
<message>
<source>The entered address "%1" is already in the address book.</source>
<translation>Den angivna adressen "%1" finns redan i adressboken.</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>Plånboken kunde inte låsas upp.</translation>
</message>
<message>
<source>New key generation failed.</source>
<translation>Nygenerering av nyckel misslyckades.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>A new data directory will be created.</source>
<translation>En ny datakatalog kommer att skapas.</translation>
</message>
<message>
<source>name</source>
<translation>namn</translation>
</message>
<message>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>Katalogen finns redan. Lägg till %1 om du tänker skapa en ny katalog här.</translation>
</message>
<message>
<source>Path already exists, and is not a directory.</source>
<translation>Sökvägen finns redan och är inte en katalog.</translation>
</message>
<message>
<source>Cannot create data directory here.</source>
<translation>Kan inte skapa en datakatalog här.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>Unio Core</source>
<translation>Unio Core</translation>
</message>
<message>
<source>version</source>
<translation>version</translation>
</message>
<message>
<source>Usage:</source>
<translation>Användning:</translation>
</message>
<message>
<source>command-line options</source>
<translation>kommandoradalternativ</translation>
</message>
<message>
<source>UI options</source>
<translation>UI-alternativ</translation>
</message>
<message>
<source>Choose data directory on startup (default: 0)</source>
<translation>Välj datakatalog vid uppstart (standardvärde: 0)</translation>
</message>
<message>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Ställ in språk, till exempel "de_DE" (standardvärde: system locale)</translation>
</message>
<message>
<source>Start minimized</source>
<translation>Starta minimerat</translation>
</message>
<message>
<source>Set SSL root certificates for payment request (default: -system-)</source>
<translation>Ställ in SSL-root-certifikat för betalningsbegäranden (standardvärde: -system-)</translation>
</message>
<message>
<source>Show splash screen on startup (default: 1)</source>
<translation>Visa startbilden vid uppstart (standardvärde: 1)</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>Välkommen</translation>
</message>
<message>
<source>Welcome to Unio Core.</source>
<translation>Välkommen till Unio Core.</translation>
</message>
<message>
<source>As this is the first time the program is launched, you can choose where Unio Core will store its data.</source>
<translation>Då detta är första gången programmet startas kan du välja var Unio Core ska lagra sin data.</translation>
</message>
<message>
<source>Unio Core will download and store a copy of the Unio block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source>
<translation>Unio Core kommer att laddas ned och lagra en kopia av Unio:s blockkedja. Minst %1 GB data kommer att lagras i denna katalog och den kommer att växa med tiden. Plånboken kommer också att lagras i denna katalog.</translation>
</message>
<message>
<source>Use the default data directory</source>
<translation>Använd den förvalda datakatalogen</translation>
</message>
<message>
<source>Use a custom data directory:</source>
<translation>Använd en anpassad datakatalog:</translation>
</message>
<message>
<source>Error</source>
<translation>Fel</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Open URI</source>
<translation>Öppna URI</translation>
</message>
<message>
<source>Open payment request from URI or file</source>
<translation>Öppna betalningsbegäran från URI eller fil</translation>
</message>
<message>
<source>URI:</source>
<translation>URI:</translation>
</message>
<message>
<source>Select payment request file</source>
<translation>Välj betalningsbegäranfil</translation>
</message>
<message>
<source>Select payment request file to open</source>
<translation>Välj en betalningsbegäranfil att öppna</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Alternativ</translation>
</message>
<message>
<source>&Main</source>
<translation>&Huvud</translation>
</message>
<message>
<source>Automatically start Unio after logging in to the system.</source>
<translation>Starta Unio automatiskt efter systeminloggning.</translation>
</message>
<message>
<source>&Start Unio on system login</source>
<translation>&Starta Unio vid systeminloggning</translation>
</message>
<message>
<source>Size of &database cache</source>
<translation>Storlek på &databascache</translation>
</message>
<message>
<source>MB</source>
<translation>MB</translation>
</message>
<message>
<source>Number of script &verification threads</source>
<translation>Antal skript&bekräftelsestrådar</translation>
</message>
<message>
<source>(0 = auto, <0 = leave that many cores free)</source>
<translation>(0 = auto, <0 = lämna så många kärnor lediga)</translation>
</message>
<message>
<source>Darksend rounds to use</source>
<translation>Darksend-omgångar att använda</translation>
</message>
<message>
<source>This amount acts as a threshold to turn off Darksend once it's reached.</source>
<translation>Denna mängd fungerar som en tröskel att stänga av Darksend då det har uppnåtts.</translation>
</message>
<message>
<source>Amount of Unio to keep anonymized</source>
<translation>Mängd Unio att bibehålla anonymiserade</translation>
</message>
<message>
<source>W&allet</source>
<translation>P&lånbok</translation>
</message>
<message>
<source>Expert</source>
<translation>Expert</translation>
</message>
<message>
<source>Whether to show coin control features or not.</source>
<translation>Om myntkontrollfunktioner ska visas eller inte</translation>
</message>
<message>
<source>Enable coin &control features</source>
<translation>Aktivera mynt&kontrollfunktioner</translation>
</message>
<message>
<source>&Spend unconfirmed change</source>
<translation>&Spendera obekräftad växel</translation>
</message>
<message>
<source>&Network</source>
<translation>&Nätverk</translation>
</message>
<message>
<source>Automatically open the Unio client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Öppna Unio:s klientport automatiskt på routern. Detta fungerar bara om din router stöder UPnP och är aktiverad.</translation>
</message>
<message>
<source>Map port using &UPnP</source>
<translation>Kartlägg port med hjälp av &UPnP</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>Proxy-&IP: </translation>
</message>
<message>
<source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source>
<translation>Proxyns IP-adress (t.ex. IPv4: 127.0.0.1 / IPv6: ::1)</translation>
</message>
<message>
<source>&Port:</source>
<translation>&Port: </translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Proxyns port (t.ex. 9050)</translation>
</message>
<message>
<source>&Window</source>
<translation>&Fönster</translation>
</message>
<message>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Visa endast en systemfältikon vid fönsterminimering.</translation>
</message>
<message>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimera till systemfältet istället för till aktivitetsfältet</translation>
</message>
<message>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimera applikationen istället för att stänga ner den när fönstret stängs. När detta alternativ är aktiverat kommer applikationen endast att stängas efter att man väljer Avsluta i menyn.</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>M&inimera vid stängning</translation>
</message>
<message>
<source>&Display</source>
<translation>&Visa</translation>
</message>
<message>
<source>User Interface &language:</source>
<translation>Användargränssnitt&språk: </translation>
</message>
<message>
<source>The user interface language can be set here. This setting will take effect after restarting Unio.</source>
<translation>Användargränssnittspråket kan ställas in här. Denna inställning träder i kraft efter att Unio startats om.</translation>
</message>
<message>
<source>Language missing or translation incomplete? Help contributing translations here:
https://www.transifex.com/projects/p/unio/</source>
<translation>Fattas språk eller är det en ofullständig översättning? Hjälp till att bidra med översättningar här:
https://www.transifex.com/projects/p/unio/</translation>
</message>
<message>
<source>&Unit to show amounts in:</source>
<translation>&Enhet att visa mängder i: </translation>
</message>
<message>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Välj standardindelningenheten som ska visas i gränssnittet och när mynt skickas.</translation>
</message>
<message>
<source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source>
<translation>Tredjeparts-URL:er (t.ex. en blockutforskare) som finns i transaktionstabben som ett menyval i sammanhanget. %s i URL:en ersätts med transaktionshashen. Flera URL:er är avskilda med det vertikala strecket: |.</translation>
</message>
<message>
<source>Third party transaction URLs</source>
<translation>Tredjeparttransaktion-URL:er</translation>
</message>
<message>
<source>Active command-line options that override above options:</source>
<translation>Aktiva kommandoradalternativ som åsidosätter alternativen ovan:</translation>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>Återställ alla klientinställningar till standardvärden.</translation>
</message>
<message>
<source>&Reset Options</source>
<translation>&Återställ Alternativ</translation>
</message>
<message>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<source>&Cancel</source>
<translation>&Avbryt</translation>
</message>
<message>
<source>default</source>
<translation>standardvärde</translation>
</message>
<message>
<source>none</source>
<translation>ingen</translation>
</message>
<message>
<source>Confirm options reset</source>
<translation>Bekräfta alternativåterställning</translation>
</message>
<message>
<source>Client restart required to activate changes.</source>
<translation>Klientomstart krävs för att aktivera ändringar.</translation>
</message>
<message>
<source>Client will be shutdown, do you want to proceed?</source>
<translation>Klienten kommer att stängas ned, vill du fortsätta?</translation>
</message>
<message>
<source>This change would require a client restart.</source>
<translation>Denna ändring kommer att kräva en klientomstart.</translation>
</message> | <source>The supplied proxy address is invalid.</source>
<translation>Den angivna proxyadressen är ogiltig.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Formulär</translation>
</message>
<message>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Unio network after a connection is established, but this process has not completed yet.</source>
<translation>Den visade informationen kan vara utdaterad. Din plånbok synkroniseras automatiskt med Unio-nätverket efter att en anslutning har etablerats men denna process har ännu inte slutförts.</translation>
</message>
<message>
<source>Available:</source>
<translation>Tillgängligt:</translation>
</message>
<message>
<source>Your current spendable balance</source>
<translation>Ditt nuvarande spenderbara saldo</translation>
</message>
<message>
<source>Pending:</source>
<translation>Pågående:</translation>
</message>
<message>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source>
<translation>Totala antalet transaktioner som ännu inte har bekräftats och som ännu inte räknas med i spenderbart saldo.</translation>
</message>
<message>
<source>Immature:</source>
<translation>Omogen:</translation>
</message>
<message>
<source>Mined balance that has not yet matured</source>
<translation>Det utvunna saldot som ännu inte har mognat</translation>
</message>
<message>
<source>Total:</source>
<translation>Totalt:</translation>
</message>
<message>
<source>Your current total balance</source>
<translation>Ditt totalsaldo för närvarande</translation>
</message>
<message>
<source>Status:</source>
<translation>Status:</translation>
</message>
<message>
<source>Enabled/Disabled</source>
<translation>Aktiverad/inaktiverad</translation>
</message>
<message>
<source>Completion:</source>
<translation>Förlopp:</translation>
</message>
<message>
<source>Darksend Balance:</source>
<translation>Darksend-medel:</translation>
</message>
<message>
<source>Amount and Rounds:</source>
<translation>Mängd och omgångar:</translation>
</message>
<message>
<source>0 UNIO / 0 Rounds</source>
<translation>0 UNIO/0 omgångar</translation>
</message>
<message>
<source>Submitted Denom:</source>
<translation>Inmatad denom:</translation>
</message>
<message>
<source>n/a</source>
<translation>E/t</translation>
</message>
<message>
<source>Darksend</source>
<translation>Darksend</translation>
</message>
<message>
<source>Start/Stop Mixing</source>
<translation>Starta/stoppa mixning</translation>
</message>
<message>
<source>(Last Message)</source>
<translation>(Senaste meddelande)</translation>
</message>
<message>
<source>Try to manually submit a Darksend request.</source>
<translation>Försök att lämna in en Darksend-begäran manuellt.</translation>
</message>
<message>
<source>Try Mix</source>
<translation>Försök att mixa</translation>
</message>
<message>
<source>Reset the current status of Darksend (can interrupt Darksend if it's in the process of Mixing, which can cost you money!)</source>
<translation>Återställ den nuvarande Darksend-statusen (kan störa Darksend om den håller på att mixa vilket kan kosta dig pengar!)</translation>
</message>
<message>
<source>Reset</source>
<translation>Återställ</translation>
</message>
<message>
<source>out of sync</source>
<translation>osynkroniserad</translation>
</message>
<message>
<source>Disabled</source>
<translation>Inaktiverad</translation>
</message>
<message>
<source>Start Darksend Mixing</source>
<translation>Påbörja Darksend-mixning</translation>
</message>
<message>
<source>Stop Darksend Mixing</source>
<translation>Stoppa Darksend-mixning</translation>
</message>
<message>
<source>No inputs detected</source>
<translation>Inga inmatningar hittades</translation>
</message>
<message>
<source>Enabled</source>
<translation>Aktiverad</translation>
</message>
<message>
<source>Last Darksend message:
</source>
<translation>Senaste Darksend-meddelande:
</translation>
</message>
<message>
<source>N/A</source>
<translation>E/t</translation>
</message>
<message>
<source>Darksend was successfully reset.</source>
<translation>Darksend återställdes framgångsrikt.</translation>
</message>
<message>
<source>Darksend requires at least %1 to use.</source>
<translation>Darksend kräver åtminstone %1 att använda.</translation>
</message>
<message>
<source>Wallet is locked and user declined to unlock. Disabling Darksend.</source>
<translation>Plånboken är låst och användaren avböjde upplåsning. Inaktiverar Darksend.</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<source>Payment request error</source>
<translation>Fel vid betalningsbegäran</translation>
</message>
<message>
<source>Cannot start unio: click-to-pay handler</source>
<translation>Kan inte starta unio: klicka-för-att-betala hanterare</translation>
</message>
<message>
<source>URI handling</source>
<translation>URI-hantering</translation>
</message>
<message>
<source>Payment request fetch URL is invalid: %1</source>
<translation>Betalningsbegäran för att hämta-URL är ogiltig: %1</translation>
</message>
<message>
<source>Payment request file handling</source>
<translation>Hantering av betalningsbegäranfil</translation>
</message>
<message>
<source>Unverified payment requests to custom payment scripts are unsupported.</source>
<translation>Obekräftade betalningsbegäranden till anpassade betalningsskript stöds inte.</translation>
</message>
<message>
<source>Requested payment amount of %1 is too small (considered dust).</source>
<translation>Den begärda betalningsmängden om %1 är för smått (anses vara damm).</translation>
</message>
<message>
<source>Refund from %1</source>
<translation>Återbetalning från %1</translation>
</message>
<message>
<source>Error communicating with %1: %2</source>
<translation>Kommunikationsfel med %1: %2</translation>
</message>
<message>
<source>Bad response from server %1</source>
<translation>Dålig respons från server %1</translation>
</message>
<message>
<source>Network request error</source>
<translation>Fel vid nätverksbegäran</translation>
</message>
<message>
<source>Payment acknowledged</source>
<translation>Betalning erkänd</translation>
</message>
</context>
<context>
<name>PeerTableModel</name>
</context>
<context>
<name>QObject</name>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation>&Spara Bild...</translation>
</message>
<message>
<source>&Copy Image</source>
<translation>&Kopiera Bild</translation>
</message>
<message>
<source>Save QR Code</source>
<translation>Spara QR-kod</translation>
</message>
<message>
<source>PNG Image (*.png)</source>
<translation>PNG-bild (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>Tools window</source>
<translation>Verktygsfönster</translation>
</message>
<message>
<source>&Information</source>
<translation>&Information</translation>
</message>
<message>
<source>General</source>
<translation>Allmänt</translation>
</message>
<message>
<source>Name</source>
<translation>Namn</translation>
</message>
<message>
<source>Client name</source>
<translation>Klientnamn</translation>
</message>
<message>
<source>N/A</source>
<translation>E/t</translation>
</message>
<message>
<source>Number of connections</source>
<translation>Antal anslutningar</translation>
</message>
<message>
<source>Open the Unio debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Öppna Unios avsökningsloggfil från den nuvarande datakatalogen. Detta kan ta ett par sekunder för stora loggfiler.</translation>
</message>
<message>
<source>&Open</source>
<translation>&Öppna</translation>
</message>
<message>
<source>Startup time</source>
<translation>Uppstarttid</translation>
</message>
<message>
<source>Network</source>
<translation>Nätverk</translation>
</message>
<message>
<source>Last block time</source>
<translation>Senaste blocktid</translation>
</message>
<message>
<source>Debug log file</source>
<translation>Avsökningsloggfil</translation>
</message>
<message>
<source>Using OpenSSL version</source>
<translation>Använder OpenSSL-version</translation>
</message>
<message>
<source>Build date</source>
<translation>Kompileringsdatum</translation>
</message>
<message>
<source>Current number of blocks</source>
<translation>Nuvarande antal block</translation>
</message>
<message>
<source>Client version</source>
<translation>Klientversion</translation>
</message>
<message>
<source>Block chain</source>
<translation>Blockkedja</translation>
</message>
<message>
<source>&Console</source>
<translation>&Konsol</translation>
</message>
<message>
<source>Clear console</source>
<translation>Rensa konsollen</translation>
</message>
<message>
<source>&Network Traffic</source>
<translation>&Nätverkstrafik</translation>
</message>
<message>
<source>&Clear</source>
<translation>&Rensa</translation>
</message>
<message>
<source>Totals</source>
<translation>Sammanlagt</translation>
</message>
<message>
<source>In:</source>
<translation>In:</translation>
</message>
<message>
<source>Out:</source>
<translation>Ut:</translation>
</message>
<message>
<source>Welcome to the Unio RPC console.</source>
<translation>Välkommen till Unios RPC-konsol.</translation>
</message>
<message>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Använd upp- och ner-pilarna för att navigera i historiken och <b>Ctrl-L</b> för att rensa skärmen.</translation>
</message>
<message>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Skriv <b>help</b> för en översikt av alla tillgängliga kommandon.</translation>
</message>
<message>
<source>%1 B</source>
<translation>%1 B</translation>
</message>
<message>
<source>%1 KB</source>
<translation>%1 KB</translation>
</message>
<message>
<source>%1 MB</source>
<translation>%1 MB</translation>
</message>
<message>
<source>%1 GB</source>
<translation>%1 GB</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>R&euse an existing receiving address (not recommended)</source>
<translation>Åt&eranvänd en befintlig mottagaradress (rekommenderas inte)</translation>
</message>
<message>
<source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the Unio network.</source>
<translation>Ett valfritt meddelande att bifoga betalningsbegärandet vilket kommer att visas när begärandet öppnas. Observera: Meddelandet kommer inte att skickas med betalningen över Unio-nätverket.</translation>
</message>
<message>
<source>&Message:</source>
<translation>&Meddelande:</translation>
</message>
<message>
<source>An optional label to associate with the new receiving address.</source>
<translation>En valfri etikett att kopplas samman med den nya mottagaradressen.</translation>
</message>
<message>
<source>Use this form to request payments. All fields are <b>optional</b>.</source>
<translation>Använd detta formulär för att begära betalningar. Alla fält är <b>valfria</b>.</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etikett:</translation>
</message>
<message>
<source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source>
<translation>En valfri summa att begära. Lämna denna tom eller nollad för att inte begära en specifik summa.</translation>
</message>
<message>
<source>&Amount:</source>
<translation>&Mängd:</translation>
</message>
<message>
<source>&Request payment</source>
<translation>&Begär betalning</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Rensa alla formulärfälten.</translation>
</message>
<message>
<source>Clear</source>
<translation>Rensa</translation>
</message>
<message>
<source>Requested payments history</source>
<translation>Begärd betalningshistorik</translation>
</message>
<message>
<source>Show the selected request (does the same as double clicking an entry)</source>
<translation>Visa de valda begäranden (gör samma som vid ett dubbelklick på en inmatning)</translation>
</message>
<message>
<source>Show</source>
<translation>Visa</translation>
</message>
<message>
<source>Remove the selected entries from the list</source>
<translation>Ta bort de valda inmatningarna från listan</translation>
</message>
<message>
<source>Remove</source>
<translation>Ta bort</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopiera etikett</translation>
</message>
<message>
<source>Copy message</source>
<translation>Kopiera meddelande</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopiera mängd</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>QR-kod</translation>
</message>
<message>
<source>Copy &URI</source>
<translation>Kopiera &URI</translation>
</message>
<message>
<source>Copy &Address</source>
<translation>Kopiera &Adress</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>&Spara Bild...</translation>
</message>
<message>
<source>Request payment to %1</source>
<translation>Begär betalning till %1</translation>
</message>
<message>
<source>Payment information</source>
<translation>Betalningsinformation</translation>
</message>
<message>
<source>URI</source>
<translation>URI</translation>
</message>
<message>
<source>Address</source>
<translation>Adress</translation>
</message>
<message>
<source>Amount</source>
<translation>Mängd</translation>
</message>
<message>
<source>Label</source>
<translation>Etikett</translation>
</message>
<message>
<source>Message</source>
<translation>Meddelande</translation>
</message>
<message>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>Den slutgiltiga URI:n är för lång, försök att korta ned texten för etiketten/meddelandet.</translation>
</message>
<message>
<source>Error encoding URI into QR Code.</source>
<translation>Fel vid kodning av URI till QR-kod.</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Label</source>
<translation>Etikett</translation>
</message>
<message>
<source>Message</source>
<translation>Meddelande</translation>
</message>
<message>
<source>Amount</source>
<translation>Mängd</translation>
</message>
<message>
<source>(no label)</source>
<translation>(ingen etikett)</translation>
</message>
<message>
<source>(no message)</source>
<translation>(inget meddelande)</translation>
</message>
<message>
<source>(no amount)</source>
<translation>(ingen mängd)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Skicka mynt</translation>
</message>
<message>
<source>Coin Control Features</source>
<translation>Myntkontrollfunktioner</translation>
</message>
<message>
<source>Inputs...</source>
<translation>Indatan...</translation>
</message>
<message>
<source>automatically selected</source>
<translation>automatiskt vald</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Otillräckliga medel!</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Antal:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Mängd:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Prioritet:</translation>
</message>
<message>
<source>medium</source>
<translation>medel</translation>
</message>
<message>
<source>Fee:</source>
<translation>Avgift:</translation>
</message>
<message>
<source>no</source>
<translation>nej</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Efter avgift:</translation>
</message>
<message>
<source>Change:</source>
<translation>Växel:</translation>
</message>
<message>
<source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source>
<translation>Om denna är aktiverad men växeladressen är tom eller ogiltig kommer växeln att skickas till en nygenererad adress.</translation>
</message>
<message>
<source>Custom change address</source>
<translation>Specialväxeladress</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Bekräfta sändningshandlingen</translation>
</message>
<message>
<source>S&end</source>
<translation>S&kicka</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Rensa alla formulärfälten.</translation>
</message>
<message>
<source>Clear &All</source>
<translation>Rensa &alla</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>Skicka till flera mottagare samtidigt</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>Lägg till &mottagare</translation>
</message>
<message>
<source>Darksend</source>
<translation>Darksend</translation>
</message>
<message>
<source>InstantX</source>
<translation>InstantX</translation>
</message>
<message>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Kopiera antal</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopiera mängd</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Kopiera avgift</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Kopiera efter avgift</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Kopiera bytes</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Kopiera prioritet</translation>
</message>
<message>
<source>Copy change</source>
<translation>Kopiera växel</translation>
</message>
<message>
<source>using</source>
<translation>använder</translation>
</message>
<message>
<source>anonymous funds</source>
<translation>anonyma medel</translation>
</message>
<message>
<source>(darksend requires this amount to be rounded up to the nearest %1).</source>
<translation>(darksend kräver att denna mängd avrundas uppåt till närmaste %1).</translation>
</message>
<message>
<source>any available funds (not recommended)</source>
<translation>vilka tillgängliga medel som helst (rekommenderas inte)</translation>
</message>
<message>
<source>and InstantX</source>
<translation>och InstantX</translation>
</message>
<message>
<source>%1 to %2</source>
<translation>%1 till %2</translation>
</message>
<message>
<source>Are you sure you want to send?</source>
<translation>Är du säker på att du vill skicka?</translation>
</message>
<message>
<source>are added as transaction fee</source>
<translation>läggs till som en transaktionsavgift</translation>
</message>
<message>
<source>Confirm send coins</source>
<translation>Bekräfta myntsändning</translation>
</message>
<message>
<source>The recipient address is not valid, please recheck.</source>
<translation>Mottagaradressen är inte giltig, vänligen kontrollera igen.</translation>
</message>
<message>
<source>The amount to pay must be larger than 0.</source>
<translation>Betalningsmängden måste vara större än 0.</translation>
</message>
<message>
<source>The amount exceeds your balance.</source>
<translation>Mängden överstiger ditt saldo.</translation>
</message>
<message>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Totalsumman överstiger ditt saldo när transaktionsavgiften %1 inkluderas.</translation>
</message>
<message>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Dubblettadress hittad, kan endast skicka till en adress åt gången vid varje sändningshandling.</translation>
</message>
<message>
<source>Transaction creation failed!</source>
<translation>Transaktionsskapandet misslyckades!</translation>
</message>
<message>
<source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Transaktionen avslogs! Detta kan hända om några av mynten i din plånbok redan har spenderats, t.ex. om du har använt en kopia av wallet.dat och mynt spenderades i kopian men inte har markerats som spenderade här.</translation>
</message>
<message>
<source>Error: The wallet was unlocked only to anonymize coins.</source>
<translation>Fel: Plånboken låstes upp enbart för att anonymisera mynt.</translation>
</message>
<message>
<source>Warning: Invalid Unio address</source>
<translation>Varning: Ogiltig Unio-adress</translation>
</message>
<message>
<source>Warning: Unknown change address</source>
<translation>Varning: Okänd växeladress</translation>
</message>
<message>
<source>(no label)</source>
<translation>(ingen etikett)</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>This is a normal payment.</source>
<translation>Detta är en vanlig betalning.</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>Betala &Till:</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Välj en tidigare använd adress</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Klistra in adressen från urklippet</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Remove this entry</source>
<translation>Ta bort denna inmatning</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etikett:</translation>
</message>
<message>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation>Ange en etikett för denna adress att läggas till i listan för använda adresser</translation>
</message>
<message>
<source>A&mount:</source>
<translation>M&ängd:</translation>
</message>
<message>
<source>Message:</source>
<translation>Meddelande:</translation>
</message>
<message>
<source>A message that was attached to the unio: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the Unio network.</source>
<translation>Ett meddelande som bifogades till Unio: URI vilket kommer att lagras med transaktionen så att du vet. Observera: Meddelandet kommer inte att skickas över Unio-nätverket.</translation>
</message>
<message>
<source>This is an unverified payment request.</source>
<translation>Detta är en obekräftad betalningsbegäran.</translation>
</message>
<message>
<source>Pay To:</source>
<translation>Betala Till:</translation>
</message>
<message>
<source>Memo:</source>
<translation>PM:</translation>
</message>
<message>
<source>This is a verified payment request.</source>
<translation>Detta är en bekräftad betalningsbegäran.</translation>
</message>
<message>
<source>Enter a label for this address to add it to your address book</source>
<translation>Ange en etikett för denna adress för att lägga till den i din adressbok</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
<message>
<source>Unio Core is shutting down...</source>
<translation>Unio Core stängs ned...</translation>
</message>
<message>
<source>Do not shut down the computer until this window disappears.</source>
<translation>Stäng inte av datorn förrän detta fönster försvinner.</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>Signatures - Sign / Verify a Message</source>
<translation>Signaturer - Signera/bekräfta ett Meddelande</translation>
</message>
<message>
<source>&Sign Message</source>
<translation>&Signera Meddelande</translation>
</message>
<message>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Du kan signera meddelanden med dina adresser för att bevisa att du äger dem. Var försiktig med att inte skriva på någonting oklart då phishing-attacker kan försöka lura dig till att skriva över din identitet till dem. Signera endast väldetaljerade uppgifter du samtycker till.</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Välj en tidigare använd adress</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Klistra in adressen från urklippet</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Enter the message you want to sign here</source>
<translation>Skriv in meddelandet du vill signera här</translation>
</message>
<message>
<source>Signature</source>
<translation>Signatur</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>Kopiera den nuvarande valda signaturen till systemurklippet</translation>
</message>
<message>
<source>Sign the message to prove you own this Unio address</source>
<translation>Signera meddelandet för att bevisa att du äger denna Unio-adress</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>Signera &Meddelande</translation>
</message>
<message>
<source>Reset all sign message fields</source>
<translation>Återställ alla fält för signaturmeddelanden</translation>
</message>
<message>
<source>Clear &All</source>
<translation>Rensa &alla</translation>
</message>
<message>
<source>&Verify Message</source>
<translation>&Bekräfta Meddelande</translation>
</message>
<message>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Skriv in signeringsadressen, meddelandet, (försäkra dig om att du kopierar linjeavbrott, mellanslag, flikar med mera) och signera nedtill för att verifiera meddelandet. Var försiktig med att inte läsa in mer i signaturen än vad som finns i det signerade meddelandet för att undvika att bli lurad av en mellanhandattack.</translation>
</message>
<message>
<source>Verify the message to ensure it was signed with the specified Unio address</source>
<translation>Bekräfta meddelandet för att försäkra dig om att den signerades med den angivna Unio-adressen</translation>
</message>
<message>
<source>Verify &Message</source>
<translation>Bekräfta &Meddelande</translation>
</message>
<message>
<source>Reset all verify message fields</source>
<translation>Återställ alla fält för bekräftelsemeddelanden</translation>
</message>
<message>
<source>Click "Sign Message" to generate signature</source>
<translation>Klicka på "Signera Meddelande" för att generera en signatur</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>Den angivna adressen är ogiltig.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>Vänligen kontrollera adressen och försök igen.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>Den angivna adressen refererar inte till en nyckel.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>Plånboksupplåsningen avbröts.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>Privatnyckeln för den inmatade adressen är inte tillgänglig.</translation>
</message>
<message>
<source>Message signing failed.</source>
<translation>Meddelandesignering misslyckades.</translation>
</message>
<message>
<source>Message signed.</source>
<translation>Meddelandet signerades.</translation>
</message>
<message>
<source>The signature could not be decoded.</source>
<translation>Signaturen kunde inte avkodas.</translation>
</message>
<message>
<source>Please check the signature and try again.</source>
<translation>Vänligen kontrollera signaturen och försök igen.</translation>
</message>
<message>
<source>The signature did not match the message digest.</source>
<translation>Signaturen överensstämde inte med meddelandesammandraget.</translation>
</message>
<message>
<source>Message verification failed.</source>
<translation>Meddelandebekräftelsen misslyckades.</translation>
</message>
<message>
<source>Message verified.</source>
<translation>Meddelandet bekräftades.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>Unio Core</source>
<translation>Unio Core</translation>
</message>
<message>
<source>Version %1</source>
<translation>Version %1</translation>
</message>
<message>
<source>The Bitcoin Core developers</source>
<translation>Bitcoin Core-utvecklarna</translation>
</message>
<message>
<source>The Unio Core developers</source>
<translation>Unio Core-utvecklarna</translation>
</message>
<message>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<source>KB/s</source>
<translation>KB/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Open until %1</source>
<translation>Öppen till %1</translation>
</message>
<message>
<source>conflicted</source>
<translation>konflikterad</translation>
</message>
<message>
<source>%1/offline (verified via instantx)</source>
<translation>%1/offline (bekräftad genom instantx)</translation>
</message>
<message>
<source>%1/confirmed (verified via instantx)</source>
<translation>%1/bekräftad (bekräftad genom instantx)</translation>
</message>
<message>
<source>%1 confirmations (verified via instantx)</source>
<translation>%1 bekräftelser (bekräftad genom instantx)</translation>
</message>
<message>
<source>%1/offline</source>
<translation>%1/offline</translation>
</message>
<message>
<source>%1/unconfirmed</source>
<translation>%1/obekräftade</translation>
</message>
<message>
<source>%1 confirmations</source>
<translation>%1 bekräftelser</translation>
</message>
<message>
<source>%1/offline (InstantX verification in progress - %2 of %3 signatures)</source>
<translation>%1/offline (InstantX-bekräftelse under behandling - %2 av %3 signaturer)</translation>
</message>
<message>
<source>%1/confirmed (InstantX verification in progress - %2 of %3 signatures )</source>
<translation>%1/bekräftad (InstantX-bekräftelse under behandling - %2 av %3 signaturer)</translation>
</message>
<message>
<source>%1 confirmations (InstantX verification in progress - %2 of %3 signatures)</source>
<translation>%1 bekräftelser (InstantX-bekräftelse under behandling - %2 av %3 signaturer)</translation>
</message>
<message>
<source>%1/offline (InstantX verification failed)</source>
<translation>%1/offline (InstantX-bekräftelse misslyckades)</translation>
</message>
<message>
<source>%1/confirmed (InstantX verification failed)</source>
<translation>%1/bekräftad (InstantX-bekräftelse misslyckades)</translation>
</message>
<message>
<source>Status</source>
<translation>Status</translation>
</message>
<message>
<source>, has not been successfully broadcast yet</source>
<translation>, har ännu inte framgångsrikt utsänts</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Source</source>
<translation>Källa</translation>
</message>
<message>
<source>Generated</source>
<translation>Genererad</translation>
</message>
<message>
<source>From</source>
<translation>Från</translation>
</message>
<message>
<source>unknown</source>
<translation>okänd</translation>
</message>
<message>
<source>To</source>
<translation>Till</translation>
</message>
<message>
<source>own address</source>
<translation>egen adress</translation>
</message>
<message>
<source>label</source>
<translation>etikett</translation>
</message>
<message>
<source>Credit</source>
<translation>Kredit</translation>
</message>
<message>
<source>not accepted</source>
<translation>inte accepterad</translation>
</message>
<message>
<source>Debit</source>
<translation>Debet</translation>
</message>
<message>
<source>Transaction fee</source>
<translation>Transaktionsavgift</translation>
</message>
<message>
<source>Net amount</source>
<translation>Nettomängd</translation>
</message>
<message>
<source>Message</source>
<translation>Meddelande</translation>
</message>
<message>
<source>Comment</source>
<translation>Kommentar</translation>
</message>
<message>
<source>Transaction ID</source>
<translation>Transaktions-ID</translation>
</message>
<message>
<source>Merchant</source>
<translation>Handlare</translation>
</message>
<message>
<source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Genererade mynt måste vänta %1 block innan de kan användas. När du genererade detta block utsändes det till nätverket för att läggas till i blockkedjan. Om blocket inte kommer in i kedjan kommer dess tillstånd att ändras till "ej accepterad" och kommer inte att kunna spenderas. Detta kan ibland hända om en annan nod genererar ett block inom ett par sekunder från ditt.</translation>
</message>
<message>
<source>Debug information</source>
<translation>Avsökningsinformation</translation>
</message>
<message>
<source>Transaction</source>
<translation>Transaktion</translation>
</message>
<message>
<source>Inputs</source>
<translation>Indatan</translation>
</message>
<message>
<source>Amount</source>
<translation>Mängd</translation>
</message>
<message>
<source>true</source>
<translation>sant</translation>
</message>
<message>
<source>false</source>
<translation>falskt</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>Transaction details</source>
<translation>Transaktionsdetaljer</translation>
</message>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>Den här panelen visar en detaljerad transaktionsbeskrivning</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Type</source>
<translation>Typ</translation>
</message>
<message>
<source>Address</source>
<translation>Adress</translation>
</message>
<message>
<source>Open until %1</source>
<translation>Öppen till %1</translation>
</message>
<message>
<source>Offline</source>
<translation>Offline</translation>
</message>
<message>
<source>Unconfirmed</source>
<translation>Obekräftad</translation>
</message>
<message>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation>Bekräftar (%1 av %2 rekommenderade bekräftelser)</translation>
</message>
<message>
<source>Confirmed (%1 confirmations)</source>
<translation>Bekräftat (%1 bekräftelser)</translation>
</message>
<message>
<source>Conflicted</source>
<translation>Konflikterad</translation>
</message>
<message>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation>Omogen (%1 bekräftelser, kommer att bli tillgänglig efter %2)</translation>
</message>
<message>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Det här blocket togs inte emot av några andra noder och kommer troligtvis inte att accepteras!</translation>
</message>
<message>
<source>Generated but not accepted</source>
<translation>Genererad men inte accepterad</translation>
</message>
<message>
<source>Received with</source>
<translation>Mottagen med</translation>
</message>
<message>
<source>Received from</source>
<translation>Mottagen från</translation>
</message>
<message>
<source>Received via Darksend</source>
<translation>Mottagen genom Darksend</translation>
</message>
<message>
<source>Sent to</source>
<translation>Skickad till</translation>
</message>
<message>
<source>Payment to yourself</source>
<translation>Betalning till dig själv</translation>
</message>
<message>
<source>Mined</source>
<translation>Utvunnen</translation>
</message>
<message>
<source>Darksend Denominate</source>
<translation>Darksend-denomination</translation>
</message>
<message>
<source>Darksend Collateral Payment</source>
<translation>Darksend-säkerhetsbetalning</translation>
</message>
<message>
<source>Darksend Make Collateral Inputs</source>
<translation>Darksend-skapa säkerhetsinmatningar</translation>
</message>
<message>
<source>Darksend Create Denominations</source>
<translation>Darksend-skapa denominationer</translation>
</message>
<message>
<source>Darksent</source>
<translation>Darksent-(skickat)</translation>
</message>
<message>
<source>(n/a)</source>
<translation>(e/t)</translation>
</message>
<message>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transaktionsstatus. Håll muspekaren över detta fält för att se bekräftelseantal.</translation>
</message>
<message>
<source>Date and time that the transaction was received.</source>
<translation>Datum och tid då transaktionen mottogs.</translation>
</message>
<message>
<source>Type of transaction.</source>
<translation>Transaktionstyp.</translation>
</message>
<message>
<source>Destination address of transaction.</source>
<translation>Transaktionens destinationsadress.</translation>
</message>
<message>
<source>Amount removed from or added to balance.</source>
<translation>Mängd draget eller tillagt till saldot.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>All</source>
<translation>Alla</translation>
</message>
<message>
<source>Today</source>
<translation>Idag</translation>
</message>
<message>
<source>This week</source>
<translation>Denna vecka</translation>
</message>
<message>
<source>This month</source>
<translation>Denna månad</translation>
</message>
<message>
<source>Last month</source>
<translation>Förra månaden</translation>
</message>
<message>
<source>This year</source>
<translation>Detta år</translation>
</message>
<message>
<source>Range...</source>
<translation>Period...</translation>
</message>
<message>
<source>Received with</source>
<translation>Mottagen med</translation>
</message>
<message>
<source>Sent to</source>
<translation>Skickad till</translation>
</message>
<message>
<source>Darksent</source>
<translation>Darksent-(skickat)</translation>
</message>
<message>
<source>Darksend Make Collateral Inputs</source>
<translation>Darksend-skapa säkerhetsinmatningar</translation>
</message>
<message>
<source>Darksend Create Denominations</source>
<translation>Darksend-skapa denominationer</translation>
</message>
<message>
<source>Darksend Denominate</source>
<translation>Darksend-denomination</translation>
</message>
<message>
<source>Darksend Collateral Payment</source>
<translation>Darksend-säkerhetsbetalning</translation>
</message>
<message>
<source>To yourself</source>
<translation>Till dig själv</translation>
</message>
<message>
<source>Mined</source>
<translation>Utvunnen</translation>
</message>
<message>
<source>Other</source>
<translation>Andra</translation>
</message>
<message>
<source>Enter address or label to search</source>
<translation>Skriv in en adress eller etikett för att söka</translation>
</message>
<message>
<source>Min amount</source>
<translation>Minsta mängd</translation>
</message>
<message>
<source>Copy address</source>
<translation>Kopiera adress</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopiera etikett</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopiera mängd</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Kopiera transaktions-ID</translation>
</message>
<message>
<source>Edit label</source>
<translation>Redigera etikett</translation>
</message>
<message>
<source>Show transaction details</source>
<translation>Visa transaktionsdetaljer</translation>
</message>
<message>
<source>Export Transaction History</source>
<translation>Exportera Transaktionshistoriken</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Kommaseparerad fil (*.csv)</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Bekräftad</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Type</source>
<translation>Typ</translation>
</message>
<message>
<source>Label</source>
<translation>Etikett</translation>
</message>
<message>
<source>Address</source>
<translation>Adress</translation>
</message>
<message>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Exporteringen misslyckades</translation>
</message>
<message>
<source>There was an error trying to save the transaction history to %1.</source>
<translation>Det inträffade ett fel vid försöket med att spara transaktionshistoriken till %1.</translation>
</message>
<message>
<source>Exporting Successful</source>
<translation>Exporteringen lyckades</translation>
</message>
<message>
<source>The transaction history was successfully saved to %1.</source>
<translation>Transaktionshistoriken sparades framgångsrikt till %1.</translation>
</message>
<message>
<source>Range:</source>
<translation>Period:</translation>
</message>
<message>
<source>to</source>
<translation>till</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletFrame</name>
<message>
<source>No wallet has been loaded.</source>
<translation>Ingen plånbok har laddats.</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<source>Send Coins</source>
<translation>Skicka mynt</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&Exportera</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exportera datan i den nuvarande fliken till en fil</translation>
</message>
<message>
<source>Backup Wallet</source>
<translation>Säkerhetskopiera Plånbok</translation>
</message>
<message>
<source>Wallet Data (*.dat)</source>
<translation>Plånboksdata (*.dat)</translation>
</message>
<message>
<source>Backup Failed</source>
<translation>Säkerhetskopieringen misslyckades</translation>
</message>
<message>
<source>There was an error trying to save the wallet data to %1.</source>
<translation>Det inträffade ett fel vid försöket att spara plånboksdatan till %1.</translation>
</message>
<message>
<source>Backup Successful</source>
<translation>Säkerhetskopiering lyckades</translation>
</message>
<message>
<source>The wallet data was successfully saved to %1.</source>
<translation>Plånbokens data sparades utan problem till %1.</translation>
</message>
</context>
<context>
<name>unio-core</name>
<message>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Sammanbind till fastställd adress och avlyssna alltid den. Använd [host]:port-notation för IPv6</translation>
</message>
<message>
<source>Cannot obtain a lock on data directory %s. Unio Core is probably already running.</source>
<translation>Kan inte erhålla ett lås på datakatalog %s. Unio Core körs förmodligen redan.</translation>
</message>
<message>
<source>Darksend uses exact denominated amounts to send funds, you might simply need to anonymize some more coins.</source>
<translation>Darksend använder exakta denominationsmängder för att skicka medel, du kanske måste anonymisera fler mynt.</translation>
</message>
<message>
<source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source>
<translation>Gå in i regressionstestläget, vilken använder en särskild kedja i vilken block kan lösas direkt.</translation>
</message>
<message>
<source>Error: Listening for incoming connections failed (listen returned error %s)</source>
<translation>Fel: Lyssnande på inkommande anslutningar misslyckades (avlyssna återkommande fel %s)</translation>
</message>
<message>
<source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source>
<translation>Exekvera kommandot när ett viktigt larm mottas eller vi ser en jättelång förgrening (%s i cmd ersätts av ett meddelande)</translation>
</message>
<message>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Exekvera kommandot när en plånbokstransaktion ändras (%s i cmd ersätts av TxID) </translation>
</message>
<message>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Exekvera kommandot när det bästa blocket ändras (%s i cmd ersätts av blockhash)</translation>
</message>
<message>
<source>In this mode -genproclimit controls how many blocks are generated immediately.</source>
<translation>I detta läge kontrollerar -genproclimit hur många block som genereras omedelbart.</translation>
</message>
<message>
<source>InstantX requires inputs with at least 6 confirmations, you might need to wait a few minutes and try again.</source>
<translation>InstantX kräver indatan med åtminstone 6 bekräftelser. Du kanske måste vänta ett par minuter och försöka igen.</translation>
</message>
<message>
<source>Name to construct url for KeePass entry that stores the wallet passphrase</source>
<translation>Namnge för att skapa en url för en KeePass-inmatning som lagrar plånbokslösenfrasen.</translation>
</message>
<message>
<source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source>
<translation>Förfrågan till peer-adresser via DNS-lookup, om det är brist på adresser (standardvärde:1 unless -connect)</translation>
</message>
<message>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source>
<translation>Ställ in en maximal storlek för högprioriterade/lågavgiftstransaktioner i byte (standard: %d)</translation>
</message>
<message>
<source>Set the number of script verification threads (%u to %d, 0 = auto, <0 = leave that many cores free, default: %d)</source>
<translation>Ställ in antalet skriptbekräftelsetrådar till (%u till %d, 0 = auto, <0 = lämna så många kärnor fria, standard: %d)</translation>
</message>
<message>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Detta är en förhandsutgiven testkompilering - använd på egen risk - använd inte för utvinning eller handlarapplikationer.</translation>
</message>
<message>
<source>Unable to bind to %s on this computer. Unio Core is probably already running.</source>
<translation>Det går inte att binda till %s till denna dator. Unio Core körs förmodligen redan.</translation>
</message>
<message>
<source>Unable to locate enough Darksend denominated funds for this transaction.</source>
<translation>Kunde inte hitta tillräckliga Darksend-denominationsmedel för denna transaktion.</translation>
</message>
<message>
<source>Unable to locate enough Darksend non-denominated funds for this transaction that are not equal 1000 UNIO.</source>
<translation>Kunde inte hitta tillräckliga Darksend-icke-denominationsmedel för denna transaktion som inte är likvärdiga 1000 UNIO.</translation>
</message>
<message>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Varning: -paytxfee är väldigt högt satt! Detta är transaktionsavgiften du kommer att få betala om du skickar en transaktion.</translation>
</message>
<message>
<source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source>
<translation>Varning: Nätverket verkar inte hålla med helt och hållet! Några utvinnare verkar uppleva problem.</translation>
</message>
<message>
<source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Varning: Vi verkar inte överensstämma med våra peers! Du kanske måste uppgradera eller så måste andra noder uppgradera.</translation>
</message>
<message>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Varning: Fel vid läsning av wallet.dat! Alla nycklar lästes korrekt men transaktionsdatan eller adressbokposterna kanske saknas eller är felaktiga.</translation>
</message>
<message>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Varning: wallet.dat är korrumperad, data räddad! Den ursprungliga wallet.dat är sparad som wallet.{timestamp}.bak i %s; om ditt saldo eller transaktioner är felaktiga kanske du måste återställa från en säkerhetskopia.</translation>
</message>
<message>
<source>You must specify a masternodeprivkey in the configuration. Please see documentation for help.</source>
<translation>Du måste specificera en masternodepriv-nyckel i konfigurationen. Vänligen se dokumentationen för hjälp.</translation>
</message>
<message>
<source>(default: 1)</source>
<translation>(standardvärde: 1)</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
<translation>Acceptera kommandorad och JSON-RPC-kommandon</translation>
</message>
<message>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Acceptera anslutningar utifrån (standardvärde: 1 om ingen -proxy eller -connect)</translation>
</message>
<message>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Lägg till en nod att ansluta till och försök att hålla anslutningen öppen</translation>
</message>
<message>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Tillåt DNS-lookup för -addnode, -seednode och -connect</translation>
</message>
<message>
<source>Already have that input.</source>
<translation>Har redan den indatan.</translation>
</message>
<message>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Försök att återskapa privatnycklar från en korrumperad wallet.dat</translation>
</message>
<message>
<source>Block creation options:</source>
<translation>Blockskapandealternativ:</translation>
</message>
<message>
<source>Can't denominate: no compatible inputs left.</source>
<translation>Kan inte denominera: Inga kompatibla indatan kvar.</translation>
</message>
<message>
<source>Cannot downgrade wallet</source>
<translation>Kan inte nedgradera plånboken</translation>
</message>
<message>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Kan inte lösa -bind address: '%s'</translation>
</message>
<message>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Kan inte lösa -externalip address: '%s'</translation>
</message>
<message>
<source>Cannot write default address</source>
<translation>Kan inte skriva standardadress</translation>
</message>
<message>
<source>Collateral not valid.</source>
<translation>Säkerhetsåtgärd ej giltig.</translation>
</message>
<message>
<source>Connect only to the specified node(s)</source>
<translation>Anslut endast till specifik(a) nod(er)</translation>
</message>
<message>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Anslut till en nod för att återfå peer-adresser och koppla från</translation>
</message>
<message>
<source>Connection options:</source>
<translation>Anslutningsalternativ:</translation>
</message>
<message>
<source>Corrupted block database detected</source>
<translation>Korrumperad blockdatabas upptäcktes</translation>
</message>
<message>
<source>Darksend options:</source>
<translation>Darksend-alternativ:</translation>
</message>
<message>
<source>Debugging/Testing options:</source>
<translation>Avsöknings-/testalternativ:</translation>
</message>
<message>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Upptäck din egen IP-adress (standardvärde: 1 vid avlyssning och no -externalip)</translation>
</message>
<message>
<source>Do not load the wallet and disable wallet RPC calls</source>
<translation>Ladda inte plånboken och inaktivera plånboks-RPC-anrop</translation>
</message>
<message>
<source>Do you want to rebuild the block database now?</source>
<translation>Vill du återuppbygga blockdatabasen nu?</translation>
</message>
<message>
<source>Done loading</source>
<translation>Laddning färdig</translation>
</message>
<message>
<source>Entries are full.</source>
<translation>Inmatningarna är fyllda.</translation>
</message>
<message>
<source>Error initializing block database</source>
<translation>Fel vid initialisering av blockadatabas</translation>
</message>
<message>
<source>Error initializing wallet database environment %s!</source>
<translation>Fel vid initialisering av plånbokdatabasmiljö %s!</translation>
</message>
<message>
<source>Error loading block database</source>
<translation>Fel vid laddning av blockdatabas</translation>
</message>
<message>
<source>Error loading wallet.dat</source>
<translation>Fel vid laddning av wallet.dat</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Fel vid laddning av wallet.dat: Plånboken är korrumperad</translation>
</message>
<message>
<source>Error opening block database</source>
<translation>Fel vid öppnande av blockdatabas</translation>
</message>
<message>
<source>Error reading from database, shutting down.</source>
<translation>Fel vid läsning från databas, stänger ned.</translation>
</message>
<message>
<source>Error recovering public key.</source>
<translation>Fel vid återhämtning av publik nyckel.</translation>
</message>
<message>
<source>Error</source>
<translation>Fel</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation>Fel: Diskutrymmet är lågt!</translation>
</message>
<message>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>Fel: Plånbok låst, kan inte skapa en transaktion!</translation>
</message>
<message>
<source>Error: You already have pending entries in the Darksend pool</source>
<translation>Fel: Du har redan väntande inmatningar i Darksend-poolen</translation>
</message>
<message>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Kunde inte avlyssna någon port. Använd -listen=0 om du vill detta.</translation>
</message>
<message>
<source>Failed to read block</source>
<translation>Kunde inte läsa block</translation>
</message>
<message>
<source>If <category> is not supplied, output all debugging information.</source>
<translation>Om <category> inte finns, lägg ut all avsökningsinformation.</translation>
</message>
<message>
<source>Found unconfirmed denominated outputs, will wait till they confirm to continue.</source>
<translation>Hittade obekräftade denominationsutdatan, kommer att vänta tills de bekräftar att fortsätta.</translation>
</message>
<message>
<source>Importing...</source>
<translation>Importerar...</translation>
</message>
<message>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Importerar block från en extern blk000??.dat-fil</translation>
</message>
<message>
<source>Incompatible mode.</source>
<translation>Inkompatibelt läge.</translation>
</message>
<message>
<source>Incompatible version.</source>
<translation>Inkompatibel version.</translation>
</message>
<message>
<source>Incorrect or no genesis block found. Wrong datadir for network?</source>
<translation>Felaktig eller så hittades inget Genesis-block. Fel datadir för nätverket?</translation>
</message>
<message>
<source>Information</source>
<translation>Information</translation>
</message>
<message>
<source>Initialization sanity check failed. Unio Core is shutting down.</source>
<translation>Initialiseringstillståndkontroll misslyckades. Unio Core stängs ned.</translation>
</message>
<message>
<source>Input is not valid.</source>
<translation>Indata är inte giltig.</translation>
</message>
<message>
<source>InstantX options:</source>
<translation>InstantX-alternativ:</translation>
</message>
<message>
<source>Insufficient funds.</source>
<translation>Otillräckliga medel!</translation>
</message>
<message>
<source>Invalid -onion address: '%s'</source>
<translation>Ogiltig -onion-adress: '%s'</translation>
</message>
<message>
<source>Invalid -proxy address: '%s'</source>
<translation>Ogiltig -proxy-adress: '%s'</translation>
</message>
<message>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>Ogiltig mängd för -minrelaytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>Ogiltig mängd för -mintxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Ogiltig mängd för -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid masternodeprivkey. Please see documenation.</source>
<translation>Ogiltig masternodepriv-nyckel. Vänligen se dokumentationen.</translation>
</message>
<message>
<source>Invalid private key.</source>
<translation>Ogiltig privatnyckel.</translation>
</message>
<message>
<source>Invalid script detected.</source>
<translation>Ogiltigt skript hittades.</translation>
</message>
<message>
<source>KeePassHttp id for the established association</source>
<translation>KeePassHttp-id för den etablerade kopplingen</translation>
</message>
<message>
<source>KeePassHttp key for AES encrypted communication with KeePass</source>
<translation>KeePassHttp-nyckel för AES-krypterad kommunikation med KeePass</translation>
</message>
<message>
<source>Keep at most <n> unconnectable transactions in memory (default: %u)</source>
<translation>Bibehåll som mest <n> icke-anslutningsbara transaktioner i minnet (standardvärde: %u)</translation>
</message>
<message>
<source>Last Darksend was too recent.</source>
<translation>Senaste Darksend gjordes för inte alltför länge sedan.</translation>
</message>
<message>
<source>Loading addresses...</source>
<translation>Laddar adresser...</translation>
</message>
<message>
<source>Loading block index...</source>
<translation>Laddar blockindex...</translation>
</message>
<message>
<source>Loading wallet... (%3.2f %%)</source>
<translation>Laddar plånbok... (%3.2f %%)</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>Laddar plånbok...</translation>
</message>
<message>
<source>Masternode options:</source>
<translation>Masternode-alternativ:</translation>
</message>
<message>
<source>Masternode queue is full.</source>
<translation>Masternode-kön är uppfylld.</translation>
</message>
<message>
<source>Masternode:</source>
<translation>Masternode:</translation>
</message>
<message>
<source>Missing input transaction information.</source>
<translation>Indatatransaktionsinformation fattas.</translation>
</message>
<message>
<source>No funds detected in need of denominating.</source>
<translation>Inga medel hittades som är i behov av denominering.</translation>
</message>
<message>
<source>No matching denominations found for mixing.</source>
<translation>Inga matchande denominationer hittades för mixning.</translation>
</message>
<message>
<source>Non-standard public key detected.</source>
<translation>Icke-standard publik nyckel hittades.</translation>
</message>
<message>
<source>Not compatible with existing transactions.</source>
<translation>Inte kompatibel med nuvarande transaktioner.</translation>
</message>
<message>
<source>Not enough file descriptors available.</source>
<translation>Inte tillräckligt många tillgängliga fildeskriptorer.</translation>
</message>
<message>
<source>Options:</source>
<translation>Alternativ:</translation>
</message>
<message>
<source>Password for JSON-RPC connections</source>
<translation>Lösenord för JSON-RPC-anslutningar</translation>
</message>
<message>
<source>RPC SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation>RPC SSL-alternativ: (Se Bitcoin Wiki för SSL-inställningsinstruktioner)</translation>
</message>
<message>
<source>RPC server options:</source>
<translation>RPC-serveralternativ:</translation>
</message>
<message>
<source>Randomly drop 1 of every <n> network messages</source>
<translation>Släpp 1 av varje <n> nätverksmeddelande slumpmässigt</translation>
</message>
<message>
<source>Randomly fuzz 1 of every <n> network messages</source>
<translation>Ludda 1 av varje <n> nätverksmeddelande slumpmässigt</translation>
</message>
<message>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Återuppbygg blockkedjeindexet från den aktuella blk000??.dat-filen</translation>
</message>
<message>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Omskanna blockkedjan efter försvunna plånbokstransaktioner</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>Omskannar...</translation>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
<translation>Kör i bakgrunden som daemon och acceptera kommandon</translation>
</message>
<message>
<source>Session not complete!</source>
<translation>Sessionen är inte fullständig!</translation>
</message>
<message>
<source>Set database cache size in megabytes (%d to %d, default: %d)</source>
<translation>Ställ in databascachens storlek i megabytes (%d till %d, standardvärde: %d)</translation>
</message>
<message>
<source>Set maximum block size in bytes (default: %d)</source>
<translation>Ställ in maximal blockstorlek i bytes (standardvärde: %d)</translation>
</message>
<message>
<source>Set the masternode private key</source>
<translation>Ställ in masternodprivatnyckeln</translation>
</message>
<message>
<source>Show all debugging options (usage: --help -help-debug)</source>
<translation>Visa alla avsökningsalternativ (usage: --help -help-debug)</translation>
</message>
<message>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Förminska debug.log-filen vid klientuppstart (standardvärde 1 vid ingen -debug)</translation>
</message>
<message>
<source>Signing failed.</source>
<translation>Signering misslyckades.</translation>
</message>
<message>
<source>Signing transaction failed</source>
<translation>Transaktionssigneringen misslyckades</translation>
</message>
<message>
<source>Specify data directory</source>
<translation>Specificera datakatalog</translation>
</message>
<message>
<source>Specify wallet file (within data directory)</source>
<translation>Specificera plånboksfil (inom datakatologen)</translation>
</message>
<message>
<source>Specify your own public address</source>
<translation>Specificera din egen publika adress</translation>
</message>
<message>
<source>This help message</source>
<translation>Detta hjälpmeddelande</translation>
</message>
<message>
<source>This is intended for regression testing tools and app development.</source>
<translation>Detta är ämnat för regressionstestverktyg och apputveckling.</translation>
</message>
<message>
<source>Transaction amount too small</source>
<translation>Transaktionsmängden är för liten</translation>
</message>
<message>
<source>Transaction amounts must be positive</source>
<translation>Transaktionsmängder måste vara positiva</translation>
</message>
<message>
<source>Transaction created successfully.</source>
<translation>Transaktionen skapades utan problem.</translation>
</message>
<message>
<source>Transaction fees are too high.</source>
<translation>Transaktionsavgifter är för höga.</translation>
</message>
<message>
<source>Transaction not valid.</source>
<translation>Transaktionen är inte giltig.</translation>
</message>
<message>
<source>Transaction too large</source>
<translation>Transaktionen är för stor</translation>
</message>
<message>
<source>Unable to bind to %s on this computer (bind returned error %s)</source>
<translation>Kan inte binda %s till denna dator (bindande återgav ett fel %s)</translation>
</message>
<message>
<source>Unable to sign spork message, wrong key?</source>
<translation>Kan inte signera spork-meddelandet, fel nyckel?</translation>
</message>
<message>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Okänt specificerat nätverk i -onlynet: '%s'</translation>
</message>
<message>
<source>Upgrade wallet to latest format</source>
<translation>Uppgradera plånboken till det senaste formatet</translation>
</message>
<message>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Använd OpenSSL (https) för JSON-RPC-anslutningar</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Använd UPnP för att kartlägga avlyssningsporten (standardvärde: 1 vid avlyssning)</translation>
</message>
<message>
<source>Use the test network</source>
<translation>Använd testnätverket</translation>
</message>
<message>
<source>Username for JSON-RPC connections</source>
<translation>Användarnamn för JSON-RPC-anslutningar</translation>
</message>
<message>
<source>Value more than Darksend pool maximum allows.</source>
<translation>Värdet är mer än vad Darksend-poolens maximum tillåter.</translation>
</message>
<message>
<source>Verifying blocks...</source>
<translation>Bekräftar block...</translation>
</message>
<message>
<source>Verifying wallet...</source>
<translation>Bekräftar plånbok...</translation>
</message>
<message>
<source>Wallet %s resides outside data directory %s</source>
<translation>Plånboken %s återfinns utanför datakatalogen %s</translation>
</message>
<message>
<source>Wallet is locked.</source>
<translation>Plånboken är låst.</translation>
</message>
<message>
<source>Wallet options:</source>
<translation>Plånboksalternativ:</translation>
</message>
<message>
<source>Warning</source>
<translation>Varning</translation>
</message>
<message>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Varning: Versionen är förlegad, uppgradering krävs!</translation>
</message>
<message>
<source>You need to rebuild the database using -reindex to change -txindex</source>
<translation>Du måste återuppbygga databasen med -reindex för att ändra -txindex</translation>
</message>
<message>
<source>Zapping all transactions from wallet...</source>
<translation>Zappar alla transaktioner från plånboken...</translation>
</message>
<message>
<source>on startup</source>
<translation>vid uppstart</translation>
</message>
<message>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat är korrumperad, räddning misslyckades</translation>
</message>
</context>
</TS>
from __future__ import absolute_import
from torch import nn
from torch.nn import functional as F
from torch.nn import init
import torchvision
import torch
import pdb
from .layers import (
SpatialAttention2d,
WeightedSum2d)
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
'resnet152']
class ResNet(nn.Module):
__factory = {
18: torchvision.models.resnet18,
34: torchvision.models.resnet34,
50: torchvision.models.resnet50,
101: torchvision.models.resnet101,
152: torchvision.models.resnet152,
}
def __init__(self, depth, pretrained=True, cut_at_pooling=False, is_select=False,
num_features=0, norm=False, dropout=0, num_classes=0):
super(ResNet, self).__init__()
self.pretrained = pretrained
self.depth = depth
self.cut_at_pooling = cut_at_pooling
self.is_select = is_select
# Construct base (pretrained) resnet
if depth not in ResNet.__factory:
raise KeyError("Unsupported depth:", depth)
resnet = ResNet.__factory[depth](pretrained=pretrained)
resnet.layer4[0].conv2.stride = (1,1)
resnet.layer4[0].downsample[0].stride = (1,1)
self.base = nn.Sequential(
resnet.conv1, resnet.bn1, resnet.maxpool, # no relu
resnet.layer1, resnet.layer2, resnet.layer3, resnet.layer4)
self.gap = nn.AdaptiveAvgPool2d(1)
if not self.cut_at_pooling:
self.num_features = num_features
self.norm = norm
self.dropout = dropout
self.has_embedding = num_features > 0 # false
self.num_classes = num_classes
out_planes = resnet.fc.in_features
# Append new layers
if self.has_embedding: # false
self.feat = nn.Linear(out_planes, self.num_features)
self.feat_bn = nn.BatchNorm1d(self.num_features)
init.kaiming_normal_(self.feat.weight, mode='fan_out')
init.constant_(self.feat.bias, 0)
else: # 进入这里
# Change the num_features to CNN output channels
self.num_features = out_planes # out_planes = 2048 num_features 重新被赋值 2048
self.num_features_delg = 512
self.feat_bn = nn.BatchNorm1d(self.num_features_delg)
self.feat_bn.bias.requires_grad_(False)
if self.dropout > 0:
self.drop = nn.Dropout(self.dropout)
if self.num_classes > 0:
self.classifier = nn.Linear(self.num_features_delg, self.num_classes, bias=False)
init.normal_(self.classifier.weight, std=0.001)
## wangzy add attention
self.attention = SpatialAttention2d(in_c=self.num_features, act_fn='relu')
self.weightSum = WeightedSum2d()
init.constant_(self.feat_bn.weight, 1)
init.constant_(self.feat_bn.bias, 0)
if not pretrained:
self.reset_params()
def forward(self, x, feature_withbn=False):
x = self.base(x) # b x c x H x w C = 2048 即:32 2048 16 8
# 1*1 conv 512
original_fea = x
# x = self.gap(x)
# x = x.view(x.size(0), -1)
'''wangzy add attention'''
x, att_score = self.attention(x) # 32 1 16 8 比如说取前64个
# x torch.Size([32, 512, 16, 8]) att_score torch.Size([32, 1, 16, 8])
# print(att_score)
# x = self.weightSum([x,att_score])#回乘att_score分数
x = self.gap(x) # 32*512*1*1
# print('------------------------------------------------------------')
# print(x)
x = x.view(-1, x.size()[1]) # 32 512
features = x
# print("features:",features.shape)
# pdb.set_trace()
if self.cut_at_pooling: # False
return features
if self.has_embedding: # false
bn_x = self.feat_bn(self.feat(features))
else: # 进入这里
bn_x = self.feat_bn(features)
# print("training:", self.training) ### 不确定!
if self.training is False: ## 分情况 pretrain的时候 应该是 true target finetune 确定是 false
prob = self.classifier(bn_x)
bn_x = F.normalize(bn_x)
return bn_x, prob, original_fea, att_score ### !!!! finetune 的时候从这里 return
# return bn_x, self.feat_bn(original_fea), att_score ### !!!! finetune 的时候从这里 return
if self.norm: # False
bn_x = F.normalize(bn_x)
elif self.has_embedding:
bn_x = F.relu(bn_x)
if self.dropout > 0: # False
bn_x = self.drop(bn_x)
if self.num_classes > 0: # True
prob = self.classifier(bn_x)
else:
return x, bn_x
if feature_withbn: # False
return bn_x, prob
| #prob (16,12936)
#features (16,2048)
def reset_params(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
init.kaiming_normal_(m.weight, mode='fan_out')
if m.bias is not None:
init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
init.constant_(m.weight, 1)
init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm1d):
init.constant_(m.weight, 1)
init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
init.normal_(m.weight, std=0.001)
if m.bias is not None:
init.constant_(m.bias, 0)
resnet = ResNet.__factory[self.depth](pretrained=self.pretrained)
self.base[0].load_state_dict(resnet.conv1.state_dict())
self.base[1].load_state_dict(resnet.bn1.state_dict())
self.base[2].load_state_dict(resnet.maxpool.state_dict())
self.base[3].load_state_dict(resnet.layer1.state_dict())
self.base[4].load_state_dict(resnet.layer2.state_dict())
self.base[5].load_state_dict(resnet.layer3.state_dict())
self.base[6].load_state_dict(resnet.layer4.state_dict())
def resnet18(**kwargs):
return ResNet(18, **kwargs)
def resnet34(**kwargs):
return ResNet(34, **kwargs)
def resnet50(**kwargs):
return ResNet(50, **kwargs)
def resnet101(**kwargs):
return ResNet(101, **kwargs)
def resnet152(**kwargs):
return ResNet(152, **kwargs) | return features, prob, original_fea, att_score
#att_score (16,1,16,8)
#original_fea(16,2048,16,8) |
tasks.py | import functools
import datasets
import seqio
import t5
import tensorflow as tf
import promptsource.templates
from . import load_annotated_prompts, utils
# Tasks deemed as clean/useful
annotated_tasks = load_annotated_prompts.load_annotated_prompts()
CLEAN_TASKS = [t["dataset_subset_template"] for t in annotated_tasks if not t["skip_train"]]
CLEAN_EVAL_TASKS = [t["dataset_subset_template"] for t in annotated_tasks if t["do_eval"]]
EVAL_METRICS = {t["dataset_subset_template"]: t["metrics"] for t in annotated_tasks if t["do_eval"]}
# Datasets that don't work currently...
DATASET_BLACKLIST = [
("species_800", None),
("drop", None),
("discofuse", "discofuse-sport"),
("discofuse", "discofuse-wikipedia"),
("adversarial_qa", "adversarialQA"),
("tweet_eval", "emotion"),
("tweet_eval", "emoji"),
("tweet_eval", "hate"),
("tweet_eval", "offensive"),
("tweet_eval", "stance_atheism"),
("tweet_eval", "stance_abortion"),
("tweet_eval", "stance_feminist"),
("tweet_eval", "stance_climate"),
("tweet_eval", "sentiment"),
("tweet_eval", "stance_hillary"),
("tweet_eval", "irony"),
]
def strip_whitespace(output_or_target, example=None, is_target=False):
"""Cached tasks from promptsource all have a leading space on the ground-truth targets."""
return output_or_target.strip()
all_templates = promptsource.templates.TemplateCollection()
for dataset_name, subset_name in all_templates.keys:
if (dataset_name, subset_name) in DATASET_BLACKLIST:
continue
dataset_splits = utils.get_dataset_splits(dataset_name, subset_name)
templates = all_templates.get_dataset(dataset_name, subset_name)
for template_name in templates.all_template_names:
template = templates[template_name]
def dataset_fn(split, shuffle_files, seed, dataset_name, subset_name, template):
# HF datasets does not support file-level shuffling
|
task_name = utils.get_task_name(dataset_name, subset_name, template_name)
if task_name in CLEAN_EVAL_TASKS:
metrics = EVAL_METRICS[task_name]
else:
metrics = [t5.evaluation.metrics.sequence_accuracy]
seqio.TaskRegistry.add(
task_name,
seqio.FunctionDataSource(
functools.partial(
dataset_fn,
seed=None,
dataset_name=dataset_name,
subset_name=subset_name,
template=template,
),
splits=list(dataset_splits.keys()),
num_input_examples={s: dataset_splits[s].num_examples for s in dataset_splits.keys()},
),
preprocessors=[
seqio.preprocessors.tokenize,
seqio.preprocessors.append_eos,
seqio.CacheDatasetPlaceholder(required=False),
],
output_features={
"inputs": seqio.Feature(t5.data.get_default_vocabulary(), add_eos=False, dtype=tf.int32),
"targets": seqio.Feature(t5.data.get_default_vocabulary(), add_eos=True, dtype=tf.int32),
},
metric_fns=metrics,
postprocess_fn=strip_whitespace,
)
TASK_BLACKLIST = [
# Tasks which often tokenize to > 1024 tokens currently
"hotpot_qa_distractor_Generate_Explanations",
"hotpot_qa_fullwiki_Generate_Explanations",
"hotpot_qa_distractor_Generate_Answer_and_Explanations",
"hotpot_qa_fullwiki_Generate_Answer_and_Explanations",
"hotpot_qa_fullwiki_Generate_Answer",
"hotpot_qa_distractor_Generate_Answer",
"hotpot_qa_distractor_Generate_Title_2",
"hotpot_qa_fullwiki_Generate_Title_2",
"hotpot_qa_fullwiki_Generate_Title_1",
"hotpot_qa_distractor_Generate_Title_1",
"hotpot_qa_distractor_Generate_Question",
"hotpot_qa_fullwiki_Generate_Question",
"tab_fact_tab_fact_tab_fact_3",
"tab_fact_tab_fact_tab_fact_2",
"tab_fact_tab_fact_tab_fact_1",
"tab_fact_tab_fact_tab_fact_7",
"tab_fact_tab_fact_tab_fact_4",
"tab_fact_tab_fact_tab_fact_5",
"tab_fact_tab_fact_tab_fact_6",
"wiki_hop_masked_Choose_Best_Object_Candidate",
"wiki_hop_masked_Indirect_Question_about_Birthplace_Citizenship_Place_of_Death",
"narrativeqa_Template_05",
"ecthr_cases_alleged_violation_prediction_silver_rationales",
# Tasks with broken cached files
"gigaword_summarize_",
]
seqio.MixtureRegistry.add(
"all_tasks_combined_max_1m",
[task for task in seqio.TaskRegistry.names() if task not in TASK_BLACKLIST],
default_rate=functools.partial(seqio.mixing_rate_num_examples, maximum=1000000),
)
seqio.MixtureRegistry.add(
"all_super_glue_tasks",
[task for task in seqio.TaskRegistry.names() if task.startswith("super_glue")],
default_rate=seqio.mixing_rate_num_examples,
)
seqio.MixtureRegistry.add(
"clean_tasks",
[task for task in CLEAN_TASKS if task not in TASK_BLACKLIST],
default_rate=functools.partial(seqio.mixing_rate_num_examples, maximum=500_000),
)
seqio.MixtureRegistry.add(
"clean_eval_tasks",
[task for task in CLEAN_EVAL_TASKS if task not in TASK_BLACKLIST],
default_rate=functools.partial(seqio.mixing_rate_num_examples, maximum=500_000),
)
| del shuffle_files, seed
dataset = datasets.load_dataset(dataset_name, subset_name)
dataset = dataset[split]
dataset = utils.apply_template(dataset, template)
return utils.hf_dataset_to_tf_dataset(dataset) |
parse.py | # -*- coding: utf-8 -*-
"""
Created on Wed May 08 16:11:28 2013
@author: kshmirko
"""
import re
from ios.readMeteoBlock import readMeteoFile, readMeteoCtx
import StringIO
from datetime import datetime, timedelta
class ParserException(Exception):
def __init__(self, text):
super(ParserException, self).__init__(text)
regex = re.compile("(?P<stid>[0-9]+)([a-zA-Z\ \(\)]+)(?P<time>[0-9]+\w\ [0-9]+\ \w+\ [0-9]+)",re.IGNORECASE|re.UNICODE|re.DOTALL)
def parse_h2(line):
print line
r = regex.match(line).groupdict()
stid = int(r['stid'])
date = datetime.strptime(r['time'],'%HZ %d %b %Y')
print stid, date
return stid, date
| sfile = StringIO.StringIO(line)
meteo = readMeteoFile(sfile)
return meteo
def parse_pre2(line):
sfile = StringIO.StringIO(line)
ctx = readMeteoCtx(sfile)
return ctx
def parse_h3(line):
pass
def parse_observation(tags):
tmp = tags.pop()
if tmp.tag=='h2':
print "Header OK"
stid, date = parse_h2(tmp.text)
else:
raise ParserException("Can't parse string '%s'\n"%(tmp.text))
tmp = tags.pop()
if tmp.tag=='pre':
print "data OK"
meteo = parse_pre1(tmp.text)
else:
raise ParserException("Can't parse string '%s'\n"%(tmp.text))
tmp = tags.pop()
if tmp.tag=='h3':
print "Indices title OK"
parse_h3(tmp.text)
else:
raise ParserException("Can't parse string '%s'\n"%(tmp.text))
tmp = tags.pop()
if tmp.tag=='pre':
print "Indices OK"
ctx = parse_pre2(tmp.text)
else:
raise ParserException("Can't parse string '%s'\n"%(tmp.text))
return [stid, date, meteo, ctx] | def parse_pre1(line): |
cloud_model.go | package model
type Cloud struct {
CloudID string `json:"CloudID,omitempty" bson:"cloud_id"`
Endpoint string `json:"Endpoint,omitempty" bson:"endpoint"`
AccessKey string `json:"AccessKey,omitempty" bson:"access_key"`
SecretKey string `json:"SecretKey,omitempty" bson:"secret_key"`
StoragePrice float64 `json:"StoragePrice" bson:"storage_price"`
TrafficPrice float64 `json:"TrafficPrice" bson:"traffic_price"` | CloudName string `json:"CloudName" bson:"cloud_name"`
ProviderName string `json:"ProviderName" bson:"provider_name"`
Bucket string `json:"Bucket" bson:"bucket"`
}
type CloudController struct {
CloudID string `bson:"cloud_id" json:"CloudID"`
Cloud Cloud `bson:"cloud" json:"Cloud"`
VoteNum int `bson:"vote_num" json:"VoteNum"`
Address string `bson:"address" json:"Address"`
} | Availability float64 `json:"Availability" bson:"availability"`
Status string `json:"Status" bson:"status"` // "UP" | "DOWN"
Location string `json:"Location" bson:"location"` // "116.381252,39.906569"
Address string `json:"Address" bson:"address"` |
stop.go | package main
import (
"os"
"github.com/coreos/fleet/job"
"github.com/coreos/fleet/log"
)
var cmdStopUnit = &Command{
Name: "stop",
Summary: "Instruct systemd to stop one or more units in the cluster.",
Usage: "[--no-block|--block-attempts=N] UNIT...",
Description: `Stop one or more units from running in the cluster, but allow them to be
started again in the future.
Instructs systemd on the host machine to stop the unit, deferring to systemd
completely for any custom stop directives (i.e. ExecStop option in the unit
file).
For units which are not global, stop operations are performed synchronously,
which means fleetctl will block until it detects that the unit(s) have
transitioned to a stopped state. This behaviour can be configured with the
respective --block-attempts and --no-block options. Stop operations on global
units are always non-blocking.
Stop a single unit:
fleetctl stop foo.service
Stop an entire directory of units with glob matching, without waiting:
fleetctl --no-block stop myservice/*`,
Run: runStopUnit,
}
func init() {
cmdStopUnit.Flags.IntVar(&sharedFlags.BlockAttempts, "block-attempts", 0, "Wait until the units are stopped, performing up to N attempts before giving up. A value of 0 indicates no limit. Does not apply to global units.")
cmdStopUnit.Flags.BoolVar(&sharedFlags.NoBlock, "no-block", false, "Do not wait until the units have stopped before exiting. Always the case for global units.")
}
func | (args []string) (exit int) {
units, err := findUnits(args)
if err != nil {
stderr("%v", err)
return 1
}
stopping := make([]string, 0)
for _, u := range units {
if !suToGlobal(u) {
if job.JobState(u.CurrentState) == job.JobStateInactive {
stderr("Unable to stop unit %s in state %s", u.Name, job.JobStateInactive)
return 1
} else if job.JobState(u.CurrentState) == job.JobStateLoaded {
log.V(1).Infof("Unit(%s) already %s, skipping.", u.Name, job.JobStateLoaded)
continue
}
}
log.V(1).Infof("Setting target state of Unit(%s) to %s", u.Name, job.JobStateLoaded)
cAPI.SetUnitTargetState(u.Name, string(job.JobStateLoaded))
if suToGlobal(u) {
stdout("Triggered global unit %s stop", u.Name)
} else {
stopping = append(stopping, u.Name)
}
}
if !sharedFlags.NoBlock {
errchan := waitForUnitStates(stopping, job.JobStateLoaded, sharedFlags.BlockAttempts, os.Stdout)
for err := range errchan {
stderr("Error waiting for units: %v", err)
exit = 1
}
} else {
for _, name := range stopping {
stdout("Triggered unit %s stop", name)
}
}
return
}
| runStopUnit |
utils.py | # -*- coding: utf-8 -*-
"""Test utilities."""
#
# (C) Pywikibot team, 2013-2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
import inspect
import json
import locale
import os
import re
import subprocess
import sys
import tempfile
import time
import traceback
import warnings
from collections import Mapping
from types import ModuleType
from warnings import warn
from pywikibot.tools import PY2
if not PY2:
import six
import pywikibot
from pywikibot import config
from pywikibot.comms import threadedhttp
from pywikibot.data.api import CachedRequest, APIError
from pywikibot.data.api import Request as _original_Request
from pywikibot.site import Namespace
from pywikibot.tools import (
PYTHON_VERSION,
UnicodeType as unicode,
)
from tests import _pwb_py
from tests import unittest
from tests import unittest_print
OSWIN32 = (sys.platform == 'win32')
PYTHON_26_CRYPTO_WARN = ('Python 2.6 is no longer supported by the Python core '
'team, please upgrade your Python.')
WIN32_LOCALE_UPDATE = """
<gs:GlobalizationServices xmlns:gs="urn:longhornGlobalizationUnattend">
<gs:UserList>
<gs:User UserID="Current" CopySettingsToDefaultUserAcct="true"
CopySettingsToSystemAcct="true"/>
</gs:UserList>
<gs:UserLocale>
<gs:Locale Name="%s" SetAsCurrent="true" ResetAllSettings="false"/>
</gs:UserLocale>
</gs:GlobalizationServices>
"""
class DrySiteNote(RuntimeWarning):
"""Information regarding dry site."""
pass
def expected_failure_if(expect):
"""
Unit test decorator to expect failure under conditions.
@param expect: Flag to check if failure is expected
@type expect: bool
"""
if expect:
return unittest.expectedFailure
else:
return lambda orig: orig
def allowed_failure(func):
"""
Unit test decorator to allow failure.
Test runners each have different interpretations of what should be
the result of an @expectedFailure test if it succeeds. Some consider
it to be a pass; others a failure.
This decorator runs the test and, if it is a failure, reports the result
and considers it a skipped test.
"""
def wrapper(*args, **kwargs):
try:
func(*args, **kwargs)
except AssertionError:
tb = traceback.extract_tb(sys.exc_info()[2])
for depth, line in enumerate(tb):
if re.match('^assert[A-Z]', line[2]):
break
tb = traceback.format_list(tb[:depth])
pywikibot.error('\n' + ''.join(tb)[:-1]) # remove \n at the end
raise unittest.SkipTest('Test is allowed to fail.')
except Exception:
pywikibot.exception(tb=True)
raise unittest.SkipTest('Test is allowed to fail.')
wrapper.__name__ = func.__name__
return wrapper
def allowed_failure_if(expect):
"""
Unit test decorator to allow failure under conditions.
@param expect: Flag to check if failure is allowed
@type expect: bool
"""
if expect:
return allowed_failure
else:
return lambda orig: orig
def add_metaclass(cls):
"""Call six's add_metaclass with the site's __metaclass__ in Python 3."""
if not PY2:
return six.add_metaclass(cls.__metaclass__)(cls)
else:
assert cls.__metaclass__
return cls
def fixed_generator(iterable):
"""Return a dummy generator ignoring all parameters."""
def gen(*args, **kwargs):
for item in iterable:
yield item
return gen
def entered_loop(iterable):
"""Return True if iterable contains items."""
for iterable_item in iterable:
return True
return False
class FakeModule(ModuleType):
"""An empty fake module."""
@classmethod
def create_dotted(cls, name):
"""Create a chain of modules based on the name separated by periods."""
modules = name.split('.')
mod = None
for mod_name in modules[::-1]:
module = cls(str(mod_name))
if mod:
setattr(module, mod.__name__, mod)
mod = module
return mod
class WarningSourceSkipContextManager(warnings.catch_warnings):
"""
Warning context manager that adjusts source of warning.
The source of the warning will be moved further down the
stack to skip a list of objects that have been monkey
patched into the call stack.
"""
def __init__(self, skip_list):
"""
Constructor.
@param skip_list: List of objects to be skipped
@type skip_list: list of object or (obj, str, int, int)
"""
super(WarningSourceSkipContextManager, self).__init__(record=True)
self.skip_list = skip_list
@property
def skip_list(self):
"""
Return list of filename and line ranges to skip.
@rtype: list of (obj, str, int, int)
"""
return self._skip_list
@skip_list.setter
def skip_list(self, value):
"""
Set list of objects to be skipped.
@param value: List of objects to be skipped
@type value: list of object or (obj, str, int, int)
"""
self._skip_list = []
for item in value:
if isinstance(item, tuple):
self._skip_list.append(item)
else:
filename = inspect.getsourcefile(item)
code, first_line = inspect.getsourcelines(item)
last_line = first_line + len(code)
self._skip_list.append(
(item, filename, first_line, last_line))
def __enter__(self):
"""Enter the context manager."""
def detailed_show_warning(*args, **kwargs):
"""Replacement handler for warnings.showwarning."""
entry = warnings.WarningMessage(*args, **kwargs)
skip_lines = 0
entry_line_found = False
for (_, filename, fileno, _, line, _) in inspect.stack():
if any(start <= fileno <= end
for (_, skip_filename, start, end) in self.skip_list
if skip_filename == filename):
if entry_line_found:
continue
else:
skip_lines += 1
if (filename, fileno) == (entry.filename, entry.lineno):
if not skip_lines:
break
entry_line_found = True
if entry_line_found:
if not skip_lines:
(entry.filename, entry.lineno) = (filename, fileno)
break
else:
skip_lines -= 1
# Avoid failures because cryptography is mentioning Python 2.6
# is outdated
if PYTHON_VERSION < (2, 7):
if (isinstance(entry, DeprecationWarning) and
str(entry.message) == PYTHON_26_CRYPTO_WARN):
return
log.append(entry)
log = super(WarningSourceSkipContextManager, self).__enter__()
self._module.showwarning = detailed_show_warning
return log
class AssertAPIErrorContextManager(object):
"""
Context manager to assert certain APIError exceptions.
This is build similar to the L{unittest.TestCase.assertError} implementation
which creates an context manager. It then calls L{handle} which either
returns this manager if no executing object given or calls the callable
object.
"""
def __init__(self, code, info, msg, test_case):
"""Create instance expecting the code and info."""
self.code = code
self.info = info
self.msg = msg
self.test_case = test_case
def __enter__(self):
"""Enter this context manager and the unittest's context manager."""
self.cm = self.test_case.assertRaises(APIError, msg=self.msg)
self.cm.__enter__()
return self.cm
def __exit__(self, exc_type, exc_value, tb):
"""Exit the context manager and assert code and optionally info."""
result = self.cm.__exit__(exc_type, exc_value, tb)
assert result is isinstance(exc_value, APIError)
if result:
self.test_case.assertEqual(exc_value.code, self.code)
if self.info:
self.test_case.assertEqual(exc_value.info, self.info)
return result
def handle(self, callable_obj, args, kwargs):
"""Handle the callable object by returning itself or using itself."""
if callable_obj is None:
return self
with self:
callable_obj(*args, **kwargs)
class DryParamInfo(dict):
"""Dummy class to use instead of L{pywikibot.data.api.ParamInfo}."""
def __init__(self, *args, **kwargs):
"""Constructor."""
super(DryParamInfo, self).__init__(*args, **kwargs)
self.modules = set()
self.action_modules = set()
self.query_modules = set()
self.query_modules_with_limits = set()
self.prefixes = set()
def fetch(self, modules, _init=False):
"""Load dry data."""
return [self[mod] for mod in modules]
def parameter(self, module, param_name):
"""Load dry data."""
return self[module][param_name]
def __getitem__(self, name):
"""Return dry data or a dummy parameter block."""
try:
return super(DryParamInfo, self).__getitem__(name)
except KeyError:
return {'name': name, 'limit': None}
class DummySiteinfo(object):
"""Dummy class to use instead of L{pywikibot.site.Siteinfo}."""
def __init__(self, cache):
"""Constructor."""
self._cache = dict((key, (item, False)) for key, item in cache.items())
def __getitem__(self, key):
"""Get item."""
return self.get(key, False)
def __setitem__(self, key, value):
"""Set item."""
self._cache[key] = (value, False)
def get(self, key, get_default=True, cache=True, expiry=False):
"""Return dry data."""
# Default values are always expired, so only expiry=False doesn't force
# a reload
force = expiry is not False
if not force and key in self._cache:
loaded = self._cache[key]
if not loaded[1] and not get_default:
raise KeyError(key)
else:
return loaded[0]
elif get_default:
default = pywikibot.site.Siteinfo._get_default(key)
if cache:
self._cache[key] = (default, False)
return default
else:
raise KeyError(key)
def __contains__(self, key):
"""Return False."""
return False
def is_recognised(self, key):
"""Return None."""
return None
def get_requested_time(self, key):
"""Return False."""
return False
class DryRequest(CachedRequest):
"""Dummy class to use instead of L{pywikibot.data.api.Request}."""
def __init__(self, *args, **kwargs):
"""Constructor."""
_original_Request.__init__(self, *args, **kwargs)
@classmethod
def create_simple(cls, **kwargs):
"""Skip CachedRequest implementation."""
return _original_Request.create_simple(**kwargs)
def _expired(self, dt):
"""Never invalidate cached data."""
return False
def _write_cache(self, data):
"""Never write data."""
return
def submit(self):
"""Prevented method."""
raise Exception(u'DryRequest rejecting request: %r'
% self._params)
class DrySite(pywikibot.site.APISite):
"""Dummy class to use instead of L{pywikibot.site.APISite}."""
_loginstatus = pywikibot.site.LoginStatus.NOT_ATTEMPTED
def __init__(self, code, fam, user, sysop):
"""Constructor."""
super(DrySite, self).__init__(code, fam, user, sysop)
self._userinfo = pywikibot.tools.EMPTY_DEFAULT
self._paraminfo = DryParamInfo()
self._siteinfo = DummySiteinfo({})
self._siteinfo._cache['lang'] = (code, True)
self._siteinfo._cache['case'] = (
'case-sensitive' if self.family.name == 'wiktionary' else
'first-letter', True)
self._siteinfo._cache['mainpage'] = 'Main Page'
extensions = []
if self.family.name == 'wikisource':
extensions.append({'name': 'ProofreadPage'})
self._siteinfo._cache['extensions'] = (extensions, True)
aliases = []
for alias in ('PrefixIndex', ):
# TODO: Not all follow that scheme (e.g. "BrokenRedirects")
aliases.append({'realname': alias.capitalize(), 'aliases': [alias]})
self._siteinfo._cache['specialpagealiases'] = (aliases, True)
self._msgcache = {'*': 'dummy entry', 'hello': 'world'}
def _build_namespaces(self):
ns_dict = Namespace.builtin_namespaces(case=self.siteinfo['case'])
if hasattr(self.family, 'authornamespaces'):
assert len(self.family.authornamespaces[self.code]) <= 1
if self.family.authornamespaces[self.code]:
author_ns = self.family.authornamespaces[self.code][0]
assert author_ns not in ns_dict
ns_dict[author_ns] = Namespace(
author_ns, 'Author', case=self.siteinfo['case'])
return ns_dict
@property
def userinfo(self):
"""Return dry data."""
return self._userinfo
def version(self):
"""Dummy version, with warning to show the callers context."""
warn('%r returning version 1.24; override if unsuitable.'
% self, DrySiteNote, stacklevel=2)
return '1.24'
def image_repository(self):
"""Return Site object for image repository e.g. commons."""
code, fam = self.shared_image_repository()
if bool(code or fam):
return pywikibot.Site(code, fam, self.username(),
interface=self.__class__)
def data_repository(self):
"""Return Site object for data repository e.g. Wikidata."""
if self.hostname().endswith('.beta.wmflabs.org'):
# TODO: Use definition for beta cluster's wikidata
code, fam = None, None
fam_name = self.hostname().split('.')[-4]
else:
code, fam = 'wikidata', 'wikidata'
fam_name = self.family.name
# Only let through valid entries
if fam_name not in ('commons', 'wikibooks', 'wikidata', 'wikinews',
'wikipedia', 'wikiquote', 'wikisource',
'wikivoyage'):
code, fam = None, None
if bool(code or fam):
return pywikibot.Site(code, fam, self.username(),
interface=DryDataSite)
class DryDataSite(DrySite, pywikibot.site.DataSite):
"""Dummy class to use instead of L{pywikibot.site.DataSite}."""
def | (self):
namespaces = super(DryDataSite, self)._build_namespaces()
namespaces[0].defaultcontentmodel = 'wikibase-item'
namespaces[120] = Namespace(id=120,
case='first-letter',
canonical_name='Property',
defaultcontentmodel='wikibase-property')
return namespaces
class DryPage(pywikibot.Page):
"""Dummy class that acts like a Page but avoids network activity."""
_pageid = 1
_disambig = False
_isredir = False
def isDisambig(self):
"""Return disambig status stored in _disambig."""
return self._disambig
class FakeLoginManager(pywikibot.data.api.LoginManager):
"""Loads a fake password."""
@property
def password(self):
"""Get the fake password."""
return 'foo'
@password.setter
def password(self, value):
"""Ignore password changes."""
pass
class DummyHttp(object):
"""A class simulating the http module."""
def __init__(self, wrapper):
"""Constructor with the given PatchedHttp instance."""
self.__wrapper = wrapper
def request(self, *args, **kwargs):
"""The patched request method."""
result = self.__wrapper.before_request(*args, **kwargs)
if result is False:
result = self.__wrapper._old_http.request(*args, **kwargs)
elif isinstance(result, Mapping):
result = json.dumps(result)
elif not isinstance(result, unicode):
raise ValueError('The result is not a valid type '
'"{0}"'.format(type(result)))
response = self.__wrapper.after_request(result, *args, **kwargs)
if response is None:
response = result
return response
def fetch(self, *args, **kwargs):
"""The patched fetch method."""
result = self.__wrapper.before_fetch(*args, **kwargs)
if result is False:
result = self.__wrapper._old_http.fetch(*args, **kwargs)
elif not isinstance(result, threadedhttp.HttpRequest):
raise ValueError('The result is not a valid type '
'"{0}"'.format(type(result)))
response = self.__wrapper.after_fetch(result, *args, **kwargs)
if response is None:
response = result
return response
class PatchedHttp(object):
"""
A ContextWrapper to handle any data going through the http module.
This patches the C{http} import in the given module to a class simulating
C{request} and C{fetch}. It has a C{data} attribute which is either a
static value which the requests will return or it's a callable returning the
data. If it's a callable it'll be called with the same parameters as the
original function in the L{http} module. For fine grained control it's
possible to override/monkey patch the C{before_request} and C{before_fetch}
methods. By default they just return C{data} directory or call it if it's
callable.
Even though L{http.request} is calling L{http.fetch}, it won't call the
patched method.
The data returned for C{request} may either be C{False}, a C{unicode} or a
C{Mapping} which is converted into a json string. The data returned for
C{fetch} can only be C{False} or a L{threadedhttp.HttpRequest}. For both
variants any other types are not allowed and if it is False it'll use the
original method and do an actual request.
Afterwards it is always calling C{after_request} or C{after_fetch} with the
response and given arguments. That can return a different response too, but
can also return None so that the original response is forwarded.
"""
def __init__(self, module, data=None):
"""
Constructor.
@param module: The given module to patch. It must have the http module
imported as http.
@type module: Module
@param data: The data returned for any request or fetch.
@type data: callable or False (or other depending on request/fetch)
"""
super(PatchedHttp, self).__init__()
self._module = module
self.data = data
def _handle_data(self, *args, **kwargs):
"""Return the data after it may have been called."""
if self.data is None:
raise ValueError('No handler is defined.')
elif callable(self.data):
return self.data(*args, **kwargs)
else:
return self.data
def before_request(self, *args, **kwargs):
"""Return the value which should is returned by request."""
return self._handle_data(*args, **kwargs)
def before_fetch(self, *args, **kwargs):
"""Return the value which should is returned by fetch."""
return self._handle_data(*args, **kwargs)
def after_request(self, response, *args, **kwargs):
"""Handle the response after request."""
pass
def after_fetch(self, response, *args, **kwargs):
"""Handle the response after fetch."""
pass
def __enter__(self):
"""Patch the http module property."""
self._old_http = self._module.http
self._module.http = DummyHttp(self)
return self
def __exit__(self, exc_type, exc_value, traceback):
"""Reset the http module property."""
self._module.http = self._old_http
def is_simple_locale_with_region(locale):
"""Check if a locale is only an ISO and region code."""
# Some locale are unicode names, which are not valid
try:
lang, sep, qualifier = locale.partition('_')
except UnicodeDecodeError:
return False
if '-' in lang:
return False
# Only allow qualifiers that look like a country code, without any suffix
if qualifier and len(qualifier) == 2:
return True
else:
return False
def get_simple_locales():
"""Get list of simple locales."""
return [locale_code for locale_code in sorted(locale.locale_alias.keys())
if is_simple_locale_with_region(locale_code)]
def generate_locale(lang, region=True, encoding='utf8'):
"""
Generate a locale string.
@param lang: language code
@type lang: str
@param region: region code; if True, a random one will be used
@type region: str or True
@param encoding: encoding name
@type encoding: str
@rtype: str
"""
locale_prefix = lang + '_'
if region is True:
locales = get_simple_locales()
lang_locales = [code for code in locales
if code.startswith(locale_prefix)]
assert(lang_locales)
# Get a region from the first locale
lang, sep, region = lang_locales[0].partition('_')
assert lang and sep and region
if region:
locale_code = locale_prefix + region.upper()
else:
locale_code = lang
if encoding:
locale_code += '.' + encoding
return locale_code
def execute_with_temp_text_file(text, command, **kwargs):
"""
Perform command on a temporary file.
@param text: contents of temporary file
@type text: str
@param command: command to execute with {0} replaced with the filename
@type command: str
@param kwargs: parameters for tempfile.mkstemp/tempfile.NamedTemporaryFile,
such as prefix, suffix and dir
"""
options = {
'shell': True,
'stdout': subprocess.PIPE,
'stderr': subprocess.STDOUT,
}
# NamedTemporaryFile does not work correctly with win32_set_global_locale
# subprocess.Popen is a context handler in Python 3.2+
if OSWIN32 or PY2:
(fd, filename) = tempfile.mkstemp(text=True, **kwargs)
try:
os.close(fd)
with open(filename, 'wt') as f:
f.write(text)
command = command.format(filename)
p = subprocess.Popen(command, **options)
out = p.communicate()[0]
# Python 2 raises an exception when attempting to close the process
# Python 3 does not allow the file to be removed until the process
# has been closed
if not PY2:
p.terminate()
finally:
try:
os.remove(filename)
except OSError:
# As it is a temporary file, the OS should clean it up
unittest_print('Could not delete {0}'.format(filename))
else:
with tempfile.NamedTemporaryFile(mode='w+t', **kwargs) as f:
f.write(text)
f.flush()
command = command.format(f.name)
with subprocess.Popen(command, **options) as p:
out = p.communicate()[0]
if out:
unittest_print('command "{0}" output: {1}'.format(command, out))
def win32_set_global_locale(locale_code):
"""Set global locale on win32."""
locale_code = locale_code.split('.')[0]
win_locale_code = locale_code.replace('_', '-')
locale_update_xml = WIN32_LOCALE_UPDATE % win_locale_code
command = 'control.exe intl.cpl,,/f:"{0}"'
execute_with_temp_text_file(locale_update_xml, command, suffix='.xml')
actual_code = locale.getdefaultlocale()[0]
assert locale_code == actual_code, \
('locale code {0} not set; actual code is {1}'
.format(locale_code, actual_code))
def execute(command, data_in=None, timeout=0, error=None):
"""
Execute a command and capture outputs.
On Python 2.6 it adds an option to ignore the deprecation warning from
the cryptography package after the first entry of the command parameter.
@param command: executable to run and arguments to use
@type command: list of unicode
"""
if PYTHON_VERSION < (2, 7):
command.insert(
1, '-W ignore:{0}:DeprecationWarning'.format(PYTHON_26_CRYPTO_WARN))
# Any environment variables added on Windows must be of type
# str() on Python 2.
env = os.environ.copy()
# Python issue 6906
if PYTHON_VERSION < (2, 6, 6):
for var in ('TK_LIBRARY', 'TCL_LIBRARY', 'TIX_LIBRARY'):
if var in env:
env[var] = env[var].encode('mbcs')
# Prevent output by test package; e.g. 'max_retries reduced from x to y'
env[str('PYWIKIBOT_TEST_QUIET')] = str('1')
# sys.path may have been modified by the test runner to load dependencies.
pythonpath = os.pathsep.join(sys.path)
if OSWIN32 and PY2:
pythonpath = str(pythonpath)
env[str('PYTHONPATH')] = pythonpath
env[str('PYTHONIOENCODING')] = str(config.console_encoding)
# LC_ALL is used by i18n.input as an alternative for userinterface_lang
# A complete locale string needs to be created, so the country code
# is guessed, however it is discarded when loading config.
if config.userinterface_lang:
current_locale = locale.getdefaultlocale()[0]
if current_locale in [None, 'C']:
current_locale = 'en'
else:
current_locale = current_locale.split('.')[0]
locale_prefix = str(config.userinterface_lang + '_')
if not current_locale.startswith(locale_prefix):
locale_code = generate_locale(
config.userinterface_lang,
encoding=config.console_encoding)
env[str('LC_ALL')] = str(locale_code)
if OSWIN32:
# This is not multiprocessing safe, as it affects all processes
win32_set_global_locale(locale_code)
else:
current_locale = None
else:
current_locale = None
# Set EDITOR to an executable that ignores all arguments and does nothing.
env[str('EDITOR')] = str('call' if OSWIN32 else 'true')
options = {
'stdout': subprocess.PIPE,
'stderr': subprocess.PIPE
}
if data_in is not None:
options['stdin'] = subprocess.PIPE
try:
p = subprocess.Popen(command, env=env, **options)
except TypeError as e:
# Generate a more informative error
if OSWIN32 and PY2:
unicode_env = [(k, v) for k, v in os.environ.items()
if not isinstance(k, str) or
not isinstance(v, str)]
if unicode_env:
raise TypeError(
'%s: unicode in os.environ: %r' % (e, unicode_env))
child_unicode_env = [(k, v) for k, v in env.items()
if not isinstance(k, str) or
not isinstance(v, str)]
if child_unicode_env:
raise TypeError(
'%s: unicode in child env: %r' % (e, child_unicode_env))
raise
if data_in is not None:
p.stdin.write(data_in.encode(config.console_encoding))
p.stdin.flush() # _communicate() otherwise has a broken pipe
stderr_lines = b''
waited = 0
while (error or (waited < timeout)) and p.poll() is None:
# In order to kill 'shell' and others early, read only a single
# line per second, and kill the process as soon as the expected
# output has been seen.
# Additional lines will be collected later with p.communicate()
if error:
line = p.stderr.readline()
stderr_lines += line
if error in line.decode(config.console_encoding):
break
time.sleep(1)
waited += 1
if (timeout or error) and p.poll() is None:
p.kill()
if p.poll() is not None:
stderr_lines += p.stderr.read()
data_out = p.communicate()
if OSWIN32 and current_locale:
win32_set_global_locale(current_locale)
return {'exit_code': p.returncode,
'stdout': data_out[0].decode(config.console_encoding),
'stderr': (stderr_lines + data_out[1]).decode(config.console_encoding)}
def execute_pwb(args, data_in=None, timeout=0, error=None, overrides=None):
"""
Execute the pwb.py script and capture outputs.
@param args: list of arguments for pwb.py
@type args: list of unicode
@param overrides: mapping of pywikibot symbols to test replacements
@type overrides: dict
"""
command = [sys.executable]
if overrides:
command.append('-c')
overrides = '; '.join(
'%s = %s' % (key, value) for key, value in overrides.items())
command.append(
'import pwb; import pywikibot; %s; pwb.main()'
% overrides)
else:
command.append(_pwb_py)
return execute(command=command + args,
data_in=data_in, timeout=timeout, error=error)
| _build_namespaces |
4bit.rs | #![feature(used)]
#![no_std]
extern crate cortex_m_semihosting;
#[cfg(not(feature = "use_semihosting"))]
extern crate panic_abort;
#[cfg(feature = "use_semihosting")]
extern crate panic_semihosting;
extern crate cortex_m;
extern crate cortex_m_rt;
extern crate atsamd21_hal;
extern crate metro_m0;
use metro_m0::clock::GenericClockController;
use metro_m0::delay::Delay;
use metro_m0::{CorePeripherals, Peripherals};
extern crate hd44780_hal;
use hd44780_hal::HD44780;
extern crate embedded_hal;
fn main() {
let mut peripherals = Peripherals::take().unwrap();
let core = CorePeripherals::take().unwrap(); | let mut clocks = GenericClockController::new(
peripherals.GCLK,
&mut peripherals.PM,
&mut peripherals.SYSCTRL,
&mut peripherals.NVMCTRL,
);
let mut pins = metro_m0::pins(peripherals.PORT);
let delay = Delay::new(core.SYST, &mut clocks);
let mut lcd = HD44780::new_4bit(
pins.d4.into_open_drain_output(&mut pins.port), // Register Select pin
pins.d3.into_open_drain_output(&mut pins.port), // Enable pin
pins.d9.into_open_drain_output(&mut pins.port), // d4
pins.d10.into_open_drain_output(&mut pins.port), // d5
pins.d11.into_open_drain_output(&mut pins.port), // d6
pins.d12.into_open_drain_output(&mut pins.port), // d7
delay,
);
// Unshift display and set cursor to 0
lcd.reset();
// Clear existing characters
lcd.clear();
// Enable the display, enable cursor and blink the cursor
lcd.set_display_mode(true, true, true);
// Display the following string
lcd.write_str("Hello, world!");
// Move the cursor to the second line
lcd.set_cursor_pos(40);
// Display the following string on the second line
lcd.write_str("I'm on line 2!");
loop { }
} | |
integration.rs | use fungus::prelude::*;
#[test]
fn test_use_syntax() | {
let home = user::home_dir().unwrap();
assert_eq!(PathBuf::from(&home), sys::abs("~").unwrap());
} |
|
navigation.js | /**
* Handles toggling the navigation menu for small screens and
* accessibility for submenu items.
*/
( function() { | if ( ! nav ) {
return;
}
button = nav.getElementsByTagName( 'button' )[0];
menu = nav.getElementsByTagName( 'ul' )[0];
if ( ! button ) {
return;
}
// Hide button if menu is missing or empty.
if ( ! menu || ! menu.childNodes.length ) {
button.style.display = 'none';
return;
}
button.onclick = function() {
if ( -1 === menu.className.indexOf( 'nav-menu' ) ) {
menu.className = 'nav-menu';
}
if ( -1 !== button.className.indexOf( 'toggled-on' ) ) {
button.className = button.className.replace( ' toggled-on', '' );
menu.className = menu.className.replace( ' toggled-on', '' );
} else {
button.className += ' toggled-on';
menu.className += ' toggled-on';
}
};
} )();
// Better focus for hidden submenu items for accessibility.
( function( $ ) {
$( '.main-navigation' ).find( 'a' ).on( 'focus.twentytwelve blur.twentytwelve', function() {
$( this ).parents( '.menu-item, .page_item' ).toggleClass( 'focus' );
} );
if ( 'ontouchstart' in window ) {
$('body').on( 'touchstart.twentytwelve', '.menu-item-has-children > a, .page_item_has_children > a', function( e ) {
var el = $( this ).parent( 'li' );
if ( ! el.hasClass( 'focus' ) ) {
e.preventDefault();
el.toggleClass( 'focus' );
el.siblings( '.focus').removeClass( 'focus' );
}
} );
}
} )( jQuery ); | var nav = document.getElementById( 'site-navigation' ), button, menu; |
muxer.go | // Copyright 2020, Chef. All rights reserved.
// https://github.com/q191201771/lal
//
// Use of this source code is governed by a MIT-style license
// that can be found in the License file.
//
// Author: Chef ([email protected])
package hls
import (
"bytes"
"fmt"
"time"
"github.com/q191201771/lal/pkg/mpegts"
"github.com/q191201771/lal/pkg/base"
"github.com/q191201771/naza/pkg/nazalog"
)
// TODO chef: 转换TS流的功能(通过回调供httpts使用)也放在了Muxer中,好处是hls和httpts可以共用一份TS流。
// 后续从架构上考虑,packet hls,mpegts,logic的分工
type MuxerObserver interface {
OnPatPmt(b []byte)
// @param rawFrame TS流,回调结束后,内部不再使用该内存块
// @param boundary 新的TS流接收者,应该从该标志为true时开始发送数据
//
OnTsPackets(rawFrame []byte, boundary bool)
}
type MuxerConfig struct {
OutPath string `json:"out_path"` // m3u8和ts文件的输出根目录,注意,末尾需以'/'结束
FragmentDurationMs int `json:"fragment_duration_ms"`
FragmentNum int `json:"fragment_num"`
// hls文件清理模式:
// 0 不删除m3u8+ts文件,可用于录制等场景
// 1 在输入流结束后删除m3u8+ts文件
// 注意,确切的删除时间是推流结束后的<fragment_duration_ms> * <fragment_num> * 2的时间点
// 推迟一小段时间删除,是为了避免输入流刚结束,hls的拉流端还没有拉取完
// 2 推流过程中,持续删除过期的ts文件,只保留最近的<fragment_num> * 2个左右的ts文件
// TODO chef: lalserver的模式1的逻辑是在上层做的,应该重构到hls模块中
CleanupMode int `json:"cleanup_mode"`
}
const (
CleanupModeNever = 0
CleanupModeInTheEnd = 1
CleanupModeAsap = 2
)
// 输入rtmp流,转出hls(m3u8+ts)至文件中,并回调给上层转出ts流
type Muxer struct {
UniqueKey string
streamName string // const after init
outPath string // const after init
playlistFilename string // const after init
playlistFilenameBak string // const after init
recordPlayListFilename string // const after init
recordPlayListFilenameBak string // const after init
config *MuxerConfig |
fragment Fragment
opened bool
videoCc uint8
audioCc uint8
fragTs uint64 // 新建立fragment时的时间戳,毫秒 * 90
nfrags int // 大序号,增长到config.FragmentNum后,就增长frag
frag int // 写入m3u8的EXT-X-MEDIA-SEQUENCE字段
frags []fragmentInfo // TS文件的固定大小环形队列,记录TS的信息
recordMaxFragDuration float64
streamer *Streamer
patpmt []byte
}
// 记录fragment的一些信息,注意,写m3u8文件时可能还需要用到历史fragment的信息
type fragmentInfo struct {
id int // fragment的自增序号
duration float64 // 当前fragment中数据的时长,单位秒
discont bool // #EXT-X-DISCONTINUITY
filename string
}
// @param enable 如果false,说明hls功能没开,也即不写文件,但是MuxerObserver依然会回调
// @param observer 可以为nil,如果不为nil,TS流将回调给上层
func NewMuxer(streamName string, enable bool, config *MuxerConfig, observer MuxerObserver) *Muxer {
uk := base.GenUkHlsMuxer()
op := PathStrategy.GetMuxerOutPath(config.OutPath, streamName)
playlistFilename := PathStrategy.GetLiveM3u8FileName(op, streamName)
recordPlaylistFilename := PathStrategy.GetRecordM3u8FileName(op, streamName)
playlistFilenameBak := fmt.Sprintf("%s.bak", playlistFilename)
recordPlaylistFilenameBak := fmt.Sprintf("%s.bak", recordPlaylistFilename)
frags := make([]fragmentInfo, 2*config.FragmentNum+1)
m := &Muxer{
UniqueKey: uk,
streamName: streamName,
outPath: op,
playlistFilename: playlistFilename,
playlistFilenameBak: playlistFilenameBak,
recordPlayListFilename: recordPlaylistFilename,
recordPlayListFilenameBak: recordPlaylistFilenameBak,
enable: enable,
config: config,
observer: observer,
frags: frags,
}
streamer := NewStreamer(m)
m.streamer = streamer
nazalog.Infof("[%s] lifecycle new hls muxer. muxer=%p, streamName=%s", uk, m, streamName)
return m
}
func (m *Muxer) Start() {
nazalog.Infof("[%s] start hls muxer.", m.UniqueKey)
m.ensureDir()
}
func (m *Muxer) Dispose() {
nazalog.Infof("[%s] lifecycle dispose hls muxer.", m.UniqueKey)
m.streamer.FlushAudio()
if err := m.closeFragment(true); err != nil {
nazalog.Errorf("[%s] close fragment error. err=%+v", m.UniqueKey, err)
}
}
// @param msg 函数调用结束后,内部不持有msg中的内存块
//
func (m *Muxer) FeedRtmpMessage(msg base.RtmpMsg) {
m.streamer.FeedRtmpMessage(msg)
}
func (m *Muxer) OnPatPmt(b []byte) {
m.patpmt = b
if m.observer != nil {
m.observer.OnPatPmt(b)
}
}
func (m *Muxer) OnFrame(streamer *Streamer, frame *mpegts.Frame) {
var boundary bool
var packets []byte
if frame.Sid == mpegts.StreamIdAudio {
// 为了考虑没有视频的情况也能切片,所以这里判断spspps为空时,也建议生成fragment
boundary = !streamer.VideoSeqHeaderCached()
if err := m.updateFragment(frame.Pts, boundary); err != nil {
nazalog.Errorf("[%s] update fragment error. err=%+v", m.UniqueKey, err)
return
}
if !m.opened {
nazalog.Warnf("[%s] OnFrame A not opened. boundary=%t", m.UniqueKey, boundary)
return
}
//nazalog.Debugf("[%s] WriteFrame A. dts=%d, len=%d", m.UniqueKey, frame.DTS, len(frame.Raw))
} else {
//nazalog.Debugf("[%s] OnFrame V. dts=%d, len=%d", m.UniqueKey, frame.Dts, len(frame.Raw))
// 收到视频,可能触发建立fragment的条件是:
// 关键帧数据 &&
// ((没有收到过音频seq header) || -> 只有视频
// (收到过音频seq header && fragment没有打开) || -> 音视频都有,且都已ready
// (收到过音频seq header && fragment已经打开 && 音频缓存数据不为空) -> 为什么音频缓存需不为空?
// )
boundary = frame.Key && (!streamer.AudioSeqHeaderCached() || !m.opened || !streamer.AudioCacheEmpty())
if err := m.updateFragment(frame.Dts, boundary); err != nil {
nazalog.Errorf("[%s] update fragment error. err=%+v", m.UniqueKey, err)
return
}
if !m.opened {
nazalog.Warnf("[%s] OnFrame V not opened. boundary=%t, key=%t", m.UniqueKey, boundary, frame.Key)
return
}
//nazalog.Debugf("[%s] WriteFrame V. dts=%d, len=%d", m.UniqueKey, frame.Dts, len(frame.Raw))
}
mpegts.PackTsPacket(frame, func(packet []byte) {
if m.enable {
if err := m.fragment.WriteFile(packet); err != nil {
nazalog.Errorf("[%s] fragment write error. err=%+v", m.UniqueKey, err)
return
}
}
if m.observer != nil {
packets = append(packets, packet...)
}
})
if m.observer != nil {
m.observer.OnTsPackets(packets, boundary)
}
}
func (m *Muxer) OutPath() string {
return m.outPath
}
// 决定是否开启新的TS切片文件(注意,可能已经有TS切片,也可能没有,这是第一个切片)
//
// @param boundary 调用方认为可能是开启新TS切片的时间点
//
func (m *Muxer) updateFragment(ts uint64, boundary bool) error {
discont := true
// 如果已经有TS切片,检查是否需要强制开启新的切片,以及切片是否发生跳跃
// 注意,音频和视频是在一起检查的
if m.opened {
f := m.getCurrFrag()
// 以下情况,强制开启新的分片:
// 1. 当前时间戳 - 当前分片的初始时间戳 > 配置中单个ts分片时长的10倍
// 原因可能是:
// 1. 当前包的时间戳发生了大的跳跃
// 2. 一直没有I帧导致没有合适的时间重新切片,堆积的包达到阈值
// 2. 往回跳跃超过了阈值
//
maxfraglen := uint64(m.config.FragmentDurationMs * 90 * 10)
if (ts > m.fragTs && ts-m.fragTs > maxfraglen) || (m.fragTs > ts && m.fragTs-ts > negMaxfraglen) {
nazalog.Warnf("[%s] force fragment split. fragTs=%d, ts=%d", m.UniqueKey, m.fragTs, ts)
if err := m.closeFragment(false); err != nil {
return err
}
if err := m.openFragment(ts, true); err != nil {
return err
}
}
// 更新当前分片的时间长度
//
// TODO chef:
// f.duration(也即写入m3u8中记录分片时间长度)的做法我觉得有问题
// 此处用最新收到的数据更新f.duration
// 但是假设fragment翻滚,数据可能是写入下一个分片中
// 是否就导致了f.duration和实际分片时间长度不一致
if ts > m.fragTs {
duration := float64(ts-m.fragTs) / 90000
if duration > f.duration {
f.duration = duration
}
}
discont = false
// 已经有TS切片,切片时长没有达到设置的阈值,则不开启新的切片
if f.duration < float64(m.config.FragmentDurationMs)/1000 {
return nil
}
}
// 开启新的fragment
// 此时的情况是,上层认为是合适的开启分片的时机(比如是I帧),并且
// 1. 当前是第一个分片
// 2. 当前不是第一个分片,但是上一个分片已经达到配置时长
if boundary {
if err := m.closeFragment(false); err != nil {
return err
}
if err := m.openFragment(ts, discont); err != nil {
return err
}
}
return nil
}
// @param discont 不连续标志,会在m3u8文件的fragment前增加`#EXT-X-DISCONTINUITY`
//
func (m *Muxer) openFragment(ts uint64, discont bool) error {
if m.opened {
return ErrHls
}
id := m.getFragmentId()
filename := PathStrategy.GetTsFileName(m.streamName, id, int(time.Now().UnixNano()/1e6))
filenameWithPath := PathStrategy.GetTsFileNameWithPath(m.outPath, filename)
if m.enable {
if err := m.fragment.OpenFile(filenameWithPath); err != nil {
return err
}
if err := m.fragment.WriteFile(m.patpmt); err != nil {
return err
}
}
m.opened = true
frag := m.getCurrFrag()
frag.discont = discont
frag.id = id
frag.filename = filename
frag.duration = 0
m.fragTs = ts
// nrm said: start fragment with audio to make iPhone happy
m.streamer.FlushAudio()
return nil
}
func (m *Muxer) closeFragment(isLast bool) error {
if !m.opened {
// 注意,首次调用closeFragment时,有可能opened为false
return nil
}
if m.enable {
if err := m.fragment.CloseFile(); err != nil {
return err
}
}
m.opened = false
// 更新序号,为下个分片做准备
// 注意,后面getFrag和getCurrFrag的调用,都依赖该处
m.incrFrag()
m.writePlaylist(isLast)
if m.config.CleanupMode == CleanupModeNever || m.config.CleanupMode == CleanupModeInTheEnd {
m.writeRecordPlaylist(isLast)
}
if m.config.CleanupMode == CleanupModeAsap {
// 删除过期文件
// 注意,此处获取的是环形队列该位置的上一轮残留下的信息
//
frag := m.getCurrFrag()
if frag.filename != "" {
filenameWithPath := PathStrategy.GetTsFileNameWithPath(m.outPath, frag.filename)
if err := fslCtx.Remove(filenameWithPath); err != nil {
nazalog.Warnf("[%s] remove stale fragment file failed. filename=%s, err=%+v", m.UniqueKey, filenameWithPath, err)
}
}
}
return nil
}
func (m *Muxer) writeRecordPlaylist(isLast bool) {
if !m.enable {
return
}
// 找出整个直播流从开始到结束最大的分片时长
// 注意,由于前面已经incr过了,所以这里-1获取
//frag := m.getCurrFrag()
currFrag := m.getFrag(m.nfrags - 1)
if currFrag.duration > m.recordMaxFragDuration {
m.recordMaxFragDuration = currFrag.duration + 0.5
}
fragLines := fmt.Sprintf("#EXTINF:%.3f,\n%s\n", currFrag.duration, currFrag.filename)
content, err := fslCtx.ReadFile(m.recordPlayListFilename)
if err == nil {
// m3u8文件已经存在
content = bytes.TrimSuffix(content, []byte("#EXT-X-ENDLIST\n"))
content, err = updateTargetDurationInM3u8(content, int(m.recordMaxFragDuration))
if err != nil {
nazalog.Errorf("[%s] update target duration failed. err=%+v", m.UniqueKey, err)
return
}
if currFrag.discont {
content = append(content, []byte("#EXT-X-DISCONTINUITY\n")...)
}
content = append(content, []byte(fragLines)...)
content = append(content, []byte("#EXT-X-ENDLIST\n")...)
} else {
// m3u8文件不存在
var buf bytes.Buffer
buf.WriteString("#EXTM3U\n")
buf.WriteString("#EXT-X-VERSION:3\n")
buf.WriteString(fmt.Sprintf("#EXT-X-TARGETDURATION:%d\n", int(m.recordMaxFragDuration)))
buf.WriteString(fmt.Sprintf("#EXT-X-MEDIA-SEQUENCE:%d\n\n", 0))
if currFrag.discont {
buf.WriteString("#EXT-X-DISCONTINUITY\n")
}
buf.WriteString(fragLines)
buf.WriteString("#EXT-X-ENDLIST\n")
content = buf.Bytes()
}
if err := writeM3u8File(content, m.recordPlayListFilename, m.recordPlayListFilenameBak); err != nil {
nazalog.Errorf("[%s] write record m3u8 file error. err=%+v", m.UniqueKey, err)
}
}
func (m *Muxer) writePlaylist(isLast bool) {
if !m.enable {
return
}
// 找出时长最长的fragment
maxFrag := float64(m.config.FragmentDurationMs) / 1000
for i := 0; i < m.nfrags; i++ {
frag := m.getFrag(i)
if frag.duration > maxFrag {
maxFrag = frag.duration + 0.5
}
}
// TODO chef 优化这块buffer的构造
var buf bytes.Buffer
buf.WriteString("#EXTM3U\n")
buf.WriteString("#EXT-X-VERSION:3\n")
buf.WriteString("#EXT-X-ALLOW-CACHE:NO\n")
buf.WriteString(fmt.Sprintf("#EXT-X-TARGETDURATION:%d\n", int(maxFrag)))
buf.WriteString(fmt.Sprintf("#EXT-X-MEDIA-SEQUENCE:%d\n\n", m.frag))
for i := 0; i < m.nfrags; i++ {
frag := m.getFrag(i)
if frag.discont {
buf.WriteString("#EXT-X-DISCONTINUITY\n")
}
buf.WriteString(fmt.Sprintf("#EXTINF:%.3f,\n%s\n", frag.duration, frag.filename))
}
if isLast {
buf.WriteString("#EXT-X-ENDLIST\n")
}
if err := writeM3u8File(buf.Bytes(), m.playlistFilename, m.playlistFilenameBak); err != nil {
nazalog.Errorf("[%s] write live m3u8 file error. err=%+v", m.UniqueKey, err)
}
}
func (m *Muxer) ensureDir() {
if !m.enable {
return
}
//err := fslCtx.RemoveAll(m.outPath)
//nazalog.Assert(nil, err)
err := fslCtx.MkdirAll(m.outPath, 0777)
nazalog.Assert(nil, err)
}
func (m *Muxer) getFragmentId() int {
return m.frag + m.nfrags
}
func (m *Muxer) getFrag(n int) *fragmentInfo {
return &m.frags[(m.frag+n)%(m.config.FragmentNum*2+1)]
}
func (m *Muxer) getCurrFrag() *fragmentInfo {
return m.getFrag(m.nfrags)
}
func (m *Muxer) incrFrag() {
if m.nfrags == m.config.FragmentNum {
m.frag++
} else {
m.nfrags++
}
} | enable bool
observer MuxerObserver |
field_reader_test.go | package metricsdata
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestField_read(t *testing.T) {
block := mockMetricMergeBlock([]uint32{1}, 5, 5)
r, err := NewReader("1.sst", block)
assert.NoError(t, err)
assert.NotNil(t, r)
scanner := newDataScanner(r)
seriesPos := scanner.scan(0, 1)
fReader := newFieldReader(block, seriesPos, 5, 5)
start, end := fReader.slotRange()
assert.Equal(t, uint16(5), start)
assert.Equal(t, uint16(5), end)
// case 1: field(2) > field(0), not exist
data := fReader.getPrimitiveData(1, 0)
assert.Nil(t, data)
// case 2: field(2) = field(2) but pID(0)<pID(1), not exist
data = fReader.getPrimitiveData(2, 0)
assert.Nil(t, data)
// case 3: field(2) = field(2) and pID(1)=pID(1), found
data = fReader.getPrimitiveData(2, 1)
assert.True(t, len(data) > 0)
// case 4: field(2) = field(2) and pID(3)>pID(1), not exist, go next field
data = fReader.getPrimitiveData(2, 3)
assert.Nil(t, data)
// case 5: field(10) = field(10) and pID(2)=pID(2), found
data = fReader.getPrimitiveData(10, 2)
assert.True(t, len(data) > 0)
// case 5: field(10) = field(10) and pID(3)>pID(2), completed
data = fReader.getPrimitiveData(10, 3)
assert.Nil(t, data)
// case 6: after completed return nil | assert.Nil(t, data)
// case 7: no fields
fReader = newFieldReader([]byte{0, 0, 0}, 0, 5, 5)
data = fReader.getPrimitiveData(10, 2)
assert.Nil(t, data)
// case 8: reset, field(100) > field(10) , completed
fReader.reset(block, seriesPos, 5, 5)
data = fReader.getPrimitiveData(2, 1)
assert.True(t, len(data) > 0)
data = fReader.getPrimitiveData(10, 2)
assert.True(t, len(data) > 0)
data = fReader.getPrimitiveData(100, 2)
assert.Nil(t, data)
data = fReader.getPrimitiveData(10, 2)
assert.Nil(t, data)
} | data = fReader.getPrimitiveData(10, 2) |
tcp.rs | use std::{io, net::SocketAddr};
use async_trait::async_trait;
use bytes::BytesMut;
use tokio::io::AsyncWriteExt;
use super::shadow::ShadowedStream;
use crate::{
app::SyncDnsClient,
proxy::{OutboundConnect, ProxyStream, SimpleProxyStream, TcpConnector, TcpOutboundHandler},
session::{Session, SocksAddrWireType},
};
pub struct Handler {
pub address: String,
pub port: u16,
pub cipher: String,
pub password: String,
pub bind_addr: SocketAddr,
pub dns_client: SyncDnsClient,
}
impl TcpConnector for Handler {}
#[async_trait]
impl TcpOutboundHandler for Handler {
fn connect_addr(&self) -> Option<OutboundConnect> |
async fn handle<'a>(
&'a self,
sess: &'a Session,
stream: Option<Box<dyn ProxyStream>>,
) -> io::Result<Box<dyn ProxyStream>> {
let stream = if let Some(stream) = stream {
stream
} else {
self.new_tcp_stream(
self.dns_client.clone(),
&self.bind_addr,
&self.address,
&self.port,
)
.await?
};
let mut stream = ShadowedStream::new(stream, &self.cipher, &self.password)?;
let mut buf = BytesMut::new();
sess.destination
.write_buf(&mut buf, SocksAddrWireType::PortLast)?;
// FIXME combine header and first payload
stream.write_all(&buf).await?;
Ok(Box::new(SimpleProxyStream(stream)))
}
}
| {
if !self.address.is_empty() && self.port != 0 {
Some(OutboundConnect::Proxy(
self.address.clone(),
self.port,
self.bind_addr,
))
} else {
None
}
} |
future.rs | use crate::{
fib::{self, Fiber},
sync::spsc::oneshot::{channel, Canceled, Receiver},
thr::prelude::*,
};
use core::{
future::Future,
intrinsics::unreachable,
pin::Pin,
task::{Context, Poll},
};
/// A future that resolves on completion of the fiber from another thread.
///
/// Dropping or closing this future will remove the fiber on a next thread
/// invocation without resuming it.
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct FiberFuture<T> {
rx: Receiver<T>,
}
#[marker]
pub trait YieldNone: Send + 'static {}
impl YieldNone for () {}
impl YieldNone for ! {}
impl<T> FiberFuture<T> {
/// Gracefully close this future.
///
/// The fiber will be removed on a next thread invocation without resuming.
#[inline]
pub fn close(&mut self) {
self.rx.close()
}
}
impl<T> Future for FiberFuture<T> {
type Output = T;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> |
}
/// Extends [`ThrToken`](crate::thr::ThrToken) types with `add_future` method.
pub trait ThrFiberFuture: ThrToken {
/// Adds the fiber `fib` to the fiber chain and returns a future, which
/// resolves on completion of the fiber.
#[inline]
fn add_future<F, Y, T>(self, fib: F) -> FiberFuture<T>
where
F: Fiber<Input = (), Yield = Y, Return = T>,
Y: YieldNone,
F: Send + 'static,
T: Send + 'static,
{
FiberFuture { rx: add_rx(self, fib) }
}
}
#[inline]
fn add_rx<H, F, Y, T>(thr: H, mut fib: F) -> Receiver<T>
where
H: ThrToken,
F: Fiber<Input = (), Yield = Y, Return = T>,
Y: YieldNone,
F: Send + 'static,
T: Send + 'static,
{
let (tx, rx) = channel();
thr.add(move || {
loop {
if tx.is_canceled() {
break;
}
match unsafe { Pin::new_unchecked(&mut fib) }.resume(()) {
fib::Yielded(_) => {}
fib::Complete(complete) => {
drop(tx.send(complete));
break;
}
}
yield;
}
});
rx
}
impl<T: ThrToken> ThrFiberFuture for T {}
| {
let rx = unsafe { self.map_unchecked_mut(|x| &mut x.rx) };
rx.poll(cx).map(|value| match value {
Ok(value) => value,
Err(Canceled) => unsafe { unreachable() },
})
} |
net.rs | use cmp;
use ffi::CString;
use fmt;
use io::{self, Error, ErrorKind};
use libc::{c_int, c_void};
use mem;
use net::{SocketAddr, Shutdown, Ipv4Addr, Ipv6Addr};
use ptr;
use sys::net::{cvt, cvt_r, cvt_gai, Socket, init, wrlen_t};
use sys::net::netc as c;
use sys_common::{AsInner, FromInner, IntoInner};
use time::Duration;
use convert::{TryFrom, TryInto};
#[cfg(any(target_os = "dragonfly", target_os = "freebsd",
target_os = "ios", target_os = "macos",
target_os = "openbsd", target_os = "netbsd",
target_os = "solaris", target_os = "haiku", target_os = "l4re"))]
use sys::net::netc::IPV6_JOIN_GROUP as IPV6_ADD_MEMBERSHIP;
#[cfg(not(any(target_os = "dragonfly", target_os = "freebsd",
target_os = "ios", target_os = "macos",
target_os = "openbsd", target_os = "netbsd",
target_os = "solaris", target_os = "haiku", target_os = "l4re")))]
use sys::net::netc::IPV6_ADD_MEMBERSHIP;
#[cfg(any(target_os = "dragonfly", target_os = "freebsd",
target_os = "ios", target_os = "macos",
target_os = "openbsd", target_os = "netbsd",
target_os = "solaris", target_os = "haiku", target_os = "l4re"))]
use sys::net::netc::IPV6_LEAVE_GROUP as IPV6_DROP_MEMBERSHIP;
#[cfg(not(any(target_os = "dragonfly", target_os = "freebsd",
target_os = "ios", target_os = "macos",
target_os = "openbsd", target_os = "netbsd",
target_os = "solaris", target_os = "haiku", target_os = "l4re")))]
use sys::net::netc::IPV6_DROP_MEMBERSHIP;
#[cfg(any(target_os = "linux", target_os = "android",
target_os = "dragonfly", target_os = "freebsd",
target_os = "openbsd", target_os = "netbsd",
target_os = "haiku", target_os = "bitrig"))]
use libc::MSG_NOSIGNAL;
#[cfg(not(any(target_os = "linux", target_os = "android",
target_os = "dragonfly", target_os = "freebsd",
target_os = "openbsd", target_os = "netbsd",
target_os = "haiku", target_os = "bitrig")))]
const MSG_NOSIGNAL: c_int = 0x0;
////////////////////////////////////////////////////////////////////////////////
// sockaddr and misc bindings
////////////////////////////////////////////////////////////////////////////////
pub fn setsockopt<T>(sock: &Socket, opt: c_int, val: c_int,
payload: T) -> io::Result<()> {
unsafe {
let payload = &payload as *const T as *const c_void;
cvt(c::setsockopt(*sock.as_inner(), opt, val, payload,
mem::size_of::<T>() as c::socklen_t))?;
Ok(())
}
}
pub fn getsockopt<T: Copy>(sock: &Socket, opt: c_int,
val: c_int) -> io::Result<T> {
unsafe {
let mut slot: T = mem::zeroed();
let mut len = mem::size_of::<T>() as c::socklen_t;
cvt(c::getsockopt(*sock.as_inner(), opt, val,
&mut slot as *mut _ as *mut _,
&mut len))?;
assert_eq!(len as usize, mem::size_of::<T>());
Ok(slot)
}
}
fn sockname<F>(f: F) -> io::Result<SocketAddr>
where F: FnOnce(*mut c::sockaddr, *mut c::socklen_t) -> c_int
{
unsafe {
let mut storage: c::sockaddr_storage = mem::zeroed();
let mut len = mem::size_of_val(&storage) as c::socklen_t;
cvt(f(&mut storage as *mut _ as *mut _, &mut len))?;
sockaddr_to_addr(&storage, len as usize)
}
}
pub fn sockaddr_to_addr(storage: &c::sockaddr_storage,
len: usize) -> io::Result<SocketAddr> {
match storage.ss_family as c_int {
c::AF_INET => {
assert!(len as usize >= mem::size_of::<c::sockaddr_in>());
Ok(SocketAddr::V4(FromInner::from_inner(unsafe {
*(storage as *const _ as *const c::sockaddr_in)
})))
}
c::AF_INET6 => {
assert!(len as usize >= mem::size_of::<c::sockaddr_in6>());
Ok(SocketAddr::V6(FromInner::from_inner(unsafe {
*(storage as *const _ as *const c::sockaddr_in6)
})))
}
_ => {
Err(Error::new(ErrorKind::InvalidInput, "invalid argument"))
}
}
}
#[cfg(target_os = "android")]
fn to_ipv6mr_interface(value: u32) -> c_int {
value as c_int
}
#[cfg(not(target_os = "android"))]
fn to_ipv6mr_interface(value: u32) -> ::libc::c_uint {
value as ::libc::c_uint
}
////////////////////////////////////////////////////////////////////////////////
// get_host_addresses
////////////////////////////////////////////////////////////////////////////////
pub struct LookupHost {
original: *mut c::addrinfo,
cur: *mut c::addrinfo,
port: u16
}
impl LookupHost {
pub fn port(&self) -> u16 {
self.port
}
}
impl Iterator for LookupHost {
type Item = SocketAddr;
fn next(&mut self) -> Option<SocketAddr> {
loop {
unsafe {
let cur = self.cur.as_ref()?;
self.cur = cur.ai_next;
match sockaddr_to_addr(mem::transmute(cur.ai_addr),
cur.ai_addrlen as usize)
{
Ok(addr) => return Some(addr),
Err(_) => continue,
}
}
}
}
}
unsafe impl Sync for LookupHost {}
unsafe impl Send for LookupHost {}
impl Drop for LookupHost {
fn drop(&mut self) {
unsafe { c::freeaddrinfo(self.original) }
}
}
impl<'a> TryFrom<&'a str> for LookupHost {
type Error = io::Error;
fn try_from(s: &str) -> io::Result<LookupHost> {
macro_rules! try_opt {
($e:expr, $msg:expr) => (
match $e {
Some(r) => r,
None => return Err(io::Error::new(io::ErrorKind::InvalidInput,
$msg)),
}
)
}
// split the string by ':' and convert the second part to u16
let mut parts_iter = s.rsplitn(2, ':');
let port_str = try_opt!(parts_iter.next(), "invalid socket address");
let host = try_opt!(parts_iter.next(), "invalid socket address");
let port: u16 = try_opt!(port_str.parse().ok(), "invalid port value");
(host, port).try_into()
}
}
impl<'a> TryFrom<(&'a str, u16)> for LookupHost {
type Error = io::Error;
fn try_from((host, port): (&'a str, u16)) -> io::Result<LookupHost> {
init();
let c_host = CString::new(host)?;
let mut hints: c::addrinfo = unsafe { mem::zeroed() };
hints.ai_socktype = c::SOCK_STREAM;
let mut res = ptr::null_mut();
unsafe {
cvt_gai(c::getaddrinfo(c_host.as_ptr(), ptr::null(), &hints, &mut res)).map(|_| {
LookupHost { original: res, cur: res, port }
})
}
}
}
////////////////////////////////////////////////////////////////////////////////
// TCP streams
////////////////////////////////////////////////////////////////////////////////
pub struct TcpStream {
inner: Socket,
}
impl TcpStream {
pub fn connect(addr: io::Result<&SocketAddr>) -> io::Result<TcpStream> {
let addr = addr?;
init();
let sock = Socket::new(addr, c::SOCK_STREAM)?;
let (addrp, len) = addr.into_inner();
cvt_r(|| unsafe { c::connect(*sock.as_inner(), addrp, len) })?;
Ok(TcpStream { inner: sock })
}
pub fn connect_timeout(addr: &SocketAddr, timeout: Duration) -> io::Result<TcpStream> {
init();
let sock = Socket::new(addr, c::SOCK_STREAM)?;
sock.connect_timeout(addr, timeout)?;
Ok(TcpStream { inner: sock })
}
pub fn socket(&self) -> &Socket { &self.inner }
pub fn into_socket(self) -> Socket { self.inner }
pub fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
self.inner.set_timeout(dur, c::SO_RCVTIMEO)
}
pub fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
self.inner.set_timeout(dur, c::SO_SNDTIMEO)
}
pub fn read_timeout(&self) -> io::Result<Option<Duration>> {
self.inner.timeout(c::SO_RCVTIMEO)
}
pub fn write_timeout(&self) -> io::Result<Option<Duration>> {
self.inner.timeout(c::SO_SNDTIMEO)
}
pub fn peek(&self, buf: &mut [u8]) -> io::Result<usize> {
self.inner.peek(buf)
}
pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {
self.inner.read(buf)
}
pub fn write(&self, buf: &[u8]) -> io::Result<usize> {
let len = cmp::min(buf.len(), <wrlen_t>::max_value() as usize) as wrlen_t;
let ret = cvt(unsafe {
c::send(*self.inner.as_inner(),
buf.as_ptr() as *const c_void,
len,
MSG_NOSIGNAL)
})?;
Ok(ret as usize)
}
pub fn peer_addr(&self) -> io::Result<SocketAddr> {
sockname(|buf, len| unsafe {
c::getpeername(*self.inner.as_inner(), buf, len)
})
}
pub fn socket_addr(&self) -> io::Result<SocketAddr> {
sockname(|buf, len| unsafe {
c::getsockname(*self.inner.as_inner(), buf, len)
})
}
pub fn shutdown(&self, how: Shutdown) -> io::Result<()> {
self.inner.shutdown(how)
}
pub fn duplicate(&self) -> io::Result<TcpStream> {
self.inner.duplicate().map(|s| TcpStream { inner: s })
}
pub fn set_nodelay(&self, nodelay: bool) -> io::Result<()> {
self.inner.set_nodelay(nodelay)
}
pub fn nodelay(&self) -> io::Result<bool> {
self.inner.nodelay()
}
pub fn set_ttl(&self, ttl: u32) -> io::Result<()> {
setsockopt(&self.inner, c::IPPROTO_IP, c::IP_TTL, ttl as c_int)
}
pub fn ttl(&self) -> io::Result<u32> {
let raw: c_int = getsockopt(&self.inner, c::IPPROTO_IP, c::IP_TTL)?;
Ok(raw as u32)
}
pub fn take_error(&self) -> io::Result<Option<io::Error>> {
self.inner.take_error()
}
pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {
self.inner.set_nonblocking(nonblocking)
}
}
impl FromInner<Socket> for TcpStream {
fn from_inner(socket: Socket) -> TcpStream {
TcpStream { inner: socket }
}
}
impl fmt::Debug for TcpStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut res = f.debug_struct("TcpStream");
if let Ok(addr) = self.socket_addr() {
res.field("addr", &addr);
}
if let Ok(peer) = self.peer_addr() {
res.field("peer", &peer);
}
let name = if cfg!(windows) {"socket"} else {"fd"};
res.field(name, &self.inner.as_inner())
.finish()
}
}
////////////////////////////////////////////////////////////////////////////////
// TCP listeners
////////////////////////////////////////////////////////////////////////////////
pub struct TcpListener {
inner: Socket,
}
impl TcpListener {
pub fn bind(addr: io::Result<&SocketAddr>) -> io::Result<TcpListener> {
let addr = addr?;
init();
let sock = Socket::new(addr, c::SOCK_STREAM)?;
// On platforms with Berkeley-derived sockets, this allows
// to quickly rebind a socket, without needing to wait for
// the OS to clean up the previous one.
if !cfg!(windows) {
setsockopt(&sock, c::SOL_SOCKET, c::SO_REUSEADDR,
1 as c_int)?;
}
// Bind our new socket
let (addrp, len) = addr.into_inner();
cvt(unsafe { c::bind(*sock.as_inner(), addrp, len as _) })?;
// Start listening
cvt(unsafe { c::listen(*sock.as_inner(), 128) })?;
Ok(TcpListener { inner: sock })
}
pub fn socket(&self) -> &Socket { &self.inner }
pub fn into_socket(self) -> Socket { self.inner }
pub fn socket_addr(&self) -> io::Result<SocketAddr> {
sockname(|buf, len| unsafe {
c::getsockname(*self.inner.as_inner(), buf, len)
})
}
pub fn accept(&self) -> io::Result<(TcpStream, SocketAddr)> {
let mut storage: c::sockaddr_storage = unsafe { mem::zeroed() };
let mut len = mem::size_of_val(&storage) as c::socklen_t;
let sock = self.inner.accept(&mut storage as *mut _ as *mut _,
&mut len)?;
let addr = sockaddr_to_addr(&storage, len as usize)?;
Ok((TcpStream { inner: sock, }, addr))
}
pub fn duplicate(&self) -> io::Result<TcpListener> {
self.inner.duplicate().map(|s| TcpListener { inner: s })
}
pub fn set_ttl(&self, ttl: u32) -> io::Result<()> {
setsockopt(&self.inner, c::IPPROTO_IP, c::IP_TTL, ttl as c_int)
}
pub fn ttl(&self) -> io::Result<u32> {
let raw: c_int = getsockopt(&self.inner, c::IPPROTO_IP, c::IP_TTL)?;
Ok(raw as u32)
}
pub fn set_only_v6(&self, only_v6: bool) -> io::Result<()> {
setsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_V6ONLY, only_v6 as c_int)
}
pub fn only_v6(&self) -> io::Result<bool> {
let raw: c_int = getsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_V6ONLY)?;
Ok(raw != 0)
}
pub fn take_error(&self) -> io::Result<Option<io::Error>> {
self.inner.take_error()
}
pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {
self.inner.set_nonblocking(nonblocking)
}
}
impl FromInner<Socket> for TcpListener {
fn from_inner(socket: Socket) -> TcpListener {
TcpListener { inner: socket }
}
}
impl fmt::Debug for TcpListener {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut res = f.debug_struct("TcpListener");
if let Ok(addr) = self.socket_addr() {
res.field("addr", &addr);
}
let name = if cfg!(windows) | else {"fd"};
res.field(name, &self.inner.as_inner())
.finish()
}
}
////////////////////////////////////////////////////////////////////////////////
// UDP
////////////////////////////////////////////////////////////////////////////////
pub struct UdpSocket {
inner: Socket,
}
impl UdpSocket {
pub fn bind(addr: io::Result<&SocketAddr>) -> io::Result<UdpSocket> {
let addr = addr?;
init();
let sock = Socket::new(addr, c::SOCK_DGRAM)?;
let (addrp, len) = addr.into_inner();
cvt(unsafe { c::bind(*sock.as_inner(), addrp, len as _) })?;
Ok(UdpSocket { inner: sock })
}
pub fn socket(&self) -> &Socket { &self.inner }
pub fn into_socket(self) -> Socket { self.inner }
pub fn socket_addr(&self) -> io::Result<SocketAddr> {
sockname(|buf, len| unsafe {
c::getsockname(*self.inner.as_inner(), buf, len)
})
}
pub fn recv_from(&self, buf: &mut [u8]) -> io::Result<(usize, SocketAddr)> {
self.inner.recv_from(buf)
}
pub fn peek_from(&self, buf: &mut [u8]) -> io::Result<(usize, SocketAddr)> {
self.inner.peek_from(buf)
}
pub fn send_to(&self, buf: &[u8], dst: &SocketAddr) -> io::Result<usize> {
let len = cmp::min(buf.len(), <wrlen_t>::max_value() as usize) as wrlen_t;
let (dstp, dstlen) = dst.into_inner();
let ret = cvt(unsafe {
c::sendto(*self.inner.as_inner(),
buf.as_ptr() as *const c_void, len,
MSG_NOSIGNAL, dstp, dstlen)
})?;
Ok(ret as usize)
}
pub fn duplicate(&self) -> io::Result<UdpSocket> {
self.inner.duplicate().map(|s| UdpSocket { inner: s })
}
pub fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
self.inner.set_timeout(dur, c::SO_RCVTIMEO)
}
pub fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
self.inner.set_timeout(dur, c::SO_SNDTIMEO)
}
pub fn read_timeout(&self) -> io::Result<Option<Duration>> {
self.inner.timeout(c::SO_RCVTIMEO)
}
pub fn write_timeout(&self) -> io::Result<Option<Duration>> {
self.inner.timeout(c::SO_SNDTIMEO)
}
pub fn set_broadcast(&self, broadcast: bool) -> io::Result<()> {
setsockopt(&self.inner, c::SOL_SOCKET, c::SO_BROADCAST, broadcast as c_int)
}
pub fn broadcast(&self) -> io::Result<bool> {
let raw: c_int = getsockopt(&self.inner, c::SOL_SOCKET, c::SO_BROADCAST)?;
Ok(raw != 0)
}
pub fn set_multicast_loop_v4(&self, multicast_loop_v4: bool) -> io::Result<()> {
setsockopt(&self.inner, c::IPPROTO_IP, c::IP_MULTICAST_LOOP, multicast_loop_v4 as c_int)
}
pub fn multicast_loop_v4(&self) -> io::Result<bool> {
let raw: c_int = getsockopt(&self.inner, c::IPPROTO_IP, c::IP_MULTICAST_LOOP)?;
Ok(raw != 0)
}
pub fn set_multicast_ttl_v4(&self, multicast_ttl_v4: u32) -> io::Result<()> {
setsockopt(&self.inner, c::IPPROTO_IP, c::IP_MULTICAST_TTL, multicast_ttl_v4 as c_int)
}
pub fn multicast_ttl_v4(&self) -> io::Result<u32> {
let raw: c_int = getsockopt(&self.inner, c::IPPROTO_IP, c::IP_MULTICAST_TTL)?;
Ok(raw as u32)
}
pub fn set_multicast_loop_v6(&self, multicast_loop_v6: bool) -> io::Result<()> {
setsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_MULTICAST_LOOP, multicast_loop_v6 as c_int)
}
pub fn multicast_loop_v6(&self) -> io::Result<bool> {
let raw: c_int = getsockopt(&self.inner, c::IPPROTO_IPV6, c::IPV6_MULTICAST_LOOP)?;
Ok(raw != 0)
}
pub fn join_multicast_v4(&self, multiaddr: &Ipv4Addr, interface: &Ipv4Addr)
-> io::Result<()> {
let mreq = c::ip_mreq {
imr_multiaddr: *multiaddr.as_inner(),
imr_interface: *interface.as_inner(),
};
setsockopt(&self.inner, c::IPPROTO_IP, c::IP_ADD_MEMBERSHIP, mreq)
}
pub fn join_multicast_v6(&self, multiaddr: &Ipv6Addr, interface: u32)
-> io::Result<()> {
let mreq = c::ipv6_mreq {
ipv6mr_multiaddr: *multiaddr.as_inner(),
ipv6mr_interface: to_ipv6mr_interface(interface),
};
setsockopt(&self.inner, c::IPPROTO_IPV6, IPV6_ADD_MEMBERSHIP, mreq)
}
pub fn leave_multicast_v4(&self, multiaddr: &Ipv4Addr, interface: &Ipv4Addr)
-> io::Result<()> {
let mreq = c::ip_mreq {
imr_multiaddr: *multiaddr.as_inner(),
imr_interface: *interface.as_inner(),
};
setsockopt(&self.inner, c::IPPROTO_IP, c::IP_DROP_MEMBERSHIP, mreq)
}
pub fn leave_multicast_v6(&self, multiaddr: &Ipv6Addr, interface: u32)
-> io::Result<()> {
let mreq = c::ipv6_mreq {
ipv6mr_multiaddr: *multiaddr.as_inner(),
ipv6mr_interface: to_ipv6mr_interface(interface),
};
setsockopt(&self.inner, c::IPPROTO_IPV6, IPV6_DROP_MEMBERSHIP, mreq)
}
pub fn set_ttl(&self, ttl: u32) -> io::Result<()> {
setsockopt(&self.inner, c::IPPROTO_IP, c::IP_TTL, ttl as c_int)
}
pub fn ttl(&self) -> io::Result<u32> {
let raw: c_int = getsockopt(&self.inner, c::IPPROTO_IP, c::IP_TTL)?;
Ok(raw as u32)
}
pub fn take_error(&self) -> io::Result<Option<io::Error>> {
self.inner.take_error()
}
pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {
self.inner.set_nonblocking(nonblocking)
}
pub fn recv(&self, buf: &mut [u8]) -> io::Result<usize> {
self.inner.read(buf)
}
pub fn peek(&self, buf: &mut [u8]) -> io::Result<usize> {
self.inner.peek(buf)
}
pub fn send(&self, buf: &[u8]) -> io::Result<usize> {
let len = cmp::min(buf.len(), <wrlen_t>::max_value() as usize) as wrlen_t;
let ret = cvt(unsafe {
c::send(*self.inner.as_inner(),
buf.as_ptr() as *const c_void,
len,
MSG_NOSIGNAL)
})?;
Ok(ret as usize)
}
pub fn connect(&self, addr: io::Result<&SocketAddr>) -> io::Result<()> {
let (addrp, len) = addr?.into_inner();
cvt_r(|| unsafe { c::connect(*self.inner.as_inner(), addrp, len) }).map(|_| ())
}
}
impl FromInner<Socket> for UdpSocket {
fn from_inner(socket: Socket) -> UdpSocket {
UdpSocket { inner: socket }
}
}
impl fmt::Debug for UdpSocket {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut res = f.debug_struct("UdpSocket");
if let Ok(addr) = self.socket_addr() {
res.field("addr", &addr);
}
let name = if cfg!(windows) {"socket"} else {"fd"};
res.field(name, &self.inner.as_inner())
.finish()
}
}
#[cfg(test)]
mod tests {
use super::*;
use collections::HashMap;
#[test]
fn no_lookup_host_duplicates() {
let mut addrs = HashMap::new();
let lh = match LookupHost::try_from(("localhost", 0)) {
Ok(lh) => lh,
Err(e) => panic!("couldn't resolve `localhost': {}", e)
};
for sa in lh { *addrs.entry(sa).or_insert(0) += 1; };
assert_eq!(addrs.iter().filter(|&(_, &v)| v > 1).collect::<Vec<_>>(), vec![],
"There should be no duplicate localhost entries");
}
}
| {"socket"} |
common.rs | // Copyright 2020 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Common test functions
use grin_core::core::hash::DefaultHashable;
use grin_core::core::{Block, BlockHeader, KernelFeatures, Transaction};
use grin_core::libtx::{
build::{self, input, output},
proof::{ProofBuild, ProofBuilder},
reward,
};
use grin_core::pow::Difficulty;
use grin_core::ser::{self, PMMRable, Readable, Reader, Writeable, Writer};
use keychain::{Identifier, Keychain};
// utility producing a transaction with 2 inputs and a single outputs
#[allow(dead_code)]
pub fn tx2i1o() -> Transaction {
let keychain = keychain::ExtKeychain::from_random_seed(false).unwrap();
let builder = ProofBuilder::new(&keychain);
let key_id1 = keychain::ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let key_id2 = keychain::ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
let key_id3 = keychain::ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
build::transaction(
KernelFeatures::Plain { fee: 2 },
vec![input(10, key_id1), input(11, key_id2), output(19, key_id3)],
&keychain,
&builder,
)
.unwrap()
}
// utility producing a transaction with a single input and output
#[allow(dead_code)]
pub fn tx1i1o() -> Transaction {
let keychain = keychain::ExtKeychain::from_random_seed(false).unwrap();
let builder = ProofBuilder::new(&keychain);
let key_id1 = keychain::ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let key_id2 = keychain::ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
build::transaction(
KernelFeatures::Plain { fee: 2 },
vec![input(5, key_id1), output(3, key_id2)],
&keychain,
&builder,
)
.unwrap()
}
// utility producing a transaction with a single input
// and two outputs (one change output)
// Note: this tx has an "offset" kernel
#[allow(dead_code)]
pub fn tx1i2o() -> Transaction {
let keychain = keychain::ExtKeychain::from_random_seed(false).unwrap();
let builder = ProofBuilder::new(&keychain);
let key_id1 = keychain::ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let key_id2 = keychain::ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
let key_id3 = keychain::ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
build::transaction(
KernelFeatures::Plain { fee: 2 },
vec![input(6, key_id1), output(3, key_id2), output(1, key_id3)],
&keychain,
&builder,
)
.unwrap()
}
// utility to create a block without worrying about the key or previous
// header
#[allow(dead_code)]
pub fn new_block<K, B>(
txs: Vec<&Transaction>,
keychain: &K,
builder: &B,
previous_header: &BlockHeader,
key_id: &Identifier,
) -> Block
where
K: Keychain,
B: ProofBuild,
|
// utility producing a transaction that spends an output with the provided
// value and blinding key
#[allow(dead_code)]
pub fn txspend1i1o<K, B>(
v: u64,
keychain: &K,
builder: &B,
key_id1: Identifier,
key_id2: Identifier,
) -> Transaction
where
K: Keychain,
B: ProofBuild,
{
build::transaction(
KernelFeatures::Plain { fee: 2 },
vec![input(v, key_id1), output(3, key_id2)],
keychain,
builder,
)
.unwrap()
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct TestElem(pub [u32; 4]);
impl DefaultHashable for TestElem {}
impl PMMRable for TestElem {
type E = Self;
fn as_elmt(&self) -> Self::E {
*self
}
fn elmt_size() -> Option<u16> {
Some(16)
}
}
impl Writeable for TestElem {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
writer.write_u32(self.0[0])?;
writer.write_u32(self.0[1])?;
writer.write_u32(self.0[2])?;
writer.write_u32(self.0[3])
}
}
impl Readable for TestElem {
fn read(reader: &mut dyn Reader) -> Result<TestElem, ser::Error> {
Ok(TestElem([
reader.read_u32()?,
reader.read_u32()?,
reader.read_u32()?,
reader.read_u32()?,
]))
}
}
| {
let fees = txs.iter().map(|tx| tx.fee()).sum();
let reward_output = reward::output(
keychain,
builder,
&key_id,
fees,
false,
previous_header.height + 1,
)
.unwrap();
Block::new(
&previous_header,
txs.into_iter().cloned().collect(),
Difficulty::min(),
reward_output,
)
.unwrap()
} |
ze_generated_example_restorepointcollections_client_test.go | //go:build go1.18
// +build go1.18
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
package armcompute_test
import (
"context"
"log"
"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/compute/armcompute"
)
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/compute/resource-manager/Microsoft.Compute/stable/2022-03-01/ComputeRP/examples/restorePointExamples/RestorePointCollection_CreateOrUpdate_ForCrossRegionCopy.json
func ExampleRestorePointCollectionsClient_CreateOrUpdate() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armcompute.NewRestorePointCollectionsClient("{subscription-id}", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.CreateOrUpdate(ctx,
"myResourceGroup",
"myRpc",
armcompute.RestorePointCollection{
Location: to.Ptr("norwayeast"),
Tags: map[string]*string{
"myTag1": to.Ptr("tagValue1"),
},
Properties: &armcompute.RestorePointCollectionProperties{
Source: &armcompute.RestorePointCollectionSourceProperties{
ID: to.Ptr("/subscriptions/{subscription-id}/resourceGroups/myResourceGroup/providers/Microsoft.Compute/restorePointCollections/sourceRpcName"),
},
},
},
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/compute/resource-manager/Microsoft.Compute/stable/2022-03-01/ComputeRP/examples/restorePointExamples/RestorePointCollections_Update_MaximumSet_Gen.json
func ExampleRestorePointCollectionsClient_Update() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armcompute.NewRestorePointCollectionsClient("{subscription-id}", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.Update(ctx,
"rgcompute",
"aaaaaaaaaaaaaaaaaaaa",
armcompute.RestorePointCollectionUpdate{
Tags: map[string]*string{
"key8536": to.Ptr("aaaaaaaaaaaaaaaaaaa"),
},
Properties: &armcompute.RestorePointCollectionProperties{
Source: &armcompute.RestorePointCollectionSourceProperties{
ID: to.Ptr("/subscriptions/{subscription-id}/resourceGroups/myResourceGroup/providers/Microsoft.Compute/virtualMachines/myVM"),
},
},
},
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/compute/resource-manager/Microsoft.Compute/stable/2022-03-01/ComputeRP/examples/restorePointExamples/RestorePointCollections_Delete_MaximumSet_Gen.json
func ExampleRestorePointCollectionsClient_BeginDelete() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armcompute.NewRestorePointCollectionsClient("{subscription-id}", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
poller, err := client.BeginDelete(ctx,
"rgcompute",
"aaaaaaaaaaaaaaaaa",
nil)
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
_, err = poller.PollUntilDone(ctx, nil)
if err != nil {
log.Fatalf("failed to pull the result: %v", err)
}
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/compute/resource-manager/Microsoft.Compute/stable/2022-03-01/ComputeRP/examples/restorePointExamples/RestorePointCollection_Get.json
func ExampleRestorePointCollectionsClient_Get() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armcompute.NewRestorePointCollectionsClient("{subscription-id}", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
res, err := client.Get(ctx,
"myResourceGroup",
"myRpc",
&armcompute.RestorePointCollectionsClientGetOptions{Expand: nil})
if err != nil {
log.Fatalf("failed to finish the request: %v", err)
}
// TODO: use response item
_ = res
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/compute/resource-manager/Microsoft.Compute/stable/2022-03-01/ComputeRP/examples/restorePointExamples/RestorePointCollection_ListByResourceGroup.json
func ExampleRestorePointCollectionsClient_NewListPager() {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armcompute.NewRestorePointCollectionsClient("{subscription-id}", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
pager := client.NewListPager("myResourceGroup",
nil)
for pager.More() {
nextResult, err := pager.NextPage(ctx)
if err != nil {
log.Fatalf("failed to advance page: %v", err)
}
for _, v := range nextResult.Value {
// TODO: use page item
_ = v
}
}
}
// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/compute/resource-manager/Microsoft.Compute/stable/2022-03-01/ComputeRP/examples/restorePointExamples/RestorePointCollection_ListBySubscription.json
func | () {
cred, err := azidentity.NewDefaultAzureCredential(nil)
if err != nil {
log.Fatalf("failed to obtain a credential: %v", err)
}
ctx := context.Background()
client, err := armcompute.NewRestorePointCollectionsClient("{subscription-id}", cred, nil)
if err != nil {
log.Fatalf("failed to create client: %v", err)
}
pager := client.NewListAllPager(nil)
for pager.More() {
nextResult, err := pager.NextPage(ctx)
if err != nil {
log.Fatalf("failed to advance page: %v", err)
}
for _, v := range nextResult.Value {
// TODO: use page item
_ = v
}
}
}
| ExampleRestorePointCollectionsClient_NewListAllPager |
publicipprefixes.go | package network
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// PublicIPPrefixesClient is the network Client
type PublicIPPrefixesClient struct {
BaseClient
}
// NewPublicIPPrefixesClient creates an instance of the PublicIPPrefixesClient client.
func NewPublicIPPrefixesClient(subscriptionID string) PublicIPPrefixesClient |
// NewPublicIPPrefixesClientWithBaseURI creates an instance of the PublicIPPrefixesClient client.
func NewPublicIPPrefixesClientWithBaseURI(baseURI string, subscriptionID string) PublicIPPrefixesClient {
return PublicIPPrefixesClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// CreateOrUpdate creates or updates a static or dynamic public IP prefix.
// Parameters:
// resourceGroupName - the name of the resource group.
// publicIPPrefixName - the name of the public IP prefix.
// parameters - parameters supplied to the create or update public IP prefix operation.
func (client PublicIPPrefixesClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, publicIPPrefixName string, parameters PublicIPPrefix) (result PublicIPPrefixesCreateOrUpdateFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/PublicIPPrefixesClient.CreateOrUpdate")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, publicIPPrefixName, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
result, err = client.CreateOrUpdateSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "CreateOrUpdate", result.Response(), "Failure sending request")
return
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client PublicIPPrefixesClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, publicIPPrefixName string, parameters PublicIPPrefix) (*http.Request, error) {
pathParameters := map[string]interface{}{
"publicIpPrefixName": autorest.Encode("path", publicIPPrefixName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-07-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client PublicIPPrefixesClient) CreateOrUpdateSender(req *http.Request) (future PublicIPPrefixesCreateOrUpdateFuture, err error) {
var resp *http.Response
resp, err = autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client PublicIPPrefixesClient) CreateOrUpdateResponder(resp *http.Response) (result PublicIPPrefix, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete deletes the specified public IP prefix.
// Parameters:
// resourceGroupName - the name of the resource group.
// publicIPPrefixName - the name of the PublicIpPrefix.
func (client PublicIPPrefixesClient) Delete(ctx context.Context, resourceGroupName string, publicIPPrefixName string) (result PublicIPPrefixesDeleteFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/PublicIPPrefixesClient.Delete")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.DeletePreparer(ctx, resourceGroupName, publicIPPrefixName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "Delete", nil, "Failure preparing request")
return
}
result, err = client.DeleteSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "Delete", result.Response(), "Failure sending request")
return
}
return
}
// DeletePreparer prepares the Delete request.
func (client PublicIPPrefixesClient) DeletePreparer(ctx context.Context, resourceGroupName string, publicIPPrefixName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"publicIpPrefixName": autorest.Encode("path", publicIPPrefixName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-07-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client PublicIPPrefixesClient) DeleteSender(req *http.Request) (future PublicIPPrefixesDeleteFuture, err error) {
var resp *http.Response
resp, err = autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client PublicIPPrefixesClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// Get gets the specified public IP prefix in a specified resource group.
// Parameters:
// resourceGroupName - the name of the resource group.
// publicIPPrefixName - the name of the PublicIPPrefx.
// expand - expands referenced resources.
func (client PublicIPPrefixesClient) Get(ctx context.Context, resourceGroupName string, publicIPPrefixName string, expand string) (result PublicIPPrefix, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/PublicIPPrefixesClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.GetPreparer(ctx, resourceGroupName, publicIPPrefixName, expand)
if err != nil {
err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client PublicIPPrefixesClient) GetPreparer(ctx context.Context, resourceGroupName string, publicIPPrefixName string, expand string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"publicIpPrefixName": autorest.Encode("path", publicIPPrefixName),
		"resourceGroupName":  autorest.Encode("path", resourceGroupName),
		"subscriptionId":     autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2018-07-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}
	// $expand is optional and only added to the query when supplied.
	if len(expand) > 0 {
		queryParameters["$expand"] = autorest.Encode("query", expand)
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client PublicIPPrefixesClient) GetSender(req *http.Request) (*http.Response, error) {
	// Standard send with retry and provider-registration handling.
	return autorest.SendWithSender(client, req,
		azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client PublicIPPrefixesClient) GetResponder(resp *http.Response) (result PublicIPPrefix, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		// Decode the JSON body into the PublicIPPrefix result.
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
// List gets all public IP prefixes in a resource group.
// Parameters:
// resourceGroupName - the name of the resource group.
func (client PublicIPPrefixesClient) List(ctx context.Context, resourceGroupName string) (result PublicIPPrefixListResultPage, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/PublicIPPrefixesClient.List")
		defer func() {
			sc := -1
			if result.piplr.Response.Response != nil {
				sc = result.piplr.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Wire up the pager so result.Next() can fetch subsequent pages.
	result.fn = client.listNextResults
	req, err := client.ListPreparer(ctx, resourceGroupName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "List", nil, "Failure preparing request")
		return
	}

	resp, err := client.ListSender(req)
	if err != nil {
		result.piplr.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "List", resp, "Failure sending request")
		return
	}

	result.piplr, err = client.ListResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "List", resp, "Failure responding to request")
	}

	return
}
// ListPreparer prepares the List request.
func (client PublicIPPrefixesClient) ListPreparer(ctx context.Context, resourceGroupName string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"resourceGroupName": autorest.Encode("path", resourceGroupName),
		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2018-07-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	// GET over the resource-group-scoped publicIPPrefixes collection.
	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client PublicIPPrefixesClient) ListSender(req *http.Request) (*http.Response, error) {
	// Standard send with retry and provider-registration handling.
	return autorest.SendWithSender(client, req,
		azure.DoRetryWithRegistration(client.Client))
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client PublicIPPrefixesClient) ListResponder(resp *http.Response) (result PublicIPPrefixListResult, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		// Decode one page of results from the JSON body.
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
// listNextResults retrieves the next set of results, if any.
func (client PublicIPPrefixesClient) listNextResults(ctx context.Context, lastResults PublicIPPrefixListResult) (result PublicIPPrefixListResult, err error) {
	req, err := lastResults.publicIPPrefixListResultPreparer(ctx)
	if err != nil {
		return result, autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "listNextResults", nil, "Failure preparing next results request")
	}
	// A nil request means the previous page had no nextLink: iteration is done.
	if req == nil {
		return
	}
	resp, err := client.ListSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		return result, autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "listNextResults", resp, "Failure sending next results request")
	}
	result, err = client.ListResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "listNextResults", resp, "Failure responding to next results request")
	}
	return
}
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client PublicIPPrefixesClient) ListComplete(ctx context.Context, resourceGroupName string) (result PublicIPPrefixListResultIterator, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/PublicIPPrefixesClient.List")
		defer func() {
			sc := -1
			if result.Response().Response.Response != nil {
				sc = result.page.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// The iterator wraps the first page; it fetches further pages on demand.
	result.page, err = client.List(ctx, resourceGroupName)
	return
}
// ListAll gets all the public IP prefixes in a subscription.
func (client PublicIPPrefixesClient) ListAll(ctx context.Context) (result PublicIPPrefixListResultPage, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/PublicIPPrefixesClient.ListAll")
		defer func() {
			sc := -1
			if result.piplr.Response.Response != nil {
				sc = result.piplr.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Wire up the pager so result.Next() can fetch subsequent pages.
	result.fn = client.listAllNextResults
	req, err := client.ListAllPreparer(ctx)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "ListAll", nil, "Failure preparing request")
		return
	}

	resp, err := client.ListAllSender(req)
	if err != nil {
		result.piplr.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "ListAll", resp, "Failure sending request")
		return
	}

	result.piplr, err = client.ListAllResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "ListAll", resp, "Failure responding to request")
	}

	return
}
// ListAllPreparer prepares the ListAll request.
func (client PublicIPPrefixesClient) ListAllPreparer(ctx context.Context) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"subscriptionId": autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2018-07-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	// GET over the subscription-scoped publicIPPrefixes collection.
	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/publicIPPrefixes", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListAllSender sends the ListAll request. The method will close the
// http.Response Body if it receives an error.
func (client PublicIPPrefixesClient) ListAllSender(req *http.Request) (*http.Response, error) {
	// Standard send with retry and provider-registration handling.
	return autorest.SendWithSender(client, req,
		azure.DoRetryWithRegistration(client.Client))
}
// ListAllResponder handles the response to the ListAll request. The method always
// closes the http.Response Body.
func (client PublicIPPrefixesClient) ListAllResponder(resp *http.Response) (result PublicIPPrefixListResult, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		// Decode one page of results from the JSON body.
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
// listAllNextResults retrieves the next set of results, if any.
func (client PublicIPPrefixesClient) listAllNextResults(ctx context.Context, lastResults PublicIPPrefixListResult) (result PublicIPPrefixListResult, err error) {
	req, err := lastResults.publicIPPrefixListResultPreparer(ctx)
	if err != nil {
		return result, autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "listAllNextResults", nil, "Failure preparing next results request")
	}
	// A nil request means the previous page had no nextLink: iteration is done.
	if req == nil {
		return
	}
	resp, err := client.ListAllSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		return result, autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "listAllNextResults", resp, "Failure sending next results request")
	}
	result, err = client.ListAllResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "listAllNextResults", resp, "Failure responding to next results request")
	}
	return
}
// ListAllComplete enumerates all values, automatically crossing page boundaries as required.
func (client PublicIPPrefixesClient) ListAllComplete(ctx context.Context) (result PublicIPPrefixListResultIterator, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/PublicIPPrefixesClient.ListAll")
		defer func() {
			sc := -1
			if result.Response().Response.Response != nil {
				sc = result.page.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// The iterator wraps the first page; it fetches further pages on demand.
	result.page, err = client.ListAll(ctx)
	return
}
// UpdateTags updates public IP prefix tags.
// Parameters:
// resourceGroupName - the name of the resource group.
// publicIPPrefixName - the name of the public IP prefix.
// parameters - parameters supplied to update public IP prefix tags.
func (client PublicIPPrefixesClient) UpdateTags(ctx context.Context, resourceGroupName string, publicIPPrefixName string, parameters TagsObject) (result PublicIPPrefixesUpdateTagsFuture, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/PublicIPPrefixesClient.UpdateTags")
		defer func() {
			sc := -1
			if result.Response() != nil {
				sc = result.Response().StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.UpdateTagsPreparer(ctx, resourceGroupName, publicIPPrefixName, parameters)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "UpdateTags", nil, "Failure preparing request")
		return
	}

	// Long-running operation: the sender returns a future immediately;
	// UpdateTagsResponder is used by the caller once polling completes.
	result, err = client.UpdateTagsSender(req)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.PublicIPPrefixesClient", "UpdateTags", result.Response(), "Failure sending request")
		return
	}

	return
}
// UpdateTagsPreparer prepares the UpdateTags request.
func (client PublicIPPrefixesClient) UpdateTagsPreparer(ctx context.Context, resourceGroupName string, publicIPPrefixName string, parameters TagsObject) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"publicIpPrefixName": autorest.Encode("path", publicIPPrefixName),
		"resourceGroupName":  autorest.Encode("path", resourceGroupName),
		"subscriptionId":     autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2018-07-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	// Tags are updated with an HTTP PATCH carrying the TagsObject as JSON.
	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPatch(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}", pathParameters),
		autorest.WithJSON(parameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// UpdateTagsSender sends the UpdateTags request. The method will close the
// http.Response Body if it receives an error.
func (client PublicIPPrefixesClient) UpdateTagsSender(req *http.Request) (future PublicIPPrefixesUpdateTagsFuture, err error) {
	var resp *http.Response
	resp, err = autorest.SendWithSender(client, req,
		azure.DoRetryWithRegistration(client.Client))
	if err != nil {
		return
	}
	// UpdateTags is a long-running operation; wrap the initial response in a
	// future that the caller polls for completion.
	future.Future, err = azure.NewFutureFromResponse(resp)
	return
}
// UpdateTagsResponder handles the response to the UpdateTags request. The method always
// closes the http.Response Body.
func (client PublicIPPrefixesClient) UpdateTagsResponder(resp *http.Response) (result PublicIPPrefix, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		// Decode the updated prefix from the JSON body.
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
| {
return NewPublicIPPrefixesClientWithBaseURI(DefaultBaseURI, subscriptionID)
} |
asgi.py | """
An ASGI middleware.
Based on Tom Christie's `sentry-asgi <https://github.com/encode/sentry-asgi>`_.
"""
import asyncio
import inspect
import urllib
from sentry_sdk._functools import partial
from sentry_sdk._types import MYPY
from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.integrations._wsgi_common import _filter_headers
from sentry_sdk.utils import (
ContextVar,
event_from_exception,
transaction_from_function,
HAS_REAL_CONTEXTVARS,
CONTEXTVARS_ERROR_MESSAGE,
)
from sentry_sdk.tracing import Transaction
if MYPY:
from typing import Dict
from typing import Any
from typing import Optional
from typing import Callable
from typing_extensions import Literal
from sentry_sdk._types import Event, Hint
# Flag used to detect when this middleware wraps itself (see _run_app), so
# only the outermost instance manages hub/transaction state.
_asgi_middleware_applied = ContextVar("sentry_asgi_middleware_applied")

# Transaction name used until routing information becomes available.
_DEFAULT_TRANSACTION_NAME = "generic ASGI request"
def _capture_exception(hub, exc):
    # type: (Hub, Any) -> None
    """Report *exc* to Sentry through *hub*, if a client is still bound."""
    # The client may have been unset while the response was streaming.
    if hub.client is None:
        return
    event, hint = event_from_exception(
        exc,
        client_options=hub.client.options,
        mechanism={"type": "asgi", "handled": False},
    )
    hub.capture_event(event, hint=hint)
def _looks_like_asgi3(app):
# type: (Any) -> bool
"""
Try to figure out if an application object supports ASGI3.
This is how uvicorn figures out the application version as well.
"""
if inspect.isclass(app):
return hasattr(app, "__await__")
elif inspect.isfunction(app):
return asyncio.iscoroutinefunction(app)
else:
call = getattr(app, "__call__", None) # noqa
return asyncio.iscoroutinefunction(call)
class SentryAsgiMiddleware:
    # __call__ is assigned per-instance in __init__ so the same class can
    # serve both ASGI2 and ASGI3 applications.
    __slots__ = ("app", "__call__")

    def __init__(self, app, unsafe_context_data=False):
        # type: (Any, bool) -> None
        """
        Instrument an ASGI application with Sentry. Provides HTTP/websocket
        data to sent events and basic handling for exceptions bubbling up
        through the middleware.

        :param unsafe_context_data: Disable errors when a proper contextvars installation could not be found. We do not recommend changing this from the default.
        """
        if not unsafe_context_data and not HAS_REAL_CONTEXTVARS:
            # We better have contextvars or we're going to leak state between
            # requests.
            raise RuntimeError(
                "The ASGI middleware for Sentry requires Python 3.7+ "
                "or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
            )
        self.app = app

        if _looks_like_asgi3(app):
            self.__call__ = self._run_asgi3  # type: Callable[..., Any]
        else:
            self.__call__ = self._run_asgi2

    def _run_asgi2(self, scope):
        # type: (Any) -> Any
        # ASGI2: the app is called with the scope and returns a coroutine
        # factory taking (receive, send).
        async def inner(receive, send):
            # type: (Any, Any) -> Any
            return await self._run_app(scope, lambda: self.app(scope)(receive, send))

        return inner

    async def _run_asgi3(self, scope, receive, send):
        # type: (Any, Any, Any) -> Any
        return await self._run_app(scope, lambda: self.app(scope, receive, send))

    async def _run_app(self, scope, callback):
        # type: (Any, Any) -> Any
        # If this middleware is applied twice in the same stack, only the
        # outermost instance manages hub and transaction state; inner
        # instances just forward the call and capture exceptions.
        is_recursive_asgi_middleware = _asgi_middleware_applied.get(False)

        if is_recursive_asgi_middleware:
            try:
                return await callback()
            except Exception as exc:
                _capture_exception(Hub.current, exc)
                raise exc from None

        _asgi_middleware_applied.set(True)
        try:
            # Fork the current hub so per-request scope changes stay isolated.
            hub = Hub(Hub.current)
            with hub:
                with hub.configure_scope() as sentry_scope:
                    sentry_scope.clear_breadcrumbs()
                    sentry_scope._name = "asgi"
                    processor = partial(self.event_processor, asgi_scope=scope)
                    sentry_scope.add_event_processor(processor)

                ty = scope["type"]

                if ty in ("http", "websocket"):
                    # Continue a trace from incoming headers when present.
                    transaction = Transaction.continue_from_headers(
                        self._get_headers(scope),
                        op="{}.server".format(ty),
                    )
                else:
                    transaction = Transaction(op="asgi.server")

                transaction.name = _DEFAULT_TRANSACTION_NAME
                transaction.set_tag("asgi.type", ty)

                with hub.start_transaction(
                    transaction, custom_sampling_context={"asgi_scope": scope}
                ):
                    # XXX: Would be cool to have correct span status, but we
                    # would have to wrap send(). That is a bit hard to do with
                    # the current abstraction over ASGI 2/3.
                    try:
                        return await callback()
                    except Exception as exc:
                        _capture_exception(hub, exc)
                        raise exc from None
        finally:
            _asgi_middleware_applied.set(False)

    def event_processor(self, event, hint, asgi_scope):
        # type: (Event, Hint, Any) -> Optional[Event]
        request_info = event.get("request", {})

        ty = asgi_scope["type"]
        if ty in ("http", "websocket"):
            request_info["method"] = asgi_scope.get("method")
            request_info["headers"] = headers = _filter_headers(
                self._get_headers(asgi_scope)
            )
            request_info["query_string"] = self._get_query(asgi_scope)

            request_info["url"] = self._get_url(
                asgi_scope, "http" if ty == "http" else "ws", headers.get("host")
            )

        client = asgi_scope.get("client")
        # Only attach the client IP when PII sending is enabled.
        if client and _should_send_default_pii():
            request_info["env"] = {"REMOTE_ADDR": self._get_ip(asgi_scope)}

        if (
            event.get("transaction", _DEFAULT_TRANSACTION_NAME)
            == _DEFAULT_TRANSACTION_NAME
        ):
            endpoint = asgi_scope.get("endpoint")
            # Webframeworks like Starlette mutate the ASGI env once routing is
            # done, which is sometime after the request has started. If we have
            # an endpoint, overwrite our generic transaction name.
            if endpoint:
                event["transaction"] = transaction_from_function(endpoint)

        event["request"] = request_info

        return event

    # Helper functions for extracting request data.
    #
    # Note: Those functions are not public API. If you want to mutate request
    # data to your liking it's recommended to use the `before_send` callback
    # for that.

    def _get_url(self, scope, default_scheme, host):
        # type: (Dict[str, Any], Literal["ws", "http"], Optional[str]) -> str
        """
        Extract URL from the ASGI scope, without also including the querystring.
        """
        scheme = scope.get("scheme", default_scheme)
        server = scope.get("server", None)
        path = scope.get("root_path", "") + scope.get("path", "")

        if host:
            return "%s://%s%s" % (scheme, host, path)

        if server is not None:
            host, port = server
            # Omit the port when it matches the scheme's default.
            default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
            if port != default_port:
                return "%s://%s:%s%s" % (scheme, host, port, path)
            return "%s://%s%s" % (scheme, host, path)
        return path

    def _get_query(self, scope):
        # type: (Any) -> Any
        """
        Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
        """
        qs = scope.get("query_string")
        if not qs:
            return None
        return urllib.parse.unquote(qs.decode("latin-1"))

    def _get_ip(self, scope):
        # type: (Any) -> str
        """
        Extract IP Address from the ASGI scope based on request headers with fallback to scope client.
        """
        headers = self._get_headers(scope)
        try:
            return headers["x-forwarded-for"].split(",")[0].strip()
        except (KeyError, IndexError):
            pass

        try:
            return headers["x-real-ip"]
        except KeyError:
            pass

        # NOTE(review): assumes scope["client"] is a (host, port) pair and not
        # None here -- event_processor only calls this when "client" is truthy.
        return scope.get("client")[0]

    def _get_headers(self, scope):
        # type: (Any) -> Dict[str, str]
        """
        Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
        """
        headers = {}  # type: Dict[str, str]
        for raw_key, raw_value in scope["headers"]:
            key = raw_key.decode("latin-1")
            value = raw_value.decode("latin-1")
            if key in headers:
                # Repeated headers are joined with ", " per HTTP convention.
                headers[key] = headers[key] + ", " + value
            else:
                headers[key] = value
        return headers
component.tsx | import CropSquareIcon from '@mui/icons-material/CropSquare';
import { Box, Chip, Grid, Typography } from '@mui/material';
import BigNumber from 'bignumber.js';
import { useEffect, useState } from 'react';
import { getEmitter } from '../../../../util/emitter.util';
import { GlobalEvent } from '../../../../util/types';
import { BootstrapTooltip } from '../../../common/components/tooltip';
// NOTE: as extracted, the `useEffect(() => {` opener had been displaced to the
// end of the file, leaving the component syntactically invalid. Reassembled in
// the correct order below; logic is otherwise unchanged.
export const Footer: React.FC = () => {
  // Latest block height reported by the network, displayed in the footer.
  const [networkBlockHeight, setNetworkBlockHeight] = useState(new BigNumber(0))
  // Toggled based on the last digit's parity so the square icon appears to rotate.
  const [rotated, setRotated] = useState(false)
  const emitter = getEmitter();
  useEffect(() => {
    const handleEvent = (height: BigNumber) => {
      const heightString = height.toString();
      let heightRef = height;
      if (heightString.length > 1) {
        // Only the last digit matters for the parity-driven rotation.
        heightRef = new BigNumber(heightString.substr(heightString.length - 1, heightString.length))
      }
      setRotated(heightRef.mod(2).eq(0));
      setNetworkBlockHeight(height);
    }
    emitter.on(GlobalEvent.NetworkBlockHeightChanged, handleEvent)
    // Unsubscribe on unmount (or when the emitter changes).
    return () => {
      emitter.off(GlobalEvent.NetworkBlockHeightChanged, handleEvent)
    };
  }, [emitter]);
  return (
    <Box sx={{ py: "10px", px: "24px" }}>
      <Grid container direction="row" justifyContent="flex-end" alignItems="center" spacing={1}>
        <Grid item>
          <BootstrapTooltip title="Network block height" placement="top" arrow>
            <Typography variant="caption" sx={{ verticalAlign: "middle", display: "inline-flex" }}>
              <CropSquareIcon sx={{
                color: "gray",
                mr: "2px",
                transform: rotated ? "rotate(45deg)" : "rotate(0deg)",
                fontSize: "17px"
              }} />
              {networkBlockHeight.toString()}
            </Typography>
          </BootstrapTooltip>
        </Grid>
        <Grid item>
          <BootstrapTooltip title="Version" placement="top" arrow>
            <Chip label={'v' + process.env.REACT_APP_VERSION} variant="outlined" size="small" />
          </BootstrapTooltip>
        </Grid>
      </Grid>
    </Box>
  )
}
lvm.py | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright (c) 2011 Piston Cloud Computing, Inc
# Copyright (c) 2011 OpenStack Foundation
# (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_concurrency import processutils
from oslo_log import log as logging
from oslo_utils import units
import six
import nova.conf
from nova import exception
from nova.i18n import _
from nova.i18n import _LW
from nova.virt.libvirt import utils
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
def create_volume(vg, lv, size, sparse=False):
    """Create LVM image.

    Creates a LVM image with given size.

    :param vg: existing volume group which should hold this image
    :param lv: name for this image (logical volume)
    :size: size of image in bytes
    :sparse: create sparse logical volume
    """
    free_space = get_volume_group_info(vg)['free']

    def check_size(vg, lv, size):
        # Refuse to create anything the volume group cannot hold outright.
        if size > free_space:
            msg = _('Insufficient Space on Volume Group %(vg)s.'
                    ' Only %(free_space)db available,'
                    ' but %(size)d bytes required'
                    ' by volume %(lv)s.')
            raise RuntimeError(msg % {'vg': vg,
                                      'free_space': free_space,
                                      'size': size,
                                      'lv': lv})

    if not sparse:
        check_size(vg, lv, size)
        cmd = ('lvcreate', '-L', '%db' % size, '-n', lv, vg)
    else:
        # Sparse volumes only need the small preallocation up front; warn if
        # the full virtual size would not fit when actually written.
        preallocated_space = 64 * units.Mi
        check_size(vg, lv, preallocated_space)
        if free_space < size:
            LOG.warning(_LW('Volume group %(vg)s will not be able'
                            ' to hold sparse volume %(lv)s.'
                            ' Virtual volume size is %(size)d bytes,'
                            ' but free space on volume group is'
                            ' only %(free_space)db.'),
                        {'vg': vg,
                         'free_space': free_space,
                         'size': size,
                         'lv': lv})
        cmd = ('lvcreate', '-L', '%db' % preallocated_space,
               '--virtualsize', '%db' % size, '-n', lv, vg)
    utils.execute(*cmd, run_as_root=True, attempts=3)
def get_volume_group_info(vg):
    """Return free/used/total space info for a volume group in bytes.

    :param vg: volume group name
    :returns: A dict containing:
             :total: How big the filesystem is (in bytes)
             :free: How much space is free (in bytes)
             :used: How much space is used (in bytes)
    """
    out, err = utils.execute('vgs', '--noheadings', '--nosuffix',
                             '--separator', '|',
                             '--units', 'b', '-o', 'vg_size,vg_free', vg,
                             run_as_root=True)

    # Expect exactly "size|free" for a single volume group.
    fields = out.split('|')
    if len(fields) != 2:
        raise RuntimeError(_("vg %s must be LVM volume group") % vg)

    total = int(fields[0])
    free = int(fields[1])
    return {'total': total,
            'free': free,
            'used': total - free}
def list_volumes(vg):
|
def volume_info(path):
    """Get logical volume info.

    :param path: logical volume path
    :returns: Return a dict object including info of given logical volume
              : Data format example
              : {'#Seg': '1', 'Move': '', 'Log': '', 'Meta%': '', 'Min': '-1',
              : ...
              : 'Free': '9983', 'LV': 'volume-aaa', 'Host': 'xyz.com',
              : 'Active': 'active', 'Path': '/dev/vg/volume-aaa', '#LV': '3',
              : 'Maj': '-1', 'VSize': '50.00g', 'VFree': '39.00g', 'Pool': '',
              : 'VG Tags': '', 'KMaj': '253', 'Convert': '', 'LProfile': '',
              : '#Ext': '12799', 'Attr': '-wi-a-----', 'VG': 'vg',
              : ...
              : 'LSize': '1.00g', '#PV': '1', '#VMdaCps': 'unmanaged'}
    """
    out, err = utils.execute('lvs', '-o', 'vg_all,lv_all',
                             '--separator', '|', path, run_as_root=True)

    # Expect exactly two rows: a header row and a single value row.
    rows = [line.split('|') for line in out.splitlines()]
    if len(rows) != 2:
        raise RuntimeError(_("Path %s must be LVM logical volume") % path)

    header_row, value_row = rows
    return dict(zip(header_row, value_row))
def get_volume_size(path):
    """Get logical volume size in bytes.

    :param path: logical volume path
    :raises: processutils.ProcessExecutionError if getting the volume size
             fails in some unexpected way.
    :raises: exception.VolumeBDMPathNotFound if the volume path does not exist.
    """
    try:
        out, _err = utils.execute('blockdev', '--getsize64', path,
                                  run_as_root=True)
    except processutils.ProcessExecutionError:
        # Translate a failure on a missing device into a specific exception;
        # anything else is unexpected and propagates as-is.
        if utils.path_exists(path):
            raise
        raise exception.VolumeBDMPathNotFound(path=path)
    return int(out)
def _zero_volume(path, volume_size):
    """Write zeros over the specified path.

    :param path: logical volume path
    :param volume_size: number of zero bytes to write
    """
    bs = units.Mi
    direct_flags = ('oflag=direct',)
    sync_flags = ()
    remaining_bytes = volume_size

    # The loop efficiently writes zeros using dd,
    # and caters for versions of dd that don't have
    # the easier to use iflag=count_bytes option.
    #
    # BUGFIX: the original used true division ("/"), which under Python 3
    # produces floats for dd's seek/count arguments and for the shrinking
    # block size. Floor division ("//") keeps them integral and is identical
    # to the old Python 2 behaviour for these non-negative ints.
    while remaining_bytes:
        zero_blocks = remaining_bytes // bs
        seek_blocks = (volume_size - remaining_bytes) // bs
        zero_cmd = ('dd', 'bs=%s' % bs,
                    'if=/dev/zero', 'of=%s' % path,
                    'seek=%s' % seek_blocks, 'count=%s' % zero_blocks)
        zero_cmd += direct_flags
        zero_cmd += sync_flags
        if zero_blocks:
            utils.execute(*zero_cmd, run_as_root=True)
        remaining_bytes %= bs
        bs //= units.Ki  # Limit to 3 iterations
        # Use O_DIRECT with initial block size and fdatasync otherwise
        direct_flags = ()
        sync_flags = ('conv=fdatasync',)
def clear_volume(path):
    """Obfuscate the logical volume.

    :param path: logical volume path
    """
    method = CONF.libvirt.volume_clear
    if method == 'none':
        return

    max_clear_bytes = int(CONF.libvirt.volume_clear_size) * units.Mi

    try:
        bytes_to_clear = get_volume_size(path)
    except exception.VolumeBDMPathNotFound:
        # Nothing to clear if the volume is already gone.
        LOG.warning(_LW('ignoring missing logical volume %(path)s'),
                    {'path': path})
        return

    # Optionally cap how much of the volume is overwritten.
    if max_clear_bytes != 0 and max_clear_bytes < bytes_to_clear:
        bytes_to_clear = max_clear_bytes

    if method == 'zero':
        # NOTE(p-draigbrady): we could use shred to do the zeroing
        # with -n0 -z, however only versions >= 8.22 perform as well as dd
        _zero_volume(path, bytes_to_clear)
    elif method == 'shred':
        utils.execute('shred', '-n3', '-s%d' % bytes_to_clear, path,
                      run_as_root=True)
def remove_volumes(paths):
    """Remove one or more logical volume."""
    errors = []
    for path in paths:
        # Scrub data first (per CONF.libvirt.volume_clear), then remove.
        clear_volume(path)
        try:
            utils.execute('lvremove', '-f', path,
                          attempts=3, run_as_root=True)
        except processutils.ProcessExecutionError as exp:
            # Collect failures so every volume is attempted before raising.
            errors.append(six.text_type(exp))
    if errors:
        raise exception.VolumesNotRemoved(reason=(', ').join(errors))
| """List logical volumes paths for given volume group.
:param vg: volume group name
:returns: Return a logical volume list for given volume group
: Data format example
: ['volume-aaa', 'volume-bbb', 'volume-ccc']
"""
out, err = utils.execute('lvs', '--noheadings', '-o', 'lv_name', vg,
run_as_root=True)
return [line.strip() for line in out.splitlines()] |
config.py | import os
class Config():
    """Base (production) configuration.

    Values are read from the environment so secrets and per-deployment
    settings never need to live in source control.
    """
    DEBUG = False
    TESTING = False
    # SECURITY FIX: the JWT signing secret was hard-coded. Read it from the
    # environment; the old literal is kept only as a backward-compatible
    # fallback and must be overridden in any real deployment.
    JWT_SECRET_KEY = os.environ.get('JWT_SECRET_KEY', 'jwt-secret-string')
    JWT_BLACKLIST_ENABLED = True
    JWT_BLACKLIST_TOKEN_CHECKS = ['access', 'refresh']

    # Database connection settings.
    DB_HOST = os.environ.get('DB_HOST')
    DB_USERNAME = os.environ.get('DB_USERNAME')
    DB_PASS = os.environ.get('DB_PASS')
    DB_NAME = os.environ.get('DB_NAME')
    DB_PORT = os.environ.get('DB_PORT')

    # Outgoing mail (Flask-Mail style) settings.
    MAIL_DEFAULT_SENDER = '[email protected]'
    MAIL_SERVER = os.environ.get('MAIL_SERVER')
    MAIL_PORT = os.environ.get('MAIL_PORT')
    MAIL_USE_TLS = os.environ.get('MAIL_USE_TLS')
    MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
    MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
class DevelopmentConfig(Config):
    # Local development: enable Flask debug mode.
    DEBUG = True
    ENV = "development"
class TestingConfig(Config):
    # Test runs enable Flask testing mode and point at a separate database.
    TESTING = True
    DB_NAME = os.environ.get('DB_NAME_TEST')
# Map each deployment environment name to its configuration class.
settings = {
    'development': DevelopmentConfig,
    'testing': TestingConfig,
    'production': Config,
}
|
error.rs | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// Error type returned by the `AddTagsToStream` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct AddTagsToStreamError {
    /// The specific kind of error that occurred.
    pub kind: AddTagsToStreamErrorKind,
    /// Generic error metadata (message, code, request id).
    pub(crate) meta: smithy_types::Error,
}
/// The kinds of errors the `AddTagsToStream` operation can return.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum AddTagsToStreamErrorKind {
    /// Wraps a modeled `InvalidArgumentException`.
    InvalidArgumentException(crate::error::InvalidArgumentException),
    /// Wraps a modeled `ResourceInUseException`.
    ResourceInUseException(crate::error::ResourceInUseException),
    /// Wraps a modeled `LimitExceededException`.
    LimitExceededException(crate::error::LimitExceededException),
    /// Wraps a modeled `ResourceNotFoundException`.
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for AddTagsToStreamError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Formatting is delegated to whichever concrete error is wrapped.
        match &self.kind {
            AddTagsToStreamErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
            AddTagsToStreamErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
            AddTagsToStreamErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            AddTagsToStreamErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            AddTagsToStreamErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
impl smithy_types::retry::ProvideErrorKind for AddTagsToStreamError {
    fn code(&self) -> Option<&str> {
        // Delegate to the inherent `code` accessor on the error metadata.
        AddTagsToStreamError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        // No retry classification is provided for this operation's errors.
        None
    }
}
impl AddTagsToStreamError {
    /// Construct an error from an already-classified kind plus metadata.
    pub fn new(kind: AddTagsToStreamErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }

    /// Wrap an arbitrary error as `Unhandled`, with default (empty) metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: AddTagsToStreamErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }

    /// Wrap generic error metadata as `Unhandled`, keeping a clone as `meta`.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: AddTagsToStreamErrorKind::Unhandled(err.into()),
        }
    }

    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// The error message carried in the response metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }

    /// The generic error metadata (message, code, request id).
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }

    /// The request ID carried in the response metadata, if any.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }

    /// The service error code carried in the response metadata, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }

    /// Returns true if the error kind is `InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            AddTagsToStreamErrorKind::InvalidArgumentException(_)
        )
    }

    /// Returns true if the error kind is `ResourceInUseException`.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(
            &self.kind,
            AddTagsToStreamErrorKind::ResourceInUseException(_)
        )
    }

    /// Returns true if the error kind is `LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            AddTagsToStreamErrorKind::LimitExceededException(_)
        )
    }

    /// Returns true if the error kind is `ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            AddTagsToStreamErrorKind::ResourceNotFoundException(_)
        )
    }
}
impl std::error::Error for AddTagsToStreamError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
AddTagsToStreamErrorKind::InvalidArgumentException(_inner) => Some(_inner),
AddTagsToStreamErrorKind::ResourceInUseException(_inner) => Some(_inner),
AddTagsToStreamErrorKind::LimitExceededException(_inner) => Some(_inner),
AddTagsToStreamErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
AddTagsToStreamErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// Error type for the `CreateStream` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateStreamError {
    /// Kind of error that occurred.
    pub kind: CreateStreamErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `CreateStream` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateStreamErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display delegates to the Display impl of the matched variant.
impl std::fmt::Display for CreateStreamError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            CreateStreamErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
            CreateStreamErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
            CreateStreamErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            CreateStreamErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Retry support: exposes the service error code; no variant is modeled as
// retryable here (presumably per the smithy model — confirm against it).
impl smithy_types::retry::ProvideErrorKind for CreateStreamError {
    fn code(&self) -> Option<&str> {
        CreateStreamError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl CreateStreamError {
    /// Creates a new error from an error kind and generic error metadata.
    pub fn new(kind: CreateStreamErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as the `Unhandled` variant with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: CreateStreamErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an error from generic metadata alone, stored as `Unhandled`.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: CreateStreamErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the response metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID, if it was captured from the response.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` for the `InvalidArgumentException` variant.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            CreateStreamErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` for the `ResourceInUseException` variant.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(&self.kind, CreateStreamErrorKind::ResourceInUseException(_))
    }
    /// Returns `true` for the `LimitExceededException` variant.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(&self.kind, CreateStreamErrorKind::LimitExceededException(_))
    }
}
// Exposes the concrete variant as the error's source for chain inspection.
impl std::error::Error for CreateStreamError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            CreateStreamErrorKind::InvalidArgumentException(_inner) => Some(_inner),
            CreateStreamErrorKind::ResourceInUseException(_inner) => Some(_inner),
            CreateStreamErrorKind::LimitExceededException(_inner) => Some(_inner),
            CreateStreamErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DecreaseStreamRetentionPeriod` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DecreaseStreamRetentionPeriodError {
    /// Kind of error that occurred.
    pub kind: DecreaseStreamRetentionPeriodErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DecreaseStreamRetentionPeriod` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DecreaseStreamRetentionPeriodErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display delegates to the Display impl of the matched variant.
impl std::fmt::Display for DecreaseStreamRetentionPeriodError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DecreaseStreamRetentionPeriodErrorKind::InvalidArgumentException(_inner) => {
                _inner.fmt(f)
            }
            DecreaseStreamRetentionPeriodErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
            DecreaseStreamRetentionPeriodErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            DecreaseStreamRetentionPeriodErrorKind::ResourceNotFoundException(_inner) => {
                _inner.fmt(f)
            }
            DecreaseStreamRetentionPeriodErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Retry support: exposes the service error code; no variant is modeled as
// retryable here (presumably per the smithy model — confirm against it).
impl smithy_types::retry::ProvideErrorKind for DecreaseStreamRetentionPeriodError {
    fn code(&self) -> Option<&str> {
        DecreaseStreamRetentionPeriodError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DecreaseStreamRetentionPeriodError {
    /// Creates a new error from an error kind and generic error metadata.
    pub fn new(kind: DecreaseStreamRetentionPeriodErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as the `Unhandled` variant with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DecreaseStreamRetentionPeriodErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an error from generic metadata alone, stored as `Unhandled`.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DecreaseStreamRetentionPeriodErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the response metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID, if it was captured from the response.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` for the `InvalidArgumentException` variant.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            DecreaseStreamRetentionPeriodErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` for the `ResourceInUseException` variant.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(
            &self.kind,
            DecreaseStreamRetentionPeriodErrorKind::ResourceInUseException(_)
        )
    }
    /// Returns `true` for the `LimitExceededException` variant.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            DecreaseStreamRetentionPeriodErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` for the `ResourceNotFoundException` variant.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            DecreaseStreamRetentionPeriodErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the concrete variant as the error's source for chain inspection.
impl std::error::Error for DecreaseStreamRetentionPeriodError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DecreaseStreamRetentionPeriodErrorKind::InvalidArgumentException(_inner) => {
                Some(_inner)
            }
            DecreaseStreamRetentionPeriodErrorKind::ResourceInUseException(_inner) => Some(_inner),
            DecreaseStreamRetentionPeriodErrorKind::LimitExceededException(_inner) => Some(_inner),
            DecreaseStreamRetentionPeriodErrorKind::ResourceNotFoundException(_inner) => {
                Some(_inner)
            }
            DecreaseStreamRetentionPeriodErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DeleteStream` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteStreamError {
    /// Kind of error that occurred.
    pub kind: DeleteStreamErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DeleteStream` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteStreamErrorKind {
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display delegates to the Display impl of the matched variant.
impl std::fmt::Display for DeleteStreamError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DeleteStreamErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
            DeleteStreamErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            DeleteStreamErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            DeleteStreamErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Retry support: exposes the service error code; no variant is modeled as
// retryable here (presumably per the smithy model — confirm against it).
impl smithy_types::retry::ProvideErrorKind for DeleteStreamError {
    fn code(&self) -> Option<&str> {
        DeleteStreamError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DeleteStreamError {
    /// Creates a new error from an error kind and generic error metadata.
    pub fn new(kind: DeleteStreamErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as the `Unhandled` variant with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DeleteStreamErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an error from generic metadata alone, stored as `Unhandled`.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DeleteStreamErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the response metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID, if it was captured from the response.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` for the `ResourceInUseException` variant.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(&self.kind, DeleteStreamErrorKind::ResourceInUseException(_))
    }
    /// Returns `true` for the `LimitExceededException` variant.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(&self.kind, DeleteStreamErrorKind::LimitExceededException(_))
    }
    /// Returns `true` for the `ResourceNotFoundException` variant.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            DeleteStreamErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the concrete variant as the error's source for chain inspection.
impl std::error::Error for DeleteStreamError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DeleteStreamErrorKind::ResourceInUseException(_inner) => Some(_inner),
            DeleteStreamErrorKind::LimitExceededException(_inner) => Some(_inner),
            DeleteStreamErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            DeleteStreamErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DeregisterStreamConsumer` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeregisterStreamConsumerError {
    /// Kind of error that occurred.
    pub kind: DeregisterStreamConsumerErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DeregisterStreamConsumer` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeregisterStreamConsumerErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display delegates to the Display impl of the matched variant.
impl std::fmt::Display for DeregisterStreamConsumerError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DeregisterStreamConsumerErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
            DeregisterStreamConsumerErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            DeregisterStreamConsumerErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            DeregisterStreamConsumerErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Retry support: exposes the service error code; no variant is modeled as
// retryable here (presumably per the smithy model — confirm against it).
impl smithy_types::retry::ProvideErrorKind for DeregisterStreamConsumerError {
    fn code(&self) -> Option<&str> {
        DeregisterStreamConsumerError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DeregisterStreamConsumerError {
    /// Creates a new error from an error kind and generic error metadata.
    pub fn new(kind: DeregisterStreamConsumerErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as the `Unhandled` variant with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DeregisterStreamConsumerErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an error from generic metadata alone, stored as `Unhandled`.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DeregisterStreamConsumerErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the response metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID, if it was captured from the response.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` for the `InvalidArgumentException` variant.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            DeregisterStreamConsumerErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` for the `LimitExceededException` variant.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            DeregisterStreamConsumerErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` for the `ResourceNotFoundException` variant.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            DeregisterStreamConsumerErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the concrete variant as the error's source for chain inspection.
impl std::error::Error for DeregisterStreamConsumerError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DeregisterStreamConsumerErrorKind::InvalidArgumentException(_inner) => Some(_inner),
            DeregisterStreamConsumerErrorKind::LimitExceededException(_inner) => Some(_inner),
            DeregisterStreamConsumerErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            DeregisterStreamConsumerErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DescribeLimits` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeLimitsError {
    /// Kind of error that occurred.
    pub kind: DescribeLimitsErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DescribeLimits` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeLimitsErrorKind {
    LimitExceededException(crate::error::LimitExceededException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display delegates to the Display impl of the matched variant.
impl std::fmt::Display for DescribeLimitsError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DescribeLimitsErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            DescribeLimitsErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Retry support: exposes the service error code; no variant is modeled as
// retryable here (presumably per the smithy model — confirm against it).
impl smithy_types::retry::ProvideErrorKind for DescribeLimitsError {
    fn code(&self) -> Option<&str> {
        DescribeLimitsError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DescribeLimitsError {
    /// Creates a new error from an error kind and generic error metadata.
    pub fn new(kind: DescribeLimitsErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as the `Unhandled` variant with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DescribeLimitsErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an error from generic metadata alone, stored as `Unhandled`.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DescribeLimitsErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the response metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID, if it was captured from the response.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` for the `LimitExceededException` variant.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeLimitsErrorKind::LimitExceededException(_)
        )
    }
}
// Exposes the concrete variant as the error's source for chain inspection.
impl std::error::Error for DescribeLimitsError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DescribeLimitsErrorKind::LimitExceededException(_inner) => Some(_inner),
            DescribeLimitsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DescribeStream` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStreamError {
    /// Kind of error that occurred.
    pub kind: DescribeStreamErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DescribeStream` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStreamErrorKind {
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display delegates to the Display impl of the matched variant.
impl std::fmt::Display for DescribeStreamError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DescribeStreamErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            DescribeStreamErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            DescribeStreamErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Retry support: exposes the service error code; no variant is modeled as
// retryable here (presumably per the smithy model — confirm against it).
impl smithy_types::retry::ProvideErrorKind for DescribeStreamError {
    fn code(&self) -> Option<&str> {
        DescribeStreamError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DescribeStreamError {
    /// Creates a new error from an error kind and generic error metadata.
    pub fn new(kind: DescribeStreamErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as the `Unhandled` variant with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DescribeStreamErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an error from generic metadata alone, stored as `Unhandled`.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DescribeStreamErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the response metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID, if it was captured from the response.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` for the `LimitExceededException` variant.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeStreamErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` for the `ResourceNotFoundException` variant.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeStreamErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the concrete variant as the error's source for chain inspection.
impl std::error::Error for DescribeStreamError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DescribeStreamErrorKind::LimitExceededException(_inner) => Some(_inner),
            DescribeStreamErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            DescribeStreamErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DescribeStreamConsumer` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStreamConsumerError {
    /// Kind of error that occurred.
    pub kind: DescribeStreamConsumerErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DescribeStreamConsumer` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStreamConsumerErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display delegates to the Display impl of the matched variant.
impl std::fmt::Display for DescribeStreamConsumerError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DescribeStreamConsumerErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
            DescribeStreamConsumerErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            DescribeStreamConsumerErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            DescribeStreamConsumerErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Retry support: exposes the service error code; no variant is modeled as
// retryable here (presumably per the smithy model — confirm against it).
impl smithy_types::retry::ProvideErrorKind for DescribeStreamConsumerError {
    fn code(&self) -> Option<&str> {
        DescribeStreamConsumerError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DescribeStreamConsumerError {
    /// Creates a new error from an error kind and generic error metadata.
    pub fn new(kind: DescribeStreamConsumerErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as the `Unhandled` variant with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DescribeStreamConsumerErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an error from generic metadata alone, stored as `Unhandled`.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DescribeStreamConsumerErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the response metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID, if it was captured from the response.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` for the `InvalidArgumentException` variant.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeStreamConsumerErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` for the `LimitExceededException` variant.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeStreamConsumerErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` for the `ResourceNotFoundException` variant.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeStreamConsumerErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the concrete variant as the error's source for chain inspection.
impl std::error::Error for DescribeStreamConsumerError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DescribeStreamConsumerErrorKind::InvalidArgumentException(_inner) => Some(_inner),
            DescribeStreamConsumerErrorKind::LimitExceededException(_inner) => Some(_inner),
            DescribeStreamConsumerErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            DescribeStreamConsumerErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DescribeStreamSummary` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStreamSummaryError {
    /// Kind of error that occurred.
    pub kind: DescribeStreamSummaryErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DescribeStreamSummary` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStreamSummaryErrorKind {
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display delegates to the Display impl of the matched variant.
impl std::fmt::Display for DescribeStreamSummaryError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DescribeStreamSummaryErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            DescribeStreamSummaryErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            DescribeStreamSummaryErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Retry support: exposes the service error code; no variant is modeled as
// retryable here (presumably per the smithy model — confirm against it).
impl smithy_types::retry::ProvideErrorKind for DescribeStreamSummaryError {
    fn code(&self) -> Option<&str> {
        DescribeStreamSummaryError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DescribeStreamSummaryError {
    /// Creates a new error from an error kind and generic error metadata.
    pub fn new(kind: DescribeStreamSummaryErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as the `Unhandled` variant with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DescribeStreamSummaryErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an error from generic metadata alone, stored as `Unhandled`.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DescribeStreamSummaryErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the response metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID, if it was captured from the response.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` for the `LimitExceededException` variant.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeStreamSummaryErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` for the `ResourceNotFoundException` variant.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeStreamSummaryErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the concrete variant as the error's source for chain inspection.
impl std::error::Error for DescribeStreamSummaryError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DescribeStreamSummaryErrorKind::LimitExceededException(_inner) => Some(_inner),
            DescribeStreamSummaryErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            DescribeStreamSummaryErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DisableEnhancedMonitoring` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DisableEnhancedMonitoringError {
    /// Kind of error that occurred.
    pub kind: DisableEnhancedMonitoringErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DisableEnhancedMonitoring` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DisableEnhancedMonitoringErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display delegates to the Display impl of the matched variant.
impl std::fmt::Display for DisableEnhancedMonitoringError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DisableEnhancedMonitoringErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
            DisableEnhancedMonitoringErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
            DisableEnhancedMonitoringErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            DisableEnhancedMonitoringErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            DisableEnhancedMonitoringErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Retry support: exposes the service error code; no variant is modeled as
// retryable here (presumably per the smithy model — confirm against it).
impl smithy_types::retry::ProvideErrorKind for DisableEnhancedMonitoringError {
    fn code(&self) -> Option<&str> {
        DisableEnhancedMonitoringError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DisableEnhancedMonitoringError {
    /// Creates a new error from an error kind and generic error metadata.
    pub fn new(kind: DisableEnhancedMonitoringErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as the `Unhandled` variant with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DisableEnhancedMonitoringErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an error from generic metadata alone, stored as `Unhandled`.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DisableEnhancedMonitoringErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the response metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID, if it was captured from the response.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` for the `InvalidArgumentException` variant.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            DisableEnhancedMonitoringErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` for the `ResourceInUseException` variant.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(
            &self.kind,
            DisableEnhancedMonitoringErrorKind::ResourceInUseException(_)
        )
    }
    /// Returns `true` for the `LimitExceededException` variant.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            DisableEnhancedMonitoringErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` for the `ResourceNotFoundException` variant.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            DisableEnhancedMonitoringErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the concrete variant as the error's source for chain inspection.
impl std::error::Error for DisableEnhancedMonitoringError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DisableEnhancedMonitoringErrorKind::InvalidArgumentException(_inner) => Some(_inner),
            DisableEnhancedMonitoringErrorKind::ResourceInUseException(_inner) => Some(_inner),
            DisableEnhancedMonitoringErrorKind::LimitExceededException(_inner) => Some(_inner),
            DisableEnhancedMonitoringErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            DisableEnhancedMonitoringErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `EnableEnhancedMonitoring` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct EnableEnhancedMonitoringError {
    /// Kind of error that occurred.
    pub kind: EnableEnhancedMonitoringErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `EnableEnhancedMonitoring` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum EnableEnhancedMonitoringErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display delegates to the Display impl of the matched variant.
impl std::fmt::Display for EnableEnhancedMonitoringError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            EnableEnhancedMonitoringErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
            EnableEnhancedMonitoringErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
            EnableEnhancedMonitoringErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            EnableEnhancedMonitoringErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            EnableEnhancedMonitoringErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Retry support: exposes the service error code; no variant is modeled as
// retryable here (presumably per the smithy model — confirm against it).
impl smithy_types::retry::ProvideErrorKind for EnableEnhancedMonitoringError {
    fn code(&self) -> Option<&str> {
        EnableEnhancedMonitoringError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl EnableEnhancedMonitoringError {
    /// Creates a new error from an error kind and generic error metadata.
    pub fn new(kind: EnableEnhancedMonitoringErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as the `Unhandled` variant with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: EnableEnhancedMonitoringErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an error from generic metadata alone, stored as `Unhandled`.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: EnableEnhancedMonitoringErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the response metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID, if it was captured from the response.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` for the `InvalidArgumentException` variant.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            EnableEnhancedMonitoringErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` for the `ResourceInUseException` variant.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(
            &self.kind,
            EnableEnhancedMonitoringErrorKind::ResourceInUseException(_)
        )
    }
    /// Returns `true` for the `LimitExceededException` variant.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            EnableEnhancedMonitoringErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` for the `ResourceNotFoundException` variant.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            EnableEnhancedMonitoringErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the concrete variant as the error's source for chain inspection.
impl std::error::Error for EnableEnhancedMonitoringError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            EnableEnhancedMonitoringErrorKind::InvalidArgumentException(_inner) => Some(_inner),
            EnableEnhancedMonitoringErrorKind::ResourceInUseException(_inner) => Some(_inner),
            EnableEnhancedMonitoringErrorKind::LimitExceededException(_inner) => Some(_inner),
            EnableEnhancedMonitoringErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            EnableEnhancedMonitoringErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetRecordsError {
pub kind: GetRecordsErrorKind,
pub(crate) meta: smithy_types::Error,
}
/// The kinds of errors the `GetRecords` operation can return.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetRecordsErrorKind {
    KmsAccessDeniedException(crate::error::KmsAccessDeniedException),
    KmsDisabledException(crate::error::KmsDisabledException),
    KmsNotFoundException(crate::error::KmsNotFoundException),
    ExpiredIteratorException(crate::error::ExpiredIteratorException),
    KmsOptInRequired(crate::error::KmsOptInRequired),
    KmsThrottlingException(crate::error::KmsThrottlingException),
    ProvisionedThroughputExceededException(crate::error::ProvisionedThroughputExceededException),
    InvalidArgumentException(crate::error::InvalidArgumentException),
    KmsInvalidStateException(crate::error::KmsInvalidStateException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetRecordsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetRecordsErrorKind::KmsAccessDeniedException(_inner) => _inner.fmt(f),
GetRecordsErrorKind::KmsDisabledException(_inner) => _inner.fmt(f),
GetRecordsErrorKind::KmsNotFoundException(_inner) => _inner.fmt(f),
GetRecordsErrorKind::ExpiredIteratorException(_inner) => _inner.fmt(f),
GetRecordsErrorKind::KmsOptInRequired(_inner) => _inner.fmt(f),
GetRecordsErrorKind::KmsThrottlingException(_inner) => _inner.fmt(f),
GetRecordsErrorKind::ProvisionedThroughputExceededException(_inner) => _inner.fmt(f),
GetRecordsErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
GetRecordsErrorKind::KmsInvalidStateException(_inner) => _inner.fmt(f),
GetRecordsErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
GetRecordsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetRecordsError {
    /// Forwards to the inherent `code` accessor on the error metadata.
    fn code(&self) -> Option<&str> {
        Self::code(self)
    }
    /// These errors are not modeled as retryable.
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl GetRecordsError {
    /// Creates a new error from a specific kind plus generic error metadata.
    pub fn new(kind: GetRecordsErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as `Unhandled`; metadata is defaulted.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: GetRecordsErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an `Unhandled` error that preserves the generic metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            // `meta` must be populated before `err` is moved into the kind.
            meta: err.clone(),
            kind: GetRecordsErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the originating request id, if any.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the kind is `KmsAccessDeniedException`.
    pub fn is_kms_access_denied_exception(&self) -> bool {
        matches!(&self.kind, GetRecordsErrorKind::KmsAccessDeniedException(_))
    }
    /// Returns `true` if the kind is `KmsDisabledException`.
    pub fn is_kms_disabled_exception(&self) -> bool {
        matches!(&self.kind, GetRecordsErrorKind::KmsDisabledException(_))
    }
    /// Returns `true` if the kind is `KmsNotFoundException`.
    // BUG FIX: this predicate previously had no function body (a bare
    // signature), which is a syntax error; restored the body following the
    // pattern of its sibling predicates.
    pub fn is_kms_not_found_exception(&self) -> bool {
        matches!(&self.kind, GetRecordsErrorKind::KmsNotFoundException(_))
    }
    /// Returns `true` if the kind is `ExpiredIteratorException`.
    pub fn is_expired_iterator_exception(&self) -> bool {
        matches!(&self.kind, GetRecordsErrorKind::ExpiredIteratorException(_))
    }
    /// Returns `true` if the kind is `KmsOptInRequired`.
    pub fn is_kms_opt_in_required(&self) -> bool {
        matches!(&self.kind, GetRecordsErrorKind::KmsOptInRequired(_))
    }
    /// Returns `true` if the kind is `KmsThrottlingException`.
    pub fn is_kms_throttling_exception(&self) -> bool {
        matches!(&self.kind, GetRecordsErrorKind::KmsThrottlingException(_))
    }
    /// Returns `true` if the kind is `ProvisionedThroughputExceededException`.
    pub fn is_provisioned_throughput_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            GetRecordsErrorKind::ProvisionedThroughputExceededException(_)
        )
    }
    /// Returns `true` if the kind is `InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(&self.kind, GetRecordsErrorKind::InvalidArgumentException(_))
    }
    /// Returns `true` if the kind is `KmsInvalidStateException`.
    pub fn is_kms_invalid_state_exception(&self) -> bool {
        matches!(&self.kind, GetRecordsErrorKind::KmsInvalidStateException(_))
    }
    /// Returns `true` if the kind is `ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            GetRecordsErrorKind::ResourceNotFoundException(_)
        )
    }
}
impl std::error::Error for GetRecordsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetRecordsErrorKind::KmsAccessDeniedException(_inner) => Some(_inner),
GetRecordsErrorKind::KmsDisabledException(_inner) => Some(_inner),
GetRecordsErrorKind::KmsNotFoundException(_inner) => Some(_inner),
GetRecordsErrorKind::ExpiredIteratorException(_inner) => Some(_inner),
GetRecordsErrorKind::KmsOptInRequired(_inner) => Some(_inner),
GetRecordsErrorKind::KmsThrottlingException(_inner) => Some(_inner),
GetRecordsErrorKind::ProvisionedThroughputExceededException(_inner) => Some(_inner),
GetRecordsErrorKind::InvalidArgumentException(_inner) => Some(_inner),
GetRecordsErrorKind::KmsInvalidStateException(_inner) => Some(_inner),
GetRecordsErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
GetRecordsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// Error type returned by the `GetShardIterator` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetShardIteratorError {
    /// The specific kind of error that occurred.
    pub kind: GetShardIteratorErrorKind,
    /// Generic error metadata (message, code, request id).
    pub(crate) meta: smithy_types::Error,
}
/// The kinds of errors the `GetShardIterator` operation can return.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetShardIteratorErrorKind {
    ProvisionedThroughputExceededException(crate::error::ProvisionedThroughputExceededException),
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetShardIteratorError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetShardIteratorErrorKind::ProvisionedThroughputExceededException(_inner) => {
_inner.fmt(f)
}
GetShardIteratorErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
GetShardIteratorErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
GetShardIteratorErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetShardIteratorError {
    /// Forwards to the inherent `code` accessor on the error metadata.
    fn code(&self) -> Option<&str> {
        Self::code(self)
    }
    /// These errors are not modeled as retryable.
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl GetShardIteratorError {
    /// Creates a new error from a specific kind plus generic error metadata.
    pub fn new(kind: GetShardIteratorErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as `Unhandled`; metadata is defaulted.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: GetShardIteratorErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an `Unhandled` error that preserves the generic metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: GetShardIteratorErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the originating request id, if any.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the kind is `ProvisionedThroughputExceededException`.
    pub fn is_provisioned_throughput_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            GetShardIteratorErrorKind::ProvisionedThroughputExceededException(_)
        )
    }
    /// Returns `true` if the kind is `InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            GetShardIteratorErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` if the kind is `ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            GetShardIteratorErrorKind::ResourceNotFoundException(_)
        )
    }
}
impl std::error::Error for GetShardIteratorError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetShardIteratorErrorKind::ProvisionedThroughputExceededException(_inner) => {
Some(_inner)
}
GetShardIteratorErrorKind::InvalidArgumentException(_inner) => Some(_inner),
GetShardIteratorErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
GetShardIteratorErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// Error type returned by the `IncreaseStreamRetentionPeriod` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct IncreaseStreamRetentionPeriodError {
    /// The specific kind of error that occurred.
    pub kind: IncreaseStreamRetentionPeriodErrorKind,
    /// Generic error metadata (message, code, request id).
    pub(crate) meta: smithy_types::Error,
}
/// The kinds of errors the `IncreaseStreamRetentionPeriod` operation can return.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum IncreaseStreamRetentionPeriodErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for IncreaseStreamRetentionPeriodError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
IncreaseStreamRetentionPeriodErrorKind::InvalidArgumentException(_inner) => {
_inner.fmt(f)
}
IncreaseStreamRetentionPeriodErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
IncreaseStreamRetentionPeriodErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
IncreaseStreamRetentionPeriodErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
IncreaseStreamRetentionPeriodErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for IncreaseStreamRetentionPeriodError {
    /// Forwards to the inherent `code` accessor on the error metadata.
    fn code(&self) -> Option<&str> {
        Self::code(self)
    }
    /// These errors are not modeled as retryable.
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl IncreaseStreamRetentionPeriodError {
    /// Creates a new error from a specific kind plus generic error metadata.
    pub fn new(kind: IncreaseStreamRetentionPeriodErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as `Unhandled`; metadata is defaulted.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: IncreaseStreamRetentionPeriodErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an `Unhandled` error that preserves the generic metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: IncreaseStreamRetentionPeriodErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the originating request id, if any.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the kind is `InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            IncreaseStreamRetentionPeriodErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` if the kind is `ResourceInUseException`.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(
            &self.kind,
            IncreaseStreamRetentionPeriodErrorKind::ResourceInUseException(_)
        )
    }
    /// Returns `true` if the kind is `LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            IncreaseStreamRetentionPeriodErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` if the kind is `ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            IncreaseStreamRetentionPeriodErrorKind::ResourceNotFoundException(_)
        )
    }
}
impl std::error::Error for IncreaseStreamRetentionPeriodError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
IncreaseStreamRetentionPeriodErrorKind::InvalidArgumentException(_inner) => {
Some(_inner)
}
IncreaseStreamRetentionPeriodErrorKind::ResourceInUseException(_inner) => Some(_inner),
IncreaseStreamRetentionPeriodErrorKind::LimitExceededException(_inner) => Some(_inner),
IncreaseStreamRetentionPeriodErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
IncreaseStreamRetentionPeriodErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// Error type returned by the `ListShards` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListShardsError {
    /// The specific kind of error that occurred.
    pub kind: ListShardsErrorKind,
    /// Generic error metadata (message, code, request id).
    pub(crate) meta: smithy_types::Error,
}
/// The kinds of errors the `ListShards` operation can return.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListShardsErrorKind {
    ExpiredNextTokenException(crate::error::ExpiredNextTokenException),
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListShardsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListShardsErrorKind::ExpiredNextTokenException(_inner) => _inner.fmt(f),
ListShardsErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
ListShardsErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
ListShardsErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
ListShardsErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
ListShardsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListShardsError {
    /// Forwards to the inherent `code` accessor on the error metadata.
    fn code(&self) -> Option<&str> {
        Self::code(self)
    }
    /// These errors are not modeled as retryable.
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl ListShardsError {
    /// Creates a new error from a specific kind plus generic error metadata.
    pub fn new(kind: ListShardsErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as `Unhandled`; metadata is defaulted.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: ListShardsErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an `Unhandled` error that preserves the generic metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: ListShardsErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the originating request id, if any.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the kind is `ExpiredNextTokenException`.
    pub fn is_expired_next_token_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListShardsErrorKind::ExpiredNextTokenException(_)
        )
    }
    /// Returns `true` if the kind is `InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(&self.kind, ListShardsErrorKind::InvalidArgumentException(_))
    }
    /// Returns `true` if the kind is `ResourceInUseException`.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(&self.kind, ListShardsErrorKind::ResourceInUseException(_))
    }
    /// Returns `true` if the kind is `LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(&self.kind, ListShardsErrorKind::LimitExceededException(_))
    }
    /// Returns `true` if the kind is `ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListShardsErrorKind::ResourceNotFoundException(_)
        )
    }
}
impl std::error::Error for ListShardsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListShardsErrorKind::ExpiredNextTokenException(_inner) => Some(_inner),
ListShardsErrorKind::InvalidArgumentException(_inner) => Some(_inner),
ListShardsErrorKind::ResourceInUseException(_inner) => Some(_inner),
ListShardsErrorKind::LimitExceededException(_inner) => Some(_inner),
ListShardsErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
ListShardsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// Error type returned by the `ListStreamConsumers` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListStreamConsumersError {
    /// The specific kind of error that occurred.
    pub kind: ListStreamConsumersErrorKind,
    /// Generic error metadata (message, code, request id).
    pub(crate) meta: smithy_types::Error,
}
/// The kinds of errors the `ListStreamConsumers` operation can return.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListStreamConsumersErrorKind {
    ExpiredNextTokenException(crate::error::ExpiredNextTokenException),
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListStreamConsumersError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListStreamConsumersErrorKind::ExpiredNextTokenException(_inner) => _inner.fmt(f),
ListStreamConsumersErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
ListStreamConsumersErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
ListStreamConsumersErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
ListStreamConsumersErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
ListStreamConsumersErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListStreamConsumersError {
    /// Forwards to the inherent `code` accessor on the error metadata.
    fn code(&self) -> Option<&str> {
        Self::code(self)
    }
    /// These errors are not modeled as retryable.
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl ListStreamConsumersError {
    /// Creates a new error from a specific kind plus generic error metadata.
    pub fn new(kind: ListStreamConsumersErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as `Unhandled`; metadata is defaulted.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: ListStreamConsumersErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an `Unhandled` error that preserves the generic metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: ListStreamConsumersErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the originating request id, if any.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the kind is `ExpiredNextTokenException`.
    pub fn is_expired_next_token_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListStreamConsumersErrorKind::ExpiredNextTokenException(_)
        )
    }
    /// Returns `true` if the kind is `InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListStreamConsumersErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` if the kind is `ResourceInUseException`.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListStreamConsumersErrorKind::ResourceInUseException(_)
        )
    }
    /// Returns `true` if the kind is `LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListStreamConsumersErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` if the kind is `ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListStreamConsumersErrorKind::ResourceNotFoundException(_)
        )
    }
}
impl std::error::Error for ListStreamConsumersError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListStreamConsumersErrorKind::ExpiredNextTokenException(_inner) => Some(_inner),
ListStreamConsumersErrorKind::InvalidArgumentException(_inner) => Some(_inner),
ListStreamConsumersErrorKind::ResourceInUseException(_inner) => Some(_inner),
ListStreamConsumersErrorKind::LimitExceededException(_inner) => Some(_inner),
ListStreamConsumersErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
ListStreamConsumersErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// Error type returned by the `ListStreams` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListStreamsError {
    /// The specific kind of error that occurred.
    pub kind: ListStreamsErrorKind,
    /// Generic error metadata (message, code, request id).
    pub(crate) meta: smithy_types::Error,
}
/// The kinds of errors the `ListStreams` operation can return.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListStreamsErrorKind {
    LimitExceededException(crate::error::LimitExceededException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListStreamsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListStreamsErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
ListStreamsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListStreamsError {
    /// Forwards to the inherent `code` accessor on the error metadata.
    fn code(&self) -> Option<&str> {
        Self::code(self)
    }
    /// These errors are not modeled as retryable.
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl ListStreamsError {
    /// Creates a new error from a specific kind plus generic error metadata.
    pub fn new(kind: ListStreamsErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as `Unhandled`; metadata is defaulted.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: ListStreamsErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an `Unhandled` error that preserves the generic metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: ListStreamsErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the originating request id, if any.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the kind is `LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(&self.kind, ListStreamsErrorKind::LimitExceededException(_))
    }
}
impl std::error::Error for ListStreamsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListStreamsErrorKind::LimitExceededException(_inner) => Some(_inner),
ListStreamsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// Error type returned by the `ListTagsForStream` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListTagsForStreamError {
    /// The specific kind of error that occurred.
    pub kind: ListTagsForStreamErrorKind,
    /// Generic error metadata (message, code, request id).
    pub(crate) meta: smithy_types::Error,
}
/// The kinds of errors the `ListTagsForStream` operation can return.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListTagsForStreamErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListTagsForStreamError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListTagsForStreamErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
ListTagsForStreamErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
ListTagsForStreamErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
ListTagsForStreamErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListTagsForStreamError {
    /// Forwards to the inherent `code` accessor on the error metadata.
    fn code(&self) -> Option<&str> {
        Self::code(self)
    }
    /// These errors are not modeled as retryable.
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl ListTagsForStreamError {
    /// Creates a new error from a specific kind plus generic error metadata.
    pub fn new(kind: ListTagsForStreamErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as `Unhandled`; metadata is defaulted.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: ListTagsForStreamErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an `Unhandled` error that preserves the generic metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: ListTagsForStreamErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the originating request id, if any.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the kind is `InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListTagsForStreamErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` if the kind is `LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListTagsForStreamErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` if the kind is `ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListTagsForStreamErrorKind::ResourceNotFoundException(_)
        )
    }
}
impl std::error::Error for ListTagsForStreamError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListTagsForStreamErrorKind::InvalidArgumentException(_inner) => Some(_inner),
ListTagsForStreamErrorKind::LimitExceededException(_inner) => Some(_inner),
ListTagsForStreamErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
ListTagsForStreamErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// Error type returned by the `MergeShards` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct MergeShardsError {
    /// The specific kind of error that occurred.
    pub kind: MergeShardsErrorKind,
    /// Generic error metadata (message, code, request id).
    pub(crate) meta: smithy_types::Error,
}
/// The kinds of errors the `MergeShards` operation can return.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum MergeShardsErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for MergeShardsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
MergeShardsErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
MergeShardsErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
MergeShardsErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
MergeShardsErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
MergeShardsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for MergeShardsError {
    /// Forwards to the inherent `code` accessor on the error metadata.
    fn code(&self) -> Option<&str> {
        Self::code(self)
    }
    /// These errors are not modeled as retryable.
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl MergeShardsError {
    /// Creates a new error from a specific kind plus generic error metadata.
    pub fn new(kind: MergeShardsErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as `Unhandled`; metadata is defaulted.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: MergeShardsErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an `Unhandled` error that preserves the generic metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: MergeShardsErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the originating request id, if any.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the kind is `InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            MergeShardsErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` if the kind is `ResourceInUseException`.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(&self.kind, MergeShardsErrorKind::ResourceInUseException(_))
    }
    /// Returns `true` if the kind is `LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(&self.kind, MergeShardsErrorKind::LimitExceededException(_))
    }
    /// Returns `true` if the kind is `ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            MergeShardsErrorKind::ResourceNotFoundException(_)
        )
    }
}
impl std::error::Error for MergeShardsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
MergeShardsErrorKind::InvalidArgumentException(_inner) => Some(_inner),
MergeShardsErrorKind::ResourceInUseException(_inner) => Some(_inner),
MergeShardsErrorKind::LimitExceededException(_inner) => Some(_inner),
MergeShardsErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
MergeShardsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// Error type returned by the `PutRecord` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct PutRecordError {
    /// The specific kind of error that occurred.
    pub kind: PutRecordErrorKind,
    /// Generic error metadata (message, code, request id).
    pub(crate) meta: smithy_types::Error,
}
/// The kinds of errors the `PutRecord` operation can return.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum PutRecordErrorKind {
    KmsAccessDeniedException(crate::error::KmsAccessDeniedException),
    KmsDisabledException(crate::error::KmsDisabledException),
    KmsNotFoundException(crate::error::KmsNotFoundException),
    KmsOptInRequired(crate::error::KmsOptInRequired),
    KmsThrottlingException(crate::error::KmsThrottlingException),
    ProvisionedThroughputExceededException(crate::error::ProvisionedThroughputExceededException),
    InvalidArgumentException(crate::error::InvalidArgumentException),
    KmsInvalidStateException(crate::error::KmsInvalidStateException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for PutRecordError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
PutRecordErrorKind::KmsAccessDeniedException(_inner) => _inner.fmt(f),
PutRecordErrorKind::KmsDisabledException(_inner) => _inner.fmt(f),
PutRecordErrorKind::KmsNotFoundException(_inner) => _inner.fmt(f),
PutRecordErrorKind::KmsOptInRequired(_inner) => _inner.fmt(f),
PutRecordErrorKind::KmsThrottlingException(_inner) => _inner.fmt(f),
PutRecordErrorKind::ProvisionedThroughputExceededException(_inner) => _inner.fmt(f),
PutRecordErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
PutRecordErrorKind::KmsInvalidStateException(_inner) => _inner.fmt(f),
PutRecordErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
PutRecordErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for PutRecordError {
    /// Forwards to the inherent `code` accessor on the error metadata.
    fn code(&self) -> Option<&str> {
        Self::code(self)
    }
    /// These errors are not modeled as retryable.
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl PutRecordError {
    /// Creates a new error from a specific kind plus generic error metadata.
    pub fn new(kind: PutRecordErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Wraps an arbitrary error as `Unhandled`; metadata is defaulted.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: PutRecordErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Builds an `Unhandled` error that preserves the generic metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: PutRecordErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the metadata, if any.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the originating request id, if any.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code, if any.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the kind is `KmsAccessDeniedException`.
    pub fn is_kms_access_denied_exception(&self) -> bool {
        matches!(&self.kind, PutRecordErrorKind::KmsAccessDeniedException(_))
    }
    /// Returns `true` if the kind is `KmsDisabledException`.
    pub fn is_kms_disabled_exception(&self) -> bool {
        matches!(&self.kind, PutRecordErrorKind::KmsDisabledException(_))
    }
    /// Returns `true` if the kind is `KmsNotFoundException`.
    pub fn is_kms_not_found_exception(&self) -> bool {
        matches!(&self.kind, PutRecordErrorKind::KmsNotFoundException(_))
    }
    /// Returns `true` if the kind is `KmsOptInRequired`.
    pub fn is_kms_opt_in_required(&self) -> bool {
        matches!(&self.kind, PutRecordErrorKind::KmsOptInRequired(_))
    }
    /// Returns `true` if the kind is `KmsThrottlingException`.
    pub fn is_kms_throttling_exception(&self) -> bool {
        matches!(&self.kind, PutRecordErrorKind::KmsThrottlingException(_))
    }
    /// Returns `true` if the kind is `ProvisionedThroughputExceededException`.
    pub fn is_provisioned_throughput_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            PutRecordErrorKind::ProvisionedThroughputExceededException(_)
        )
    }
    /// Returns `true` if the kind is `InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(&self.kind, PutRecordErrorKind::InvalidArgumentException(_))
    }
    /// Returns `true` if the kind is `KmsInvalidStateException`.
    pub fn is_kms_invalid_state_exception(&self) -> bool {
        matches!(&self.kind, PutRecordErrorKind::KmsInvalidStateException(_))
    }
    /// Returns `true` if the kind is `ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(&self.kind, PutRecordErrorKind::ResourceNotFoundException(_))
    }
}
impl std::error::Error for PutRecordError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
PutRecordErrorKind::KmsAccessDeniedException(_inner) => Some(_inner),
PutRecordErrorKind::KmsDisabledException(_inner) => Some(_inner),
PutRecordErrorKind::KmsNotFoundException(_inner) => Some(_inner),
PutRecordErrorKind::KmsOptInRequired(_inner) => Some(_inner),
PutRecordErrorKind::KmsThrottlingException(_inner) => Some(_inner),
PutRecordErrorKind::ProvisionedThroughputExceededException(_inner) => Some(_inner),
PutRecordErrorKind::InvalidArgumentException(_inner) => Some(_inner),
PutRecordErrorKind::KmsInvalidStateException(_inner) => Some(_inner),
PutRecordErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
PutRecordErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// Error type returned by the `PutRecords` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct PutRecordsError {
    /// The specific kind of error that occurred.
    pub kind: PutRecordsErrorKind,
    /// Generic error metadata (message, code, request id).
    pub(crate) meta: smithy_types::Error,
}
/// The kinds of errors the `PutRecords` operation can return.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum PutRecordsErrorKind {
    KmsAccessDeniedException(crate::error::KmsAccessDeniedException),
    KmsDisabledException(crate::error::KmsDisabledException),
    KmsNotFoundException(crate::error::KmsNotFoundException),
    KmsOptInRequired(crate::error::KmsOptInRequired),
    KmsThrottlingException(crate::error::KmsThrottlingException),
    ProvisionedThroughputExceededException(crate::error::ProvisionedThroughputExceededException),
    InvalidArgumentException(crate::error::InvalidArgumentException),
    KmsInvalidStateException(crate::error::KmsInvalidStateException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for PutRecordsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
PutRecordsErrorKind::KmsAccessDeniedException(_inner) => _inner.fmt(f),
PutRecordsErrorKind::KmsDisabledException(_inner) => _inner.fmt(f),
PutRecordsErrorKind::KmsNotFoundException(_inner) => _inner.fmt(f),
PutRecordsErrorKind::KmsOptInRequired(_inner) => _inner.fmt(f),
PutRecordsErrorKind::KmsThrottlingException(_inner) => _inner.fmt(f),
PutRecordsErrorKind::ProvisionedThroughputExceededException(_inner) => _inner.fmt(f),
PutRecordsErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
PutRecordsErrorKind::KmsInvalidStateException(_inner) => _inner.fmt(f),
PutRecordsErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
PutRecordsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for PutRecordsError {
fn code(&self) -> Option<&str> {
PutRecordsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl PutRecordsError {
pub fn new(kind: PutRecordsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: PutRecordsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: PutRecordsErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_kms_access_denied_exception(&self) -> bool {
matches!(&self.kind, PutRecordsErrorKind::KmsAccessDeniedException(_))
}
pub fn is_kms_disabled_exception(&self) -> bool {
matches!(&self.kind, PutRecordsErrorKind::KmsDisabledException(_))
}
pub fn is_kms_not_found_exception(&self) -> bool {
matches!(&self.kind, PutRecordsErrorKind::KmsNotFoundException(_))
}
pub fn is_kms_opt_in_required(&self) -> bool {
matches!(&self.kind, PutRecordsErrorKind::KmsOptInRequired(_))
}
pub fn is_kms_throttling_exception(&self) -> bool {
matches!(&self.kind, PutRecordsErrorKind::KmsThrottlingException(_))
}
pub fn is_provisioned_throughput_exceeded_exception(&self) -> bool {
matches!(
&self.kind,
PutRecordsErrorKind::ProvisionedThroughputExceededException(_)
)
}
pub fn is_invalid_argument_exception(&self) -> bool {
matches!(&self.kind, PutRecordsErrorKind::InvalidArgumentException(_))
}
pub fn is_kms_invalid_state_exception(&self) -> bool {
matches!(&self.kind, PutRecordsErrorKind::KmsInvalidStateException(_))
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
PutRecordsErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for PutRecordsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
PutRecordsErrorKind::KmsAccessDeniedException(_inner) => Some(_inner),
PutRecordsErrorKind::KmsDisabledException(_inner) => Some(_inner),
PutRecordsErrorKind::KmsNotFoundException(_inner) => Some(_inner),
PutRecordsErrorKind::KmsOptInRequired(_inner) => Some(_inner),
PutRecordsErrorKind::KmsThrottlingException(_inner) => Some(_inner),
PutRecordsErrorKind::ProvisionedThroughputExceededException(_inner) => Some(_inner),
PutRecordsErrorKind::InvalidArgumentException(_inner) => Some(_inner),
PutRecordsErrorKind::KmsInvalidStateException(_inner) => Some(_inner),
PutRecordsErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
PutRecordsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// Error type returned by the `RegisterStreamConsumer` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct RegisterStreamConsumerError {
    /// The kind of error that occurred.
    pub kind: RegisterStreamConsumerErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `RegisterStreamConsumer` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum RegisterStreamConsumerErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display defers to the modeled exception carried by the current kind.
impl std::fmt::Display for RegisterStreamConsumerError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            RegisterStreamConsumerErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
            RegisterStreamConsumerErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
            RegisterStreamConsumerErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            RegisterStreamConsumerErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            RegisterStreamConsumerErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Supplies the error code for retry classification; no kinds are marked retryable.
impl smithy_types::retry::ProvideErrorKind for RegisterStreamConsumerError {
    fn code(&self) -> Option<&str> {
        RegisterStreamConsumerError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl RegisterStreamConsumerError {
    /// Creates a new `RegisterStreamConsumerError` from an error kind and generic error metadata.
    pub fn new(kind: RegisterStreamConsumerErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Creates the `Unhandled` variant from any error type, with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: RegisterStreamConsumerErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Creates the `Unhandled` variant from a generic error, keeping it as the metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: RegisterStreamConsumerErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the error metadata, if available.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID from the error metadata, if available.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code from the error metadata, if available.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the error kind is `RegisterStreamConsumerErrorKind::InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            RegisterStreamConsumerErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` if the error kind is `RegisterStreamConsumerErrorKind::ResourceInUseException`.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(
            &self.kind,
            RegisterStreamConsumerErrorKind::ResourceInUseException(_)
        )
    }
    /// Returns `true` if the error kind is `RegisterStreamConsumerErrorKind::LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            RegisterStreamConsumerErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` if the error kind is `RegisterStreamConsumerErrorKind::ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            RegisterStreamConsumerErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the modeled exception as the error source for `Error::source()` chains.
impl std::error::Error for RegisterStreamConsumerError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            RegisterStreamConsumerErrorKind::InvalidArgumentException(_inner) => Some(_inner),
            RegisterStreamConsumerErrorKind::ResourceInUseException(_inner) => Some(_inner),
            RegisterStreamConsumerErrorKind::LimitExceededException(_inner) => Some(_inner),
            RegisterStreamConsumerErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            RegisterStreamConsumerErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type returned by the `RemoveTagsFromStream` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct RemoveTagsFromStreamError {
    /// The kind of error that occurred.
    pub kind: RemoveTagsFromStreamErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `RemoveTagsFromStream` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum RemoveTagsFromStreamErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display defers to the modeled exception carried by the current kind.
impl std::fmt::Display for RemoveTagsFromStreamError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            RemoveTagsFromStreamErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
            RemoveTagsFromStreamErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
            RemoveTagsFromStreamErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            RemoveTagsFromStreamErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            RemoveTagsFromStreamErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Supplies the error code for retry classification; no kinds are marked retryable.
impl smithy_types::retry::ProvideErrorKind for RemoveTagsFromStreamError {
    fn code(&self) -> Option<&str> {
        RemoveTagsFromStreamError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl RemoveTagsFromStreamError {
    /// Creates a new `RemoveTagsFromStreamError` from an error kind and generic error metadata.
    pub fn new(kind: RemoveTagsFromStreamErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Creates the `Unhandled` variant from any error type, with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: RemoveTagsFromStreamErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Creates the `Unhandled` variant from a generic error, keeping it as the metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: RemoveTagsFromStreamErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the error metadata, if available.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID from the error metadata, if available.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code from the error metadata, if available.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the error kind is `RemoveTagsFromStreamErrorKind::InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            RemoveTagsFromStreamErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` if the error kind is `RemoveTagsFromStreamErrorKind::ResourceInUseException`.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(
            &self.kind,
            RemoveTagsFromStreamErrorKind::ResourceInUseException(_)
        )
    }
    /// Returns `true` if the error kind is `RemoveTagsFromStreamErrorKind::LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            RemoveTagsFromStreamErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` if the error kind is `RemoveTagsFromStreamErrorKind::ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            RemoveTagsFromStreamErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the modeled exception as the error source for `Error::source()` chains.
impl std::error::Error for RemoveTagsFromStreamError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            RemoveTagsFromStreamErrorKind::InvalidArgumentException(_inner) => Some(_inner),
            RemoveTagsFromStreamErrorKind::ResourceInUseException(_inner) => Some(_inner),
            RemoveTagsFromStreamErrorKind::LimitExceededException(_inner) => Some(_inner),
            RemoveTagsFromStreamErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            RemoveTagsFromStreamErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type returned by the `SplitShard` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct SplitShardError {
    /// The kind of error that occurred.
    pub kind: SplitShardErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `SplitShard` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum SplitShardErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display defers to the modeled exception carried by the current kind.
impl std::fmt::Display for SplitShardError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            SplitShardErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
            SplitShardErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
            SplitShardErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            SplitShardErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            SplitShardErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Supplies the error code for retry classification; no kinds are marked retryable.
impl smithy_types::retry::ProvideErrorKind for SplitShardError {
    fn code(&self) -> Option<&str> {
        SplitShardError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl SplitShardError {
    /// Creates a new `SplitShardError` from an error kind and generic error metadata.
    pub fn new(kind: SplitShardErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Creates the `Unhandled` variant from any error type, with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: SplitShardErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Creates the `Unhandled` variant from a generic error, keeping it as the metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: SplitShardErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the error metadata, if available.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID from the error metadata, if available.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code from the error metadata, if available.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the error kind is `SplitShardErrorKind::InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(&self.kind, SplitShardErrorKind::InvalidArgumentException(_))
    }
    /// Returns `true` if the error kind is `SplitShardErrorKind::ResourceInUseException`.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(&self.kind, SplitShardErrorKind::ResourceInUseException(_))
    }
    /// Returns `true` if the error kind is `SplitShardErrorKind::LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(&self.kind, SplitShardErrorKind::LimitExceededException(_))
    }
    /// Returns `true` if the error kind is `SplitShardErrorKind::ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            SplitShardErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the modeled exception as the error source for `Error::source()` chains.
impl std::error::Error for SplitShardError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            SplitShardErrorKind::InvalidArgumentException(_inner) => Some(_inner),
            SplitShardErrorKind::ResourceInUseException(_inner) => Some(_inner),
            SplitShardErrorKind::LimitExceededException(_inner) => Some(_inner),
            SplitShardErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            SplitShardErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type returned by the `StartStreamEncryption` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct StartStreamEncryptionError {
    /// The kind of error that occurred.
    pub kind: StartStreamEncryptionErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `StartStreamEncryption` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum StartStreamEncryptionErrorKind {
    KmsAccessDeniedException(crate::error::KmsAccessDeniedException),
    KmsDisabledException(crate::error::KmsDisabledException),
    KmsNotFoundException(crate::error::KmsNotFoundException),
    KmsOptInRequired(crate::error::KmsOptInRequired),
    KmsThrottlingException(crate::error::KmsThrottlingException),
    InvalidArgumentException(crate::error::InvalidArgumentException),
    KmsInvalidStateException(crate::error::KmsInvalidStateException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display defers to the modeled exception carried by the current kind.
impl std::fmt::Display for StartStreamEncryptionError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            StartStreamEncryptionErrorKind::KmsAccessDeniedException(_inner) => _inner.fmt(f),
            StartStreamEncryptionErrorKind::KmsDisabledException(_inner) => _inner.fmt(f),
            StartStreamEncryptionErrorKind::KmsNotFoundException(_inner) => _inner.fmt(f),
            StartStreamEncryptionErrorKind::KmsOptInRequired(_inner) => _inner.fmt(f),
            StartStreamEncryptionErrorKind::KmsThrottlingException(_inner) => _inner.fmt(f),
            StartStreamEncryptionErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
            StartStreamEncryptionErrorKind::KmsInvalidStateException(_inner) => _inner.fmt(f),
            StartStreamEncryptionErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
            StartStreamEncryptionErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            StartStreamEncryptionErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            StartStreamEncryptionErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Supplies the error code for retry classification; no kinds are marked retryable.
impl smithy_types::retry::ProvideErrorKind for StartStreamEncryptionError {
    fn code(&self) -> Option<&str> {
        StartStreamEncryptionError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl StartStreamEncryptionError {
    /// Creates a new `StartStreamEncryptionError` from an error kind and generic error metadata.
    pub fn new(kind: StartStreamEncryptionErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Creates the `Unhandled` variant from any error type, with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: StartStreamEncryptionErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Creates the `Unhandled` variant from a generic error, keeping it as the metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: StartStreamEncryptionErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the error metadata, if available.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID from the error metadata, if available.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code from the error metadata, if available.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the error kind is `StartStreamEncryptionErrorKind::KmsAccessDeniedException`.
    pub fn is_kms_access_denied_exception(&self) -> bool {
        matches!(
            &self.kind,
            StartStreamEncryptionErrorKind::KmsAccessDeniedException(_)
        )
    }
    /// Returns `true` if the error kind is `StartStreamEncryptionErrorKind::KmsDisabledException`.
    pub fn is_kms_disabled_exception(&self) -> bool {
        matches!(
            &self.kind,
            StartStreamEncryptionErrorKind::KmsDisabledException(_)
        )
    }
    /// Returns `true` if the error kind is `StartStreamEncryptionErrorKind::KmsNotFoundException`.
    pub fn is_kms_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            StartStreamEncryptionErrorKind::KmsNotFoundException(_)
        )
    }
    /// Returns `true` if the error kind is `StartStreamEncryptionErrorKind::KmsOptInRequired`.
    pub fn is_kms_opt_in_required(&self) -> bool {
        matches!(
            &self.kind,
            StartStreamEncryptionErrorKind::KmsOptInRequired(_)
        )
    }
    /// Returns `true` if the error kind is `StartStreamEncryptionErrorKind::KmsThrottlingException`.
    pub fn is_kms_throttling_exception(&self) -> bool {
        matches!(
            &self.kind,
            StartStreamEncryptionErrorKind::KmsThrottlingException(_)
        )
    }
    /// Returns `true` if the error kind is `StartStreamEncryptionErrorKind::InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            StartStreamEncryptionErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` if the error kind is `StartStreamEncryptionErrorKind::KmsInvalidStateException`.
    pub fn is_kms_invalid_state_exception(&self) -> bool {
        matches!(
            &self.kind,
            StartStreamEncryptionErrorKind::KmsInvalidStateException(_)
        )
    }
    /// Returns `true` if the error kind is `StartStreamEncryptionErrorKind::ResourceInUseException`.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(
            &self.kind,
            StartStreamEncryptionErrorKind::ResourceInUseException(_)
        )
    }
    /// Returns `true` if the error kind is `StartStreamEncryptionErrorKind::LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            StartStreamEncryptionErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` if the error kind is `StartStreamEncryptionErrorKind::ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            StartStreamEncryptionErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the modeled exception as the error source for `Error::source()` chains.
impl std::error::Error for StartStreamEncryptionError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            StartStreamEncryptionErrorKind::KmsAccessDeniedException(_inner) => Some(_inner),
            StartStreamEncryptionErrorKind::KmsDisabledException(_inner) => Some(_inner),
            StartStreamEncryptionErrorKind::KmsNotFoundException(_inner) => Some(_inner),
            StartStreamEncryptionErrorKind::KmsOptInRequired(_inner) => Some(_inner),
            StartStreamEncryptionErrorKind::KmsThrottlingException(_inner) => Some(_inner),
            StartStreamEncryptionErrorKind::InvalidArgumentException(_inner) => Some(_inner),
            StartStreamEncryptionErrorKind::KmsInvalidStateException(_inner) => Some(_inner),
            StartStreamEncryptionErrorKind::ResourceInUseException(_inner) => Some(_inner),
            StartStreamEncryptionErrorKind::LimitExceededException(_inner) => Some(_inner),
            StartStreamEncryptionErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            StartStreamEncryptionErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type returned by the `StopStreamEncryption` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct StopStreamEncryptionError {
    /// The kind of error that occurred.
    pub kind: StopStreamEncryptionErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `StopStreamEncryption` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum StopStreamEncryptionErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display defers to the modeled exception carried by the current kind.
impl std::fmt::Display for StopStreamEncryptionError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            StopStreamEncryptionErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
            StopStreamEncryptionErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
            StopStreamEncryptionErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            StopStreamEncryptionErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            StopStreamEncryptionErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Supplies the error code for retry classification; no kinds are marked retryable.
impl smithy_types::retry::ProvideErrorKind for StopStreamEncryptionError {
    fn code(&self) -> Option<&str> {
        StopStreamEncryptionError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl StopStreamEncryptionError {
    /// Creates a new `StopStreamEncryptionError` from an error kind and generic error metadata.
    pub fn new(kind: StopStreamEncryptionErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Creates the `Unhandled` variant from any error type, with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: StopStreamEncryptionErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Creates the `Unhandled` variant from a generic error, keeping it as the metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: StopStreamEncryptionErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the error metadata, if available.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID from the error metadata, if available.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code from the error metadata, if available.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the error kind is `StopStreamEncryptionErrorKind::InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            StopStreamEncryptionErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` if the error kind is `StopStreamEncryptionErrorKind::ResourceInUseException`.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(
            &self.kind,
            StopStreamEncryptionErrorKind::ResourceInUseException(_)
        )
    }
    /// Returns `true` if the error kind is `StopStreamEncryptionErrorKind::LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            StopStreamEncryptionErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` if the error kind is `StopStreamEncryptionErrorKind::ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            StopStreamEncryptionErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the modeled exception as the error source for `Error::source()` chains.
impl std::error::Error for StopStreamEncryptionError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            StopStreamEncryptionErrorKind::InvalidArgumentException(_inner) => Some(_inner),
            StopStreamEncryptionErrorKind::ResourceInUseException(_inner) => Some(_inner),
            StopStreamEncryptionErrorKind::LimitExceededException(_inner) => Some(_inner),
            StopStreamEncryptionErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            StopStreamEncryptionErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type returned by the `UpdateShardCount` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateShardCountError {
    /// The kind of error that occurred.
    pub kind: UpdateShardCountErrorKind,
    /// Generic error metadata (code, message, request ID) parsed from the response.
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `UpdateShardCount` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateShardCountErrorKind {
    InvalidArgumentException(crate::error::InvalidArgumentException),
    ResourceInUseException(crate::error::ResourceInUseException),
    LimitExceededException(crate::error::LimitExceededException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
// Display defers to the modeled exception carried by the current kind.
impl std::fmt::Display for UpdateShardCountError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            UpdateShardCountErrorKind::InvalidArgumentException(_inner) => _inner.fmt(f),
            UpdateShardCountErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
            UpdateShardCountErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
            UpdateShardCountErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            UpdateShardCountErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
// Supplies the error code for retry classification; no kinds are marked retryable.
impl smithy_types::retry::ProvideErrorKind for UpdateShardCountError {
    fn code(&self) -> Option<&str> {
        UpdateShardCountError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl UpdateShardCountError {
    /// Creates a new `UpdateShardCountError` from an error kind and generic error metadata.
    pub fn new(kind: UpdateShardCountErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    /// Creates the `Unhandled` variant from any error type, with empty metadata.
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: UpdateShardCountErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    /// Creates the `Unhandled` variant from a generic error, keeping it as the metadata.
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: UpdateShardCountErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    /// Returns the error message from the error metadata, if available.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    /// Returns the generic error metadata.
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    /// Returns the request ID from the error metadata, if available.
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    /// Returns the service error code from the error metadata, if available.
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    /// Returns `true` if the error kind is `UpdateShardCountErrorKind::InvalidArgumentException`.
    pub fn is_invalid_argument_exception(&self) -> bool {
        matches!(
            &self.kind,
            UpdateShardCountErrorKind::InvalidArgumentException(_)
        )
    }
    /// Returns `true` if the error kind is `UpdateShardCountErrorKind::ResourceInUseException`.
    pub fn is_resource_in_use_exception(&self) -> bool {
        matches!(
            &self.kind,
            UpdateShardCountErrorKind::ResourceInUseException(_)
        )
    }
    /// Returns `true` if the error kind is `UpdateShardCountErrorKind::LimitExceededException`.
    pub fn is_limit_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            UpdateShardCountErrorKind::LimitExceededException(_)
        )
    }
    /// Returns `true` if the error kind is `UpdateShardCountErrorKind::ResourceNotFoundException`.
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            UpdateShardCountErrorKind::ResourceNotFoundException(_)
        )
    }
}
// Exposes the modeled exception as the error source for `Error::source()` chains.
impl std::error::Error for UpdateShardCountError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            UpdateShardCountErrorKind::InvalidArgumentException(_inner) => Some(_inner),
            UpdateShardCountErrorKind::ResourceInUseException(_inner) => Some(_inner),
            UpdateShardCountErrorKind::LimitExceededException(_inner) => Some(_inner),
            UpdateShardCountErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            UpdateShardCountErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// <p>The requested resource could not be found. The stream might not be specified
/// correctly.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ResourceNotFoundException {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
// Hand-written Debug impl producing the same output shape as a derived one.
impl std::fmt::Debug for ResourceNotFoundException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("ResourceNotFoundException");
        formatter.field("message", &self.message);
        formatter.finish()
    }
}
impl ResourceNotFoundException {
    /// Returns the error message, if one was provided.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
}
// Display prints the exception name, followed by the message when present.
impl std::fmt::Display for ResourceNotFoundException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "ResourceNotFoundException")?;
        if let Some(inner_1) = &self.message {
            write!(f, ": {}", inner_1)?;
        }
        Ok(())
    }
}
impl std::error::Error for ResourceNotFoundException {}
/// See [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
pub mod resource_not_found_exception {
    /// A builder for [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        /// Optional error message to set on the built exception.
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A message that provides information about the error.</p>
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets the message field directly; passing `None` clears it.
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
        pub fn build(self) -> crate::error::ResourceNotFoundException {
            crate::error::ResourceNotFoundException {
                message: self.message,
            }
        }
    }
}
impl ResourceNotFoundException {
    /// Creates a new builder-style object to manufacture [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
    pub fn builder() -> crate::error::resource_not_found_exception::Builder {
        crate::error::resource_not_found_exception::Builder::default()
    }
}
/// <p>The requested resource exceeds the maximum number allowed, or the number of
/// concurrent stream requests exceeds the maximum number allowed. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct LimitExceededException {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
// Hand-written Debug impl producing the same output shape as a derived one.
impl std::fmt::Debug for LimitExceededException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("LimitExceededException");
        formatter.field("message", &self.message);
        formatter.finish()
    }
}
impl LimitExceededException {
    /// Returns the error message, if one was provided.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
}
// Display prints the exception name, followed by the message when present.
impl std::fmt::Display for LimitExceededException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "LimitExceededException")?;
        if let Some(inner_2) = &self.message {
            write!(f, ": {}", inner_2)?;
        }
        Ok(())
    }
}
impl std::error::Error for LimitExceededException {}
/// See [`LimitExceededException`](crate::error::LimitExceededException)
pub mod limit_exceeded_exception {
    /// A builder for [`LimitExceededException`](crate::error::LimitExceededException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        /// Optional error message to set on the built exception.
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A message that provides information about the error.</p>
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets the message field directly; passing `None` clears it.
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`LimitExceededException`](crate::error::LimitExceededException)
        pub fn build(self) -> crate::error::LimitExceededException {
            crate::error::LimitExceededException {
                message: self.message,
            }
        }
    }
}
impl LimitExceededException {
    /// Creates a new builder-style object to manufacture [`LimitExceededException`](crate::error::LimitExceededException)
    pub fn builder() -> crate::error::limit_exceeded_exception::Builder {
        crate::error::limit_exceeded_exception::Builder::default()
    }
}
/// <p>The resource is not available for this operation. For successful operation, the
/// resource must be in the <code>ACTIVE</code> state.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ResourceInUseException {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
// Hand-written Debug impl producing the same output shape as a derived one.
impl std::fmt::Debug for ResourceInUseException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("ResourceInUseException");
        formatter.field("message", &self.message);
        formatter.finish()
    }
}
impl ResourceInUseException {
    /// Returns the error message, if one was provided.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
}
// Display prints the exception name, followed by the message when present.
impl std::fmt::Display for ResourceInUseException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "ResourceInUseException")?;
        if let Some(inner_3) = &self.message {
            write!(f, ": {}", inner_3)?;
        }
        Ok(())
    }
}
impl std::error::Error for ResourceInUseException {}
/// See [`ResourceInUseException`](crate::error::ResourceInUseException)
pub mod resource_in_use_exception {
    /// A builder for [`ResourceInUseException`](crate::error::ResourceInUseException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        /// Optional error message to set on the built exception.
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A message that provides information about the error.</p>
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets the message field directly; passing `None` clears it.
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`ResourceInUseException`](crate::error::ResourceInUseException)
        pub fn build(self) -> crate::error::ResourceInUseException {
            crate::error::ResourceInUseException {
                message: self.message,
            }
        }
    }
}
impl ResourceInUseException {
    /// Creates a new builder-style object to manufacture [`ResourceInUseException`](crate::error::ResourceInUseException)
    pub fn builder() -> crate::error::resource_in_use_exception::Builder {
        crate::error::resource_in_use_exception::Builder::default()
    }
}
/// <p>A specified parameter exceeds its restrictions, is not supported, or can't be used.
/// For more information, see the returned message.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct InvalidArgumentException {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
impl InvalidArgumentException {
    /// Returns the error message, if the service provided one.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
    /// Creates a new builder-style object to manufacture [`InvalidArgumentException`](crate::error::InvalidArgumentException)
    pub fn builder() -> crate::error::invalid_argument_exception::Builder {
        crate::error::invalid_argument_exception::Builder::default()
    }
}
impl std::fmt::Debug for InvalidArgumentException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Mirrors the derived layout: a single `message` field under the type name.
        f.debug_struct("InvalidArgumentException")
            .field("message", &self.message)
            .finish()
    }
}
impl std::fmt::Display for InvalidArgumentException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("InvalidArgumentException")?;
        // Append ": <message>" only when a message was supplied.
        match self.message.as_deref() {
            Some(msg) => write!(f, ": {}", msg),
            None => Ok(()),
        }
    }
}
impl std::error::Error for InvalidArgumentException {}
/// See [`InvalidArgumentException`](crate::error::InvalidArgumentException)
pub mod invalid_argument_exception {
    /// A builder for [`InvalidArgumentException`](crate::error::InvalidArgumentException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A message that provides information about the error.</p>
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets or clears the error message (pass `None` to clear it).
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`InvalidArgumentException`](crate::error::InvalidArgumentException)
        pub fn build(self) -> crate::error::InvalidArgumentException {
            crate::error::InvalidArgumentException {
                message: self.message,
            }
        }
    }
}
/// <p>The request was rejected because the state of the specified resource isn't valid
/// for this request. For more information, see <a href="https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html">How Key State Affects Use of a
/// Customer Master Key</a> in the <i>AWS Key Management Service Developer
/// Guide</i>.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct KmsInvalidStateException {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
impl KmsInvalidStateException {
    /// Returns the error message, if the service provided one.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
    /// Creates a new builder-style object to manufacture [`KmsInvalidStateException`](crate::error::KmsInvalidStateException)
    pub fn builder() -> crate::error::kms_invalid_state_exception::Builder {
        crate::error::kms_invalid_state_exception::Builder::default()
    }
}
impl std::fmt::Debug for KmsInvalidStateException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Mirrors the derived layout: a single `message` field under the type name.
        f.debug_struct("KmsInvalidStateException")
            .field("message", &self.message)
            .finish()
    }
}
impl std::fmt::Display for KmsInvalidStateException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The bracketed suffix is the service's wire name for this error.
        f.write_str("KmsInvalidStateException [KMSInvalidStateException]")?;
        match self.message.as_deref() {
            Some(msg) => write!(f, ": {}", msg),
            None => Ok(()),
        }
    }
}
impl std::error::Error for KmsInvalidStateException {}
/// See [`KmsInvalidStateException`](crate::error::KmsInvalidStateException)
pub mod kms_invalid_state_exception {
    /// A builder for [`KmsInvalidStateException`](crate::error::KmsInvalidStateException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A message that provides information about the error.</p>
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets or clears the error message (pass `None` to clear it).
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`KmsInvalidStateException`](crate::error::KmsInvalidStateException)
        pub fn build(self) -> crate::error::KmsInvalidStateException {
            crate::error::KmsInvalidStateException {
                message: self.message,
            }
        }
    }
}
/// <p>The request was denied due to request throttling. For more information about
/// throttling, see <a href="https://docs.aws.amazon.com/kms/latest/developerguide/limits.html#requests-per-second">Limits</a> in
/// the <i>AWS Key Management Service Developer Guide</i>.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct KmsThrottlingException {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
impl KmsThrottlingException {
    /// Returns the error message, if the service provided one.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
    /// Creates a new builder-style object to manufacture [`KmsThrottlingException`](crate::error::KmsThrottlingException)
    pub fn builder() -> crate::error::kms_throttling_exception::Builder {
        crate::error::kms_throttling_exception::Builder::default()
    }
}
impl std::fmt::Debug for KmsThrottlingException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Mirrors the derived layout: a single `message` field under the type name.
        f.debug_struct("KmsThrottlingException")
            .field("message", &self.message)
            .finish()
    }
}
impl std::fmt::Display for KmsThrottlingException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The bracketed suffix is the service's wire name for this error.
        f.write_str("KmsThrottlingException [KMSThrottlingException]")?;
        match self.message.as_deref() {
            Some(msg) => write!(f, ": {}", msg),
            None => Ok(()),
        }
    }
}
impl std::error::Error for KmsThrottlingException {}
/// See [`KmsThrottlingException`](crate::error::KmsThrottlingException)
pub mod kms_throttling_exception {
    /// A builder for [`KmsThrottlingException`](crate::error::KmsThrottlingException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A message that provides information about the error.</p>
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets or clears the error message (pass `None` to clear it).
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`KmsThrottlingException`](crate::error::KmsThrottlingException)
        pub fn build(self) -> crate::error::KmsThrottlingException {
            crate::error::KmsThrottlingException {
                message: self.message,
            }
        }
    }
}
/// <p>The AWS access key ID needs a subscription for the service.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct KmsOptInRequired {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
impl KmsOptInRequired {
    /// Returns the error message, if the service provided one.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
    /// Creates a new builder-style object to manufacture [`KmsOptInRequired`](crate::error::KmsOptInRequired)
    pub fn builder() -> crate::error::kms_opt_in_required::Builder {
        crate::error::kms_opt_in_required::Builder::default()
    }
}
impl std::fmt::Debug for KmsOptInRequired {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Mirrors the derived layout: a single `message` field under the type name.
        f.debug_struct("KmsOptInRequired")
            .field("message", &self.message)
            .finish()
    }
}
impl std::fmt::Display for KmsOptInRequired {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The bracketed suffix is the service's wire name for this error.
        f.write_str("KmsOptInRequired [KMSOptInRequired]")?;
        match self.message.as_deref() {
            Some(msg) => write!(f, ": {}", msg),
            None => Ok(()),
        }
    }
}
impl std::error::Error for KmsOptInRequired {}
/// See [`KmsOptInRequired`](crate::error::KmsOptInRequired)
pub mod kms_opt_in_required {
    /// A builder for [`KmsOptInRequired`](crate::error::KmsOptInRequired)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A message that provides information about the error.</p>
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets or clears the error message (pass `None` to clear it).
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`KmsOptInRequired`](crate::error::KmsOptInRequired)
        pub fn build(self) -> crate::error::KmsOptInRequired {
            crate::error::KmsOptInRequired {
                message: self.message,
            }
        }
    }
}
/// <p>The request was rejected because the specified entity or resource can't be
/// found.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct KmsNotFoundException {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
impl KmsNotFoundException {
    /// Returns the error message, if the service provided one.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
    /// Creates a new builder-style object to manufacture [`KmsNotFoundException`](crate::error::KmsNotFoundException)
    pub fn builder() -> crate::error::kms_not_found_exception::Builder {
        crate::error::kms_not_found_exception::Builder::default()
    }
}
impl std::fmt::Debug for KmsNotFoundException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Mirrors the derived layout: a single `message` field under the type name.
        f.debug_struct("KmsNotFoundException")
            .field("message", &self.message)
            .finish()
    }
}
impl std::fmt::Display for KmsNotFoundException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The bracketed suffix is the service's wire name for this error.
        f.write_str("KmsNotFoundException [KMSNotFoundException]")?;
        match self.message.as_deref() {
            Some(msg) => write!(f, ": {}", msg),
            None => Ok(()),
        }
    }
}
impl std::error::Error for KmsNotFoundException {}
/// See [`KmsNotFoundException`](crate::error::KmsNotFoundException)
pub mod kms_not_found_exception {
    /// A builder for [`KmsNotFoundException`](crate::error::KmsNotFoundException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A message that provides information about the error.</p>
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets or clears the error message (pass `None` to clear it).
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`KmsNotFoundException`](crate::error::KmsNotFoundException)
        pub fn build(self) -> crate::error::KmsNotFoundException {
            crate::error::KmsNotFoundException {
                message: self.message,
            }
        }
    }
}
/// <p>The request was rejected because the specified customer master key (CMK) isn't
/// enabled.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct KmsDisabledException {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
impl KmsDisabledException {
    /// Returns the error message, if the service provided one.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
    /// Creates a new builder-style object to manufacture [`KmsDisabledException`](crate::error::KmsDisabledException)
    pub fn builder() -> crate::error::kms_disabled_exception::Builder {
        crate::error::kms_disabled_exception::Builder::default()
    }
}
impl std::fmt::Debug for KmsDisabledException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Mirrors the derived layout: a single `message` field under the type name.
        f.debug_struct("KmsDisabledException")
            .field("message", &self.message)
            .finish()
    }
}
impl std::fmt::Display for KmsDisabledException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The bracketed suffix is the service's wire name for this error.
        f.write_str("KmsDisabledException [KMSDisabledException]")?;
        match self.message.as_deref() {
            Some(msg) => write!(f, ": {}", msg),
            None => Ok(()),
        }
    }
}
impl std::error::Error for KmsDisabledException {}
/// See [`KmsDisabledException`](crate::error::KmsDisabledException)
pub mod kms_disabled_exception {
    /// A builder for [`KmsDisabledException`](crate::error::KmsDisabledException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A message that provides information about the error.</p>
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets or clears the error message (pass `None` to clear it).
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`KmsDisabledException`](crate::error::KmsDisabledException)
        pub fn build(self) -> crate::error::KmsDisabledException {
            crate::error::KmsDisabledException {
                message: self.message,
            }
        }
    }
}
/// <p>The ciphertext references a key that doesn't exist or that you don't have access
/// to.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct KmsAccessDeniedException {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
impl KmsAccessDeniedException {
    /// Returns the error message, if the service provided one.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
    /// Creates a new builder-style object to manufacture [`KmsAccessDeniedException`](crate::error::KmsAccessDeniedException)
    pub fn builder() -> crate::error::kms_access_denied_exception::Builder {
        crate::error::kms_access_denied_exception::Builder::default()
    }
}
impl std::fmt::Debug for KmsAccessDeniedException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Mirrors the derived layout: a single `message` field under the type name.
        f.debug_struct("KmsAccessDeniedException")
            .field("message", &self.message)
            .finish()
    }
}
impl std::fmt::Display for KmsAccessDeniedException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The bracketed suffix is the service's wire name for this error.
        f.write_str("KmsAccessDeniedException [KMSAccessDeniedException]")?;
        match self.message.as_deref() {
            Some(msg) => write!(f, ": {}", msg),
            None => Ok(()),
        }
    }
}
impl std::error::Error for KmsAccessDeniedException {}
/// See [`KmsAccessDeniedException`](crate::error::KmsAccessDeniedException)
pub mod kms_access_denied_exception {
    /// A builder for [`KmsAccessDeniedException`](crate::error::KmsAccessDeniedException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A message that provides information about the error.</p>
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets or clears the error message (pass `None` to clear it).
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`KmsAccessDeniedException`](crate::error::KmsAccessDeniedException)
        pub fn build(self) -> crate::error::KmsAccessDeniedException {
            crate::error::KmsAccessDeniedException {
                message: self.message,
            }
        }
    }
}
/// <p>The request rate for the stream is too high, or the requested data is too large for
/// the available throughput. Reduce the frequency or size of your requests. For more
/// information, see <a href="https://docs.aws.amazon.com/kinesis/latest/dev/service-sizes-and-limits.html">Streams Limits</a> in the
/// <i>Amazon Kinesis Data Streams Developer Guide</i>, and <a href="https://docs.aws.amazon.com/general/latest/gr/api-retries.html">Error Retries and
/// Exponential Backoff in AWS</a> in the <i>AWS General
/// Reference</i>.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ProvisionedThroughputExceededException {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
impl ProvisionedThroughputExceededException {
    /// Returns the error message, if the service provided one.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
    /// Creates a new builder-style object to manufacture [`ProvisionedThroughputExceededException`](crate::error::ProvisionedThroughputExceededException)
    pub fn builder() -> crate::error::provisioned_throughput_exceeded_exception::Builder {
        crate::error::provisioned_throughput_exceeded_exception::Builder::default()
    }
}
impl std::fmt::Debug for ProvisionedThroughputExceededException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Mirrors the derived layout: a single `message` field under the type name.
        f.debug_struct("ProvisionedThroughputExceededException")
            .field("message", &self.message)
            .finish()
    }
}
impl std::fmt::Display for ProvisionedThroughputExceededException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("ProvisionedThroughputExceededException")?;
        // Append ": <message>" only when a message was supplied.
        match self.message.as_deref() {
            Some(msg) => write!(f, ": {}", msg),
            None => Ok(()),
        }
    }
}
impl std::error::Error for ProvisionedThroughputExceededException {}
/// See [`ProvisionedThroughputExceededException`](crate::error::ProvisionedThroughputExceededException)
pub mod provisioned_throughput_exceeded_exception {
    /// A builder for [`ProvisionedThroughputExceededException`](crate::error::ProvisionedThroughputExceededException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A message that provides information about the error.</p>
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets or clears the error message (pass `None` to clear it).
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`ProvisionedThroughputExceededException`](crate::error::ProvisionedThroughputExceededException)
        pub fn build(self) -> crate::error::ProvisionedThroughputExceededException {
            crate::error::ProvisionedThroughputExceededException {
                message: self.message,
            }
        }
    }
}
/// <p>The pagination token passed to the operation is expired.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ExpiredNextTokenException {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
impl ExpiredNextTokenException {
    /// Returns the error message, if the service provided one.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
    /// Creates a new builder-style object to manufacture [`ExpiredNextTokenException`](crate::error::ExpiredNextTokenException)
    pub fn builder() -> crate::error::expired_next_token_exception::Builder {
        crate::error::expired_next_token_exception::Builder::default()
    }
}
impl std::fmt::Debug for ExpiredNextTokenException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Mirrors the derived layout: a single `message` field under the type name.
        f.debug_struct("ExpiredNextTokenException")
            .field("message", &self.message)
            .finish()
    }
}
impl std::fmt::Display for ExpiredNextTokenException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("ExpiredNextTokenException")?;
        // Append ": <message>" only when a message was supplied.
        match self.message.as_deref() {
            Some(msg) => write!(f, ": {}", msg),
            None => Ok(()),
        }
    }
}
impl std::error::Error for ExpiredNextTokenException {}
/// See [`ExpiredNextTokenException`](crate::error::ExpiredNextTokenException)
pub mod expired_next_token_exception {
    /// A builder for [`ExpiredNextTokenException`](crate::error::ExpiredNextTokenException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// Sets the error message.
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets or clears the error message (pass `None` to clear it).
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`ExpiredNextTokenException`](crate::error::ExpiredNextTokenException)
        pub fn build(self) -> crate::error::ExpiredNextTokenException {
            crate::error::ExpiredNextTokenException {
                message: self.message,
            }
        }
    }
}
/// <p>The provided iterator exceeds the maximum age allowed.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ExpiredIteratorException {
    /// <p>A message that provides information about the error.</p>
    pub message: std::option::Option<std::string::String>,
}
impl ExpiredIteratorException {
    /// Returns the error message, if the service provided one.
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
    /// Creates a new builder-style object to manufacture [`ExpiredIteratorException`](crate::error::ExpiredIteratorException)
    pub fn builder() -> crate::error::expired_iterator_exception::Builder {
        crate::error::expired_iterator_exception::Builder::default()
    }
}
impl std::fmt::Debug for ExpiredIteratorException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Mirrors the derived layout: a single `message` field under the type name.
        f.debug_struct("ExpiredIteratorException")
            .field("message", &self.message)
            .finish()
    }
}
impl std::fmt::Display for ExpiredIteratorException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("ExpiredIteratorException")?;
        // Append ": <message>" only when a message was supplied.
        match self.message.as_deref() {
            Some(msg) => write!(f, ": {}", msg),
            None => Ok(()),
        }
    }
}
impl std::error::Error for ExpiredIteratorException {}
/// See [`ExpiredIteratorException`](crate::error::ExpiredIteratorException)
pub mod expired_iterator_exception {
    /// A builder for [`ExpiredIteratorException`](crate::error::ExpiredIteratorException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>A message that provides information about the error.</p>
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        /// Sets or clears the error message (pass `None` to clear it).
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`ExpiredIteratorException`](crate::error::ExpiredIteratorException)
        pub fn build(self) -> crate::error::ExpiredIteratorException {
            crate::error::ExpiredIteratorException {
                message: self.message,
            }
        }
    }
}
| {
matches!(&self.kind, GetRecordsErrorKind::KmsNotFoundException(_))
} |
client.go | /*
Package gortsplib is a RTSP 1.0 library for the Go programming language,
written for rtsp-simple-server.
Examples are available at https://github.com/aler9/gortsplib/tree/master/examples
*/
package gortsplib
import (
"context"
"crypto/tls"
"net"
"time"
"github.com/aler9/gortsplib/pkg/base"
"github.com/aler9/gortsplib/pkg/headers"
)
// DefaultClient is the default Client, used by the package-level
// Dial, DialRead and DialPublish convenience functions.
var DefaultClient = &Client{}
// Dial connects to a server using DefaultClient.
func Dial(scheme string, host string) (*ClientConn, error) {
	return DefaultClient.Dial(scheme, host)
}
// DialRead connects to a server using DefaultClient and starts reading all tracks.
func DialRead(address string) (*ClientConn, error) {
	return DefaultClient.DialRead(address)
}
// DialPublish connects to a server using DefaultClient and starts publishing the tracks.
func DialPublish(address string, tracks Tracks) (*ClientConn, error) {
	return DefaultClient.DialPublish(address, tracks)
}
// Client is a RTSP client.
type Client struct {
//
// connection
//
// timeout of read operations.
// It defaults to 10 seconds.
ReadTimeout time.Duration
// timeout of write operations.
// It defaults to 10 seconds.
WriteTimeout time.Duration
// a TLS configuration to connect to TLS (RTSPS) servers.
// It defaults to &tls.Config{InsecureSkipVerify:true}
TLSConfig *tls.Config
//
// initialization
//
// disable being redirected to other servers, that can happen during Describe().
// It defaults to false.
RedirectDisable bool
// enable communication with servers which don't provide server ports.
// this can be a security issue.
// It defaults to false.
AnyPortEnable bool
//
// reading / writing
//
// the stream protocol (UDP or TCP).
// If nil, it is chosen automatically (first UDP, then, if it fails, TCP).
// It defaults to nil. | // at least a packet within this timeout.
// It defaults to 3 seconds.
InitialUDPReadTimeout time.Duration
// read buffer count.
// If greater than 1, allows to pass buffers to routines different than the one
// that is reading frames.
// It defaults to 1.
ReadBufferCount int
// read buffer size.
// This must be touched only when the server reports problems about buffer sizes.
// It defaults to 2048.
ReadBufferSize int
//
// callbacks
//
// callback called before every request.
OnRequest func(req *base.Request)
// callback called after every response.
OnResponse func(res *base.Response)
//
// system functions
//
// function used to initialize the TCP client.
// It defaults to (&net.Dialer{}).DialContext.
DialContext func(ctx context.Context, network, address string) (net.Conn, error)
// function used to initialize UDP listeners.
// It defaults to net.ListenPacket.
ListenPacket func(network, address string) (net.PacketConn, error)
//
// private
//
senderReportPeriod time.Duration
receiverReportPeriod time.Duration
}
// Dial connects to a server. The connection is established immediately;
// no RTSP requests are sent until the caller invokes methods on the
// returned ClientConn.
func (c *Client) Dial(scheme string, host string) (*ClientConn, error) {
	return newClientConn(c, scheme, host)
}
// DialRead connects to the address and starts reading all tracks.
// It is a shortcut for DialReadContext with a background (non-cancelable) context.
func (c *Client) DialRead(address string) (*ClientConn, error) {
	return c.DialReadContext(context.Background(), address)
}
// DialReadContext connects to the address with the given context and starts reading all tracks.
// The sequence is: OPTIONS -> DESCRIBE -> SETUP (one per track) -> PLAY.
// If ctx is canceled while the setup is in progress, the connection is closed,
// which makes the pending request fail and the function return an error.
func (c *Client) DialReadContext(ctx context.Context, address string) (*ClientConn, error) {
	u, err := base.ParseURL(address)
	if err != nil {
		return nil, err
	}
	conn, err := c.Dial(u.Scheme, u.Host)
	if err != nil {
		return nil, err
	}
	// Watchdog goroutine: closes conn if ctx is canceled before setup completes.
	// Defers run LIFO, so on return ctxHandlerTerminate is closed first (stopping
	// the goroutine), then we wait on ctxHandlerDone for it to actually exit.
	ctxHandlerDone := make(chan struct{})
	defer func() { <-ctxHandlerDone }()
	ctxHandlerTerminate := make(chan struct{})
	defer close(ctxHandlerTerminate)
	go func() {
		defer close(ctxHandlerDone)
		select {
		case <-ctx.Done():
			conn.Close()
		case <-ctxHandlerTerminate:
		}
	}()
	_, err = conn.Options(u)
	if err != nil {
		conn.Close()
		return nil, err
	}
	tracks, _, err := conn.Describe(u)
	if err != nil {
		conn.Close()
		return nil, err
	}
	// Set up every announced track in play mode before starting playback.
	for _, track := range tracks {
		_, err := conn.Setup(headers.TransportModePlay, track, 0, 0)
		if err != nil {
			conn.Close()
			return nil, err
		}
	}
	_, err = conn.Play()
	if err != nil {
		conn.Close()
		return nil, err
	}
	return conn, nil
}
// DialPublish connects to the address and starts publishing the tracks.
// It is a shortcut for DialPublishContext with a background (non-cancelable) context.
func (c *Client) DialPublish(address string, tracks Tracks) (*ClientConn, error) {
	return c.DialPublishContext(context.Background(), address, tracks)
}
// DialPublishContext connects to the address with the given context and starts publishing the tracks.
// The sequence is: OPTIONS -> ANNOUNCE -> SETUP (one per track) -> RECORD.
// If ctx is canceled while the setup is in progress, the connection is closed,
// which makes the pending request fail and the function return an error.
func (c *Client) DialPublishContext(ctx context.Context, address string, tracks Tracks) (*ClientConn, error) {
	u, err := base.ParseURL(address)
	if err != nil {
		return nil, err
	}
	conn, err := c.Dial(u.Scheme, u.Host)
	if err != nil {
		return nil, err
	}
	// Watchdog goroutine: closes conn if ctx is canceled before setup completes.
	// Defers run LIFO, so on return ctxHandlerTerminate is closed first (stopping
	// the goroutine), then we wait on ctxHandlerDone for it to actually exit.
	ctxHandlerDone := make(chan struct{})
	defer func() { <-ctxHandlerDone }()
	ctxHandlerTerminate := make(chan struct{})
	defer close(ctxHandlerTerminate)
	go func() {
		defer close(ctxHandlerDone)
		select {
		case <-ctx.Done():
			conn.Close()
		case <-ctxHandlerTerminate:
		}
	}()
	_, err = conn.Options(u)
	if err != nil {
		conn.Close()
		return nil, err
	}
	_, err = conn.Announce(u, tracks)
	if err != nil {
		conn.Close()
		return nil, err
	}
	// Set up every track in record mode before starting to publish.
	for _, track := range tracks {
		_, err := conn.Setup(headers.TransportModeRecord, track, 0, 0)
		if err != nil {
			conn.Close()
			return nil, err
		}
	}
	_, err = conn.Record()
	if err != nil {
		conn.Close()
		return nil, err
	}
	return conn, nil
}
// If the client is reading with UDP, it must receive |
config.js | export const API_KEY = "e15565837a7a29a9d67351034c9d991b"; | export const API_URL_ROOT = "https://api.themoviedb.org/3"; |
|
observer.py | # Subject/Observer usage example.
from ocempgui.events import Subject, IObserver
# The subject that should notify observers about state changes.
class | (Subject):
    def __init__ (self):
        # Register under the subject name "MyObject"; observers receive this
        # name as the `subject` argument of their update() callback.
        Subject.__init__ (self, "MyObject")
        self._x = "Simple Attribute"
        self._y = 1234567890
        self._z = None
    def get_x (self):
        return self._x
    def set_x (self, value):
        # Preserve old value so observers can see the full transition.
        old = self._x
        self._x = value
        # Notify about change.
        self.notify ("x", old, value)
    def get_y (self):
        return self._y
    def set_y (self, value):
        # Preserve old value so observers can see the full transition.
        old = self._y
        self._y = value
        # Notify about change.
        self.notify ("y", old, value)
    def get_z (self):
        return self._z
    def set_z (self, value):
        # Preserve old value so observers can see the full transition.
        old = self._z
        self._z = value
        # Notify about change.
        self.notify ("z", old, value)
    # Route plain attribute access through the notifying setters above, so
    # `obj.x = ...` automatically triggers observer notifications.
    x = property (get_x, set_x)
    y = property (get_y, set_y)
    z = property (get_z, set_z)
class OwnObserver (IObserver):
def __init__ (self):
pass
def update (self, subject, prop, oldval, newval):
if subject == "MyObject": # A MyObject instance, check details.
if prop == "x":
# Its x value changed.
print "The x value of a MyObject instance changed from " \
"%s to %s" % (str (oldval), str (newval))
elif prop == "y":
# Its y value changed.
print "The y value of a MyObject instance changed from " \
"%s to %s" % (str (oldval), str (newval))
else:
# Another value changed.
print "The %s value of a MyObject instance changed from" \
"%s to %s" % (str (prop), str (oldval), str (newval))
class AnotherObserver (IObserver):
    # Generic observer: unlike OwnObserver it does not filter on the subject
    # name and simply reports every change it is notified about.
    def __init__ (self):
        pass
    def update (self, subject, prop, oldval, newval):
        print "Detail %s of %s changed from %s to %s" % (str (prop), subject,
                                                         str (oldval),
                                                         str (newval))
subject = MyObject ()
# Add two observers; both will be notified about every state change.
observer1 = OwnObserver ()
observer2 = AnotherObserver ()
subject.add (observer1, observer2)
# Each assignment goes through a notifying property setter, so both
# observers print a message per change, in registration order.
subject.x = "FooBarBaz"
subject.y = subject.x * 3
subject.z = 100
| MyObject |
highfreq_processor.py | import os
import numpy as np
import pandas as pd
from qlib.data.dataset.processor import Processor
from qlib.data.dataset.utils import fetch_df_by_index
from typing import Dict
class HighFreqTrans(Processor):
    """Cast a feature frame to a compact storage dtype.

    dtype == "bool" casts to int8 (compact storage for boolean-like
    features); any other value casts to float32.
    """
    def __init__(self, dtype: str = "bool"):
        self.dtype = dtype
    def fit(self, df_features):
        # Stateless transform: nothing to learn.
        pass
    def __call__(self, df_features):
        """Return df_features converted to the configured storage dtype."""
        if self.dtype == "bool":
            return df_features.astype(np.int8)
        else:
            return df_features.astype(np.float32)
class HighFreqNorm(Processor):
def __init__(
self,
fit_start_time: pd.Timestamp,
fit_end_time: pd.Timestamp,
feature_save_dir: str,
norm_groups: Dict[str, int],
):
self.fit_start_time = fit_start_time
self.fit_end_time = fit_end_time
self.feature_save_dir = feature_save_dir
self.norm_groups = norm_groups
def fit(self, df_features) -> None:
if os.path.exists(self.feature_save_dir) and len(os.listdir(self.feature_save_dir)) != 0:
return
os.makedirs(self.feature_save_dir)
fetch_df = fetch_df_by_index(df_features, slice(self.fit_start_time, self.fit_end_time), level="datetime")
del df_features
index = 0
names = {}
for name, dim in self.norm_groups.items():
names[name] = slice(index, index + dim)
index += dim
for name, name_val in names.items():
df_values = fetch_df.iloc(axis=1)[name_val].values
if name.endswith("volume"):
df_values = np.log1p(df_values)
self.feature_mean = np.nanmean(df_values)
np.save(self.feature_save_dir + name + "_mean.npy", self.feature_mean)
df_values = df_values - self.feature_mean
self.feature_std = np.nanstd(np.absolute(df_values))
np.save(self.feature_save_dir + name + "_std.npy", self.feature_std)
df_values = df_values / self.feature_std
np.save(self.feature_save_dir + name + "_vmax.npy", np.nanmax(df_values))
np.save(self.feature_save_dir + name + "_vmin.npy", np.nanmin(df_values))
return
def __call__(self, df_features):
if "date" in df_features:
df_features.droplevel("date", inplace=True)
df_values = df_features.values
index = 0
names = {}
for name, dim in self.norm_groups.items():
names[name] = slice(index, index + dim)
index += dim
for name, name_val in names.items():
feature_mean = np.load(self.feature_save_dir + name + "_mean.npy")
feature_std = np.load(self.feature_save_dir + name + "_std.npy")
if name.endswith("volume"):
df_values[:, name_val] = np.log1p(df_values[:, name_val])
df_values[:, name_val] -= feature_mean
df_values[:, name_val] /= feature_std
df_features = pd.DataFrame(data=df_values, index=df_features.index, columns=df_features.columns)
return df_features.fillna(0)
| __call__ |
create_cluster_parameters.go | // Code generated by go-swagger; DO NOT EDIT.
package operations
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"net/http"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime/middleware"
)
// NewCreateClusterParams creates a new CreateClusterParams object
// no default values defined in spec.
func NewCreateClusterParams() CreateClusterParams {
return CreateClusterParams{}
}
// CreateClusterParams contains all the bound params for the create cluster operation
// typically these are obtained from a http.Request
//
// swagger:parameters createCluster
type CreateClusterParams struct {
// HTTP Request Object
HTTPRequest *http.Request `json:"-"`
}
// BindRequest both binds and validates a request, it assumes that complex things implement a Validatable(strfmt.Registry) error interface
// for simple values it will use straight method calls.
//
// To ensure default values, the struct must have been initialized with NewCreateClusterParams() beforehand.
func (o *CreateClusterParams) BindRequest(r *http.Request, route *middleware.MatchedRoute) error {
var res []error
o.HTTPRequest = r
if len(res) > 0 |
return nil
}
| {
return errors.CompositeValidationError(res...)
} |
skipping.py | """ support for skip/xfail functions and markers. """
from _pytest.config import hookimpl
from _pytest.mark.evaluate import MarkEvaluator
from _pytest.outcomes import fail
from _pytest.outcomes import skip
from _pytest.outcomes import xfail
def pytest_addoption(parser):
group = parser.getgroup("general")
group.addoption(
"--runxfail",
action="store_true",
dest="runxfail",
default=False,
help="report the results of xfail tests as if they were not marked",
)
parser.addini(
"xfail_strict",
"default for the strict parameter of xfail "
"markers when not given explicitly (default: False)",
default=False,
type="bool",
)
def pytest_configure(config):
if config.option.runxfail:
# yay a hack
import pytest
old = pytest.xfail
config._cleanup.append(lambda: setattr(pytest, "xfail", old))
def nop(*args, **kwargs):
pass
nop.Exception = xfail.Exception
setattr(pytest, "xfail", nop)
config.addinivalue_line(
"markers",
"skip(reason=None): skip the given test function with an optional reason. "
'Example: skip(reason="no way of currently testing this") skips the '
"test.",
)
config.addinivalue_line(
"markers",
"skipif(condition): skip the given test function if eval(condition) "
"results in a True value. Evaluation happens within the "
"module global context. Example: skipif('sys.platform == \"win32\"') "
"skips the test if we are on the win32 platform. see "
"https://docs.pytest.org/en/latest/skipping.html",
)
config.addinivalue_line(
"markers",
"xfail(condition, reason=None, run=True, raises=None, strict=False): "
"mark the test function as an expected failure if eval(condition) "
"has a True value. Optionally specify a reason for better reporting "
"and run=False if you don't even want to execute the test function. "
"If only specific exception(s) are expected, you can list them in "
"raises, and if the test fails in other ways, it will be reported as "
"a true failure. See https://docs.pytest.org/en/latest/skipping.html",
)
@hookimpl(tryfirst=True)
def pytest_runtest_setup(item):
# Check if skip or skipif are specified as pytest marks
item._skipped_by_mark = False
eval_skipif = MarkEvaluator(item, "skipif")
if eval_skipif.istrue():
item._skipped_by_mark = True
skip(eval_skipif.getexplanation())
for skip_info in item.iter_markers(name="skip"):
item._skipped_by_mark = True
if "reason" in skip_info.kwargs:
skip(skip_info.kwargs["reason"])
elif skip_info.args:
skip(skip_info.args[0])
else:
skip("unconditional skip")
item._evalxfail = MarkEvaluator(item, "xfail")
check_xfail_no_run(item)
@hookimpl(hookwrapper=True)
def pytest_pyfunc_call(pyfuncitem):
check_xfail_no_run(pyfuncitem)
outcome = yield
passed = outcome.excinfo is None
if passed:
check_strict_xfail(pyfuncitem)
def check_xfail_no_run(item):
"""check xfail(run=False)"""
if not item.config.option.runxfail:
evalxfail = item._evalxfail
if evalxfail.istrue():
if not evalxfail.get("run", True):
xfail("[NOTRUN] " + evalxfail.getexplanation())
def check_strict_xfail(pyfuncitem):
"""check xfail(strict=True) for the given PASSING test"""
evalxfail = pyfuncitem._evalxfail
if evalxfail.istrue():
strict_default = pyfuncitem.config.getini("xfail_strict")
is_strict_xfail = evalxfail.get("strict", strict_default)
if is_strict_xfail:
del pyfuncitem._evalxfail
explanation = evalxfail.getexplanation()
fail("[XPASS(strict)] " + explanation, pytrace=False)
@hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
outcome = yield
rep = outcome.get_result()
evalxfail = getattr(item, "_evalxfail", None)
# unittest special case, see setting of _unexpectedsuccess
if hasattr(item, "_unexpectedsuccess") and rep.when == "call":
if item._unexpectedsuccess:
rep.longrepr = "Unexpected success: {}".format(item._unexpectedsuccess)
else:
rep.longrepr = "Unexpected success"
rep.outcome = "failed"
elif item.config.option.runxfail:
pass # don't interfere
elif call.excinfo and call.excinfo.errisinstance(xfail.Exception):
rep.wasxfail = "reason: " + call.excinfo.value.msg
rep.outcome = "skipped"
elif evalxfail and not rep.skipped and evalxfail.wasvalid() and evalxfail.istrue():
if call.excinfo:
if evalxfail.invalidraise(call.excinfo.value):
rep.outcome = "failed"
else:
rep.outcome = "skipped"
rep.wasxfail = evalxfail.getexplanation()
elif call.when == "call":
strict_default = item.config.getini("xfail_strict")
is_strict_xfail = evalxfail.get("strict", strict_default)
explanation = evalxfail.getexplanation()
if is_strict_xfail:
rep.outcome = "failed"
rep.longrepr = "[XPASS(strict)] {}".format(explanation)
else:
rep.outcome = "passed"
rep.wasxfail = explanation
elif (
getattr(item, "_skipped_by_mark", False)
and rep.skipped
and type(rep.longrepr) is tuple
):
# skipped by mark.skipif; change the location of the failure
# to point to the item definition, otherwise it will display
# the location of where the skip exception was raised within pytest
_, _, reason = rep.longrepr
filename, line = item.location[:2]
rep.longrepr = filename, line + 1, reason
# called by terminalreporter progress reporting
def | (report):
if hasattr(report, "wasxfail"):
if report.skipped:
return "xfailed", "x", "XFAIL"
elif report.passed:
return "xpassed", "X", "XPASS"
| pytest_report_teststatus |
interfaces.py | import uuid
from typing import Any, Dict
from loguru import logger
from analytics.signal import analytic_signal
from users.models import CustomUser
class UserInterface:
@staticmethod
def get_username(*, user_id: uuid.UUID) -> Dict[str, Any]:
return {"username": CustomUser.objects.get(user_uuid=user_id).username}
@staticmethod
def get_user(*, username: str) -> Dict[str, CustomUser]:
return {"username": CustomUser.objects.get(username=username)}
class AnalyticInterface:
| @staticmethod
def create_analytic(*, model: Any, instance: Any, request: Any) -> None:
analytic_signal.send(sender=model, instance=instance, request=request)
logger.success(f"analytic data was created for {instance}") |
|
v1beta1_allowed_host_path.py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.20
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1beta1AllowedHostPath(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'path_prefix': 'str',
'read_only': 'bool'
}
attribute_map = {
'path_prefix': 'pathPrefix',
'read_only': 'readOnly'
}
def __init__(self, path_prefix=None, read_only=None, local_vars_configuration=None): # noqa: E501
"""V1beta1AllowedHostPath - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._path_prefix = None
self._read_only = None
self.discriminator = None
if path_prefix is not None:
self.path_prefix = path_prefix
if read_only is not None:
self.read_only = read_only
@property
def path_prefix(self):
"""Gets the path_prefix of this V1beta1AllowedHostPath. # noqa: E501
pathPrefix is the path prefix that the host volume must match. It does not support `*`. Trailing slashes are trimmed when validating the path prefix with a host path. Examples: `/foo` would allow `/foo`, `/foo/` and `/foo/bar` `/foo` would not allow `/food` or `/etc/foo` # noqa: E501
:return: The path_prefix of this V1beta1AllowedHostPath. # noqa: E501
:rtype: str
"""
return self._path_prefix
@path_prefix.setter
def path_prefix(self, path_prefix):
"""Sets the path_prefix of this V1beta1AllowedHostPath.
pathPrefix is the path prefix that the host volume must match. It does not support `*`. Trailing slashes are trimmed when validating the path prefix with a host path. Examples: `/foo` would allow `/foo`, `/foo/` and `/foo/bar` `/foo` would not allow `/food` or `/etc/foo` # noqa: E501
:param path_prefix: The path_prefix of this V1beta1AllowedHostPath. # noqa: E501
:type: str
"""
self._path_prefix = path_prefix
@property
def read_only(self):
"""Gets the read_only of this V1beta1AllowedHostPath. # noqa: E501
when set to true, will allow host volumes matching the pathPrefix only if all volume mounts are readOnly. # noqa: E501
:return: The read_only of this V1beta1AllowedHostPath. # noqa: E501
:rtype: bool
"""
return self._read_only
@read_only.setter
def read_only(self, read_only):
"""Sets the read_only of this V1beta1AllowedHostPath.
when set to true, will allow host volumes matching the pathPrefix only if all volume mounts are readOnly. # noqa: E501
:param read_only: The read_only of this V1beta1AllowedHostPath. # noqa: E501
:type: bool
"""
self._read_only = read_only
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def | (self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1beta1AllowedHostPath):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1beta1AllowedHostPath):
return True
return self.to_dict() != other.to_dict()
| __eq__ |
0003_auto_20190608_1137.py | # Generated by Django 2.2 on 2019-06-08 10:37
from django.db import migrations
class | (migrations.Migration):
dependencies = [
('shop', '0002_auto_20190608_1135'),
]
operations = [
migrations.RenameModel(
old_name='Smart_Watch',
new_name='Smart_Watche',
),
]
| Migration |
client_state.go | package types
import (
"time"
ics23 "github.com/confio/ics23/go"
tmmath "github.com/tendermint/tendermint/libs/math"
lite "github.com/tendermint/tendermint/lite2"
"github.com/cosmos/cosmos-sdk/codec"
sdk "github.com/cosmos/cosmos-sdk/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
clientexported "github.com/cosmos/cosmos-sdk/x/ibc/02-client/exported"
clienttypes "github.com/cosmos/cosmos-sdk/x/ibc/02-client/types"
connectionexported "github.com/cosmos/cosmos-sdk/x/ibc/03-connection/exported"
connectiontypes "github.com/cosmos/cosmos-sdk/x/ibc/03-connection/types"
channelexported "github.com/cosmos/cosmos-sdk/x/ibc/04-channel/exported"
channeltypes "github.com/cosmos/cosmos-sdk/x/ibc/04-channel/types"
commitmentexported "github.com/cosmos/cosmos-sdk/x/ibc/23-commitment/exported"
commitmenttypes "github.com/cosmos/cosmos-sdk/x/ibc/23-commitment/types"
host "github.com/cosmos/cosmos-sdk/x/ibc/24-host"
)
var _ clientexported.ClientState = ClientState{}
// ClientState from Tendermint tracks the current validator set, latest height,
// and a possible frozen height.
type ClientState struct {
// Client ID
ID string `json:"id" yaml:"id"`
TrustLevel tmmath.Fraction `json:"trust_level" yaml:"trust_level"`
// Duration of the period since the LastestTimestamp during which the
// submitted headers are valid for upgrade
TrustingPeriod time.Duration `json:"trusting_period" yaml:"trusting_period"`
// Duration of the staking unbonding period
UnbondingPeriod time.Duration `json:"unbonding_period" yaml:"unbonding_period"`
// MaxClockDrift defines how much new (untrusted) header's Time can drift into
// the future.
MaxClockDrift time.Duration
// Block height when the client was frozen due to a misbehaviour
FrozenHeight uint64 `json:"frozen_height" yaml:"frozen_height"`
// Last Header that was stored by client
LastHeader Header `json:"last_header" yaml:"last_header"`
ProofSpecs []*ics23.ProofSpec `json:"proof_specs" yaml:"proof_specs"`
}
// InitializeFromMsg creates a tendermint client state from a CreateClientMsg
func InitializeFromMsg(msg MsgCreateClient) (ClientState, error) {
return Initialize(
msg.GetClientID(), msg.TrustLevel,
msg.TrustingPeriod, msg.UnbondingPeriod, msg.MaxClockDrift,
msg.Header, msg.ProofSpecs,
)
}
// Initialize creates a client state and validates its contents, checking that
// the provided consensus state is from the same client type.
func Initialize(
id string, trustLevel tmmath.Fraction,
trustingPeriod, ubdPeriod, maxClockDrift time.Duration,
header Header, specs []*ics23.ProofSpec,
) (ClientState, error) {
clientState := NewClientState(id, trustLevel, trustingPeriod, ubdPeriod, maxClockDrift, header, specs)
return clientState, nil
}
// NewClientState creates a new ClientState instance
func NewClientState(
id string, trustLevel tmmath.Fraction,
trustingPeriod, ubdPeriod, maxClockDrift time.Duration,
header Header, specs []*ics23.ProofSpec,
) ClientState {
return ClientState{
ID: id,
TrustLevel: trustLevel,
TrustingPeriod: trustingPeriod,
UnbondingPeriod: ubdPeriod,
MaxClockDrift: maxClockDrift,
LastHeader: header,
FrozenHeight: 0,
ProofSpecs: specs,
}
}
// GetID returns the tendermint client state identifier.
func (cs ClientState) GetID() string {
return cs.ID
}
// GetChainID returns the chain-id from the last header
func (cs ClientState) GetChainID() string {
if cs.LastHeader.SignedHeader.Header == nil {
return ""
}
return cs.LastHeader.SignedHeader.Header.ChainID
}
// ClientType is tendermint.
func (cs ClientState) ClientType() clientexported.ClientType {
return clientexported.Tendermint
}
// GetLatestHeight returns latest block height.
func (cs ClientState) GetLatestHeight() uint64 {
return uint64(cs.LastHeader.Height)
}
// GetLatestTimestamp returns latest block time.
func (cs ClientState) GetLatestTimestamp() time.Time {
return cs.LastHeader.Time
}
// IsFrozen returns true if the frozen height has been set.
func (cs ClientState) IsFrozen() bool {
return cs.FrozenHeight != 0
}
// Validate performs a basic validation of the client state fields.
func (cs ClientState) Validate() error {
if err := host.ClientIdentifierValidator(cs.ID); err != nil {
return err
}
if err := lite.ValidateTrustLevel(cs.TrustLevel); err != nil {
return err
}
if cs.TrustingPeriod == 0 {
return sdkerrors.Wrap(ErrInvalidTrustingPeriod, "trusting period cannot be zero")
}
if cs.UnbondingPeriod == 0 {
return sdkerrors.Wrap(ErrInvalidUnbondingPeriod, "unbonding period cannot be zero")
}
if cs.MaxClockDrift == 0 {
return sdkerrors.Wrap(ErrInvalidMaxClockDrift, "max clock drift cannot be zero")
}
if cs.TrustingPeriod >= cs.UnbondingPeriod {
return sdkerrors.Wrapf(
ErrInvalidTrustingPeriod,
"trusting period (%s) should be < unbonding period (%s)", cs.TrustingPeriod, cs.UnbondingPeriod,
)
}
// Validate ProofSpecs
if cs.ProofSpecs == nil {
return sdkerrors.Wrap(ErrInvalidProofSpecs, "proof specs cannot be nil for tm client")
}
for _, spec := range cs.ProofSpecs {
if spec == nil {
return sdkerrors.Wrap(ErrInvalidProofSpecs, "proof spec cannot be nil")
}
}
return cs.LastHeader.ValidateBasic(cs.GetChainID())
}
// GetProofSpecs returns the format the client expects for proof verification
// as a string array specifying the proof type for each position in chained proof
func (cs ClientState) GetProofSpecs() []*ics23.ProofSpec {
return cs.ProofSpecs
}
// VerifyClientConsensusState verifies a proof of the consensus state of the
// Tendermint client stored on the target machine.
func (cs ClientState) VerifyClientConsensusState(
_ sdk.KVStore,
cdc codec.Marshaler,
aminoCdc *codec.Codec,
provingRoot commitmentexported.Root,
height uint64,
counterpartyClientIdentifier string,
consensusHeight uint64,
prefix commitmentexported.Prefix,
proof []byte,
consensusState clientexported.ConsensusState,
) error {
merkleProof, err := sanitizeVerificationArgs(cdc, cs, height, prefix, proof, consensusState)
if err != nil {
return err
}
clientPrefixedPath := "clients/" + counterpartyClientIdentifier + "/" + host.ConsensusStatePath(consensusHeight)
path, err := commitmenttypes.ApplyPrefix(prefix, clientPrefixedPath)
if err != nil {
return err
}
bz, err := aminoCdc.MarshalBinaryBare(consensusState)
if err != nil {
return err
}
if err := merkleProof.VerifyMembership(cs.ProofSpecs, provingRoot, path, bz); err != nil {
return sdkerrors.Wrap(clienttypes.ErrFailedClientConsensusStateVerification, err.Error())
}
return nil
}
// VerifyConnectionState verifies a proof of the connection state of the
// specified connection end stored on the target machine.
func (cs ClientState) VerifyConnectionState(
_ sdk.KVStore,
cdc codec.Marshaler,
height uint64,
prefix commitmentexported.Prefix,
proof []byte,
connectionID string,
connectionEnd connectionexported.ConnectionI,
consensusState clientexported.ConsensusState,
) error {
merkleProof, err := sanitizeVerificationArgs(cdc, cs, height, prefix, proof, consensusState)
if err != nil {
return err
}
path, err := commitmenttypes.ApplyPrefix(prefix, host.ConnectionPath(connectionID))
if err != nil {
return err
}
connection, ok := connectionEnd.(connectiontypes.ConnectionEnd)
if !ok {
return sdkerrors.Wrapf(sdkerrors.ErrInvalidType, "invalid connection type %T", connectionEnd)
}
bz, err := cdc.MarshalBinaryBare(&connection)
if err != nil {
return err
}
if err := merkleProof.VerifyMembership(cs.ProofSpecs, consensusState.GetRoot(), path, bz); err != nil {
return sdkerrors.Wrap(clienttypes.ErrFailedConnectionStateVerification, err.Error())
}
return nil
}
// VerifyChannelState verifies a proof of the channel state of the specified
// channel end, under the specified port, stored on the target machine.
func (cs ClientState) VerifyChannelState(
_ sdk.KVStore,
cdc codec.Marshaler,
height uint64,
prefix commitmentexported.Prefix,
proof []byte,
portID,
channelID string,
channel channelexported.ChannelI,
consensusState clientexported.ConsensusState,
) error {
merkleProof, err := sanitizeVerificationArgs(cdc, cs, height, prefix, proof, consensusState)
if err != nil {
return err
}
path, err := commitmenttypes.ApplyPrefix(prefix, host.ChannelPath(portID, channelID))
if err != nil {
return err
}
channelEnd, ok := channel.(channeltypes.Channel)
if !ok {
return sdkerrors.Wrapf(sdkerrors.ErrInvalidType, "invalid channel type %T", channel)
}
bz, err := cdc.MarshalBinaryBare(&channelEnd)
if err != nil {
return err
}
if err := merkleProof.VerifyMembership(cs.ProofSpecs, consensusState.GetRoot(), path, bz); err != nil {
return sdkerrors.Wrap(clienttypes.ErrFailedChannelStateVerification, err.Error())
}
return nil
}
// VerifyPacketCommitment verifies a proof of an outgoing packet commitment at
// the specified port, specified channel, and specified sequence.
func (cs ClientState) VerifyPacketCommitment(
_ sdk.KVStore,
cdc codec.Marshaler,
height uint64,
prefix commitmentexported.Prefix,
proof []byte,
portID,
channelID string,
sequence uint64,
commitmentBytes []byte,
consensusState clientexported.ConsensusState,
) error {
merkleProof, err := sanitizeVerificationArgs(cdc, cs, height, prefix, proof, consensusState)
if err != nil {
return err
}
path, err := commitmenttypes.ApplyPrefix(prefix, host.PacketCommitmentPath(portID, channelID, sequence))
if err != nil {
return err
}
if err := merkleProof.VerifyMembership(cs.ProofSpecs, consensusState.GetRoot(), path, commitmentBytes); err != nil |
return nil
}
// VerifyPacketAcknowledgement verifies a proof of an incoming packet
// acknowledgement at the specified port, specified channel, and specified sequence.
func (cs ClientState) VerifyPacketAcknowledgement(
_ sdk.KVStore,
cdc codec.Marshaler,
height uint64,
prefix commitmentexported.Prefix,
proof []byte,
portID,
channelID string,
sequence uint64,
acknowledgement []byte,
consensusState clientexported.ConsensusState,
) error {
merkleProof, err := sanitizeVerificationArgs(cdc, cs, height, prefix, proof, consensusState)
if err != nil {
return err
}
path, err := commitmenttypes.ApplyPrefix(prefix, host.PacketAcknowledgementPath(portID, channelID, sequence))
if err != nil {
return err
}
if err := merkleProof.VerifyMembership(cs.ProofSpecs, consensusState.GetRoot(), path, channeltypes.CommitAcknowledgement(acknowledgement)); err != nil {
return sdkerrors.Wrap(clienttypes.ErrFailedPacketAckVerification, err.Error())
}
return nil
}
// VerifyPacketAcknowledgementAbsence verifies a proof of the absence of an
// incoming packet acknowledgement at the specified port, specified channel, and
// specified sequence.
func (cs ClientState) VerifyPacketAcknowledgementAbsence(
_ sdk.KVStore,
cdc codec.Marshaler,
height uint64,
prefix commitmentexported.Prefix,
proof []byte,
portID,
channelID string,
sequence uint64,
consensusState clientexported.ConsensusState,
) error {
merkleProof, err := sanitizeVerificationArgs(cdc, cs, height, prefix, proof, consensusState)
if err != nil {
return err
}
path, err := commitmenttypes.ApplyPrefix(prefix, host.PacketAcknowledgementPath(portID, channelID, sequence))
if err != nil {
return err
}
if err := merkleProof.VerifyNonMembership(cs.ProofSpecs, consensusState.GetRoot(), path); err != nil {
return sdkerrors.Wrap(clienttypes.ErrFailedPacketAckAbsenceVerification, err.Error())
}
return nil
}
// VerifyNextSequenceRecv verifies a proof of the next sequence number to be
// received of the specified channel at the specified port.
func (cs ClientState) VerifyNextSequenceRecv(
_ sdk.KVStore,
cdc codec.Marshaler,
height uint64,
prefix commitmentexported.Prefix,
proof []byte,
portID,
channelID string,
nextSequenceRecv uint64,
consensusState clientexported.ConsensusState,
) error {
merkleProof, err := sanitizeVerificationArgs(cdc, cs, height, prefix, proof, consensusState)
if err != nil {
return err
}
path, err := commitmenttypes.ApplyPrefix(prefix, host.NextSequenceRecvPath(portID, channelID))
if err != nil {
return err
}
bz := sdk.Uint64ToBigEndian(nextSequenceRecv)
if err := merkleProof.VerifyMembership(cs.ProofSpecs, consensusState.GetRoot(), path, bz); err != nil {
return sdkerrors.Wrap(clienttypes.ErrFailedNextSeqRecvVerification, err.Error())
}
return nil
}
// sanitizeVerificationArgs perfoms the basic checks on the arguments that are
// shared between the verification functions and returns the unmarshalled
// merkle proof and an error if one occurred.
func sanitizeVerificationArgs(
cdc codec.Marshaler,
cs ClientState,
height uint64,
prefix commitmentexported.Prefix,
proof []byte,
consensusState clientexported.ConsensusState,
) (merkleProof commitmenttypes.MerkleProof, err error) {
if cs.GetLatestHeight() < height {
return commitmenttypes.MerkleProof{}, sdkerrors.Wrapf(
sdkerrors.ErrInvalidHeight,
"client state (%s) height < proof height (%d < %d)", cs.ID, cs.GetLatestHeight(), height,
)
}
if cs.IsFrozen() && cs.FrozenHeight <= height {
return commitmenttypes.MerkleProof{}, clienttypes.ErrClientFrozen
}
if prefix == nil {
return commitmenttypes.MerkleProof{}, sdkerrors.Wrap(commitmenttypes.ErrInvalidPrefix, "prefix cannot be empty")
}
_, ok := prefix.(*commitmenttypes.MerklePrefix)
if !ok {
return commitmenttypes.MerkleProof{}, sdkerrors.Wrapf(commitmenttypes.ErrInvalidPrefix, "invalid prefix type %T, expected *MerklePrefix", prefix)
}
if proof == nil {
return commitmenttypes.MerkleProof{}, sdkerrors.Wrap(commitmenttypes.ErrInvalidProof, "proof cannot be empty")
}
if err = cdc.UnmarshalBinaryBare(proof, &merkleProof); err != nil {
return commitmenttypes.MerkleProof{}, sdkerrors.Wrap(commitmenttypes.ErrInvalidProof, "failed to unmarshal proof into commitment merkle proof")
}
if consensusState == nil {
return commitmenttypes.MerkleProof{}, sdkerrors.Wrap(clienttypes.ErrInvalidConsensus, "consensus state cannot be empty")
}
_, ok = consensusState.(ConsensusState)
if !ok {
return commitmenttypes.MerkleProof{}, sdkerrors.Wrapf(clienttypes.ErrInvalidConsensus, "invalid consensus type %T, expected %T", consensusState, ConsensusState{})
}
return merkleProof, nil
}
| {
return sdkerrors.Wrap(clienttypes.ErrFailedPacketCommitmentVerification, err.Error())
} |
map.go | package syncmap
| Load(key interface{}) (value interface{}, ok bool)
LoadOrStore(key, value interface{}) (actual interface{}, loaded bool)
} | type Map interface {
Delete(key interface{})
Range(f func(key, value interface{}) bool)
Store(key, value interface{}) |
par_bridge.rs | use crossbeam_deque::{Steal, Stealer, Worker};
use parking_lot::Mutex;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::thread::yield_now;
use crate::current_num_threads;
use crate::iter::plumbing::{bridge_unindexed, Folder, UnindexedConsumer, UnindexedProducer};
use crate::iter::ParallelIterator;
/// Conversion trait to convert an `Iterator` to a `ParallelIterator`.
///
/// This creates a "bridge" from a sequential iterator to a parallel one, by distributing its items
/// across the Rayon thread pool. This has the advantage of being able to parallelize just about
/// anything, but the resulting `ParallelIterator` can be less efficient than if you started with
/// `par_iter` instead. However, it can still be useful for iterators that are difficult to
/// parallelize by other means, like channels or file or network I/O.
///
/// The resulting iterator is not guaranteed to keep the order of the original iterator.
///
/// # Examples
///
/// To use this trait, take an existing `Iterator` and call `par_bridge` on it. After that, you can
/// use any of the `ParallelIterator` methods:
///
/// ```
/// use rayon::iter::ParallelBridge;
/// use rayon::prelude::ParallelIterator;
/// use std::sync::mpsc::channel;
///
/// let rx = {
/// let (tx, rx) = channel();
///
/// tx.send("one!");
/// tx.send("two!");
/// tx.send("three!");
///
/// rx
/// };
///
/// let mut output: Vec<&'static str> = rx.into_iter().par_bridge().collect();
/// output.sort_unstable();
///
/// assert_eq!(&*output, &["one!", "three!", "two!"]);
/// ```
pub trait ParallelBridge: Sized {
/// Creates a bridge from this type to a `ParallelIterator`.
fn par_bridge(self) -> IterBridge<Self>;
}
impl<T: Iterator + Send> ParallelBridge for T
where
T::Item: Send,
{
fn par_bridge(self) -> IterBridge<Self> {
IterBridge { iter: self }
}
}
/// `IterBridge` is a parallel iterator that wraps a sequential iterator.
///
/// This type is created when using the `par_bridge` method on `ParallelBridge`. See the
/// [`ParallelBridge`] documentation for details.
///
/// [`ParallelBridge`]: trait.ParallelBridge.html
#[derive(Debug, Clone)]
pub struct IterBridge<Iter> {
iter: Iter,
}
impl<Iter: Iterator + Send> ParallelIterator for IterBridge<Iter>
where
Iter::Item: Send,
{
type Item = Iter::Item;
fn drive_unindexed<C>(self, consumer: C) -> C::Result
where
C: UnindexedConsumer<Self::Item>,
{
let split_count = AtomicUsize::new(current_num_threads());
let worker = Worker::new_fifo();
let stealer = worker.stealer();
let done = AtomicBool::new(false);
let iter = Mutex::new((self.iter, worker));
bridge_unindexed(
IterParallelProducer {
split_count: &split_count,
done: &done,
iter: &iter,
items: stealer,
},
consumer,
)
}
}
struct IterParallelProducer<'a, Iter: Iterator> {
split_count: &'a AtomicUsize,
done: &'a AtomicBool,
iter: &'a Mutex<(Iter, Worker<Iter::Item>)>,
items: Stealer<Iter::Item>,
}
// manual clone because T doesn't need to be Clone, but the derive assumes it should be
impl<'a, Iter: Iterator + 'a> Clone for IterParallelProducer<'a, Iter> {
fn clone(&self) -> Self {
IterParallelProducer {
split_count: self.split_count,
done: self.done,
iter: self.iter,
items: self.items.clone(), | }
}
impl<'a, Iter: Iterator + Send + 'a> UnindexedProducer for IterParallelProducer<'a, Iter>
where
Iter::Item: Send,
{
type Item = Iter::Item;
fn split(self) -> (Self, Option<Self>) {
let mut count = self.split_count.load(Ordering::SeqCst);
loop {
let done = self.done.load(Ordering::SeqCst);
match count.checked_sub(1) {
Some(new_count) if !done => {
let last_count =
self.split_count
.compare_and_swap(count, new_count, Ordering::SeqCst);
if last_count == count {
return (self.clone(), Some(self));
} else {
count = last_count;
}
}
_ => {
return (self, None);
}
}
}
}
fn fold_with<F>(self, mut folder: F) -> F
where
F: Folder<Self::Item>,
{
loop {
match self.items.steal() {
Steal::Success(it) => {
folder = folder.consume(it);
if folder.full() {
return folder;
}
}
Steal::Empty => {
if self.done.load(Ordering::SeqCst) {
// the iterator is out of items, no use in continuing
return folder;
} else {
// our cache is out of items, time to load more from the iterator
match self.iter.try_lock() {
Some(mut guard) => {
let count = current_num_threads();
let count = (count * count) * 2;
let (ref mut iter, ref worker) = *guard;
// while worker.len() < count {
// FIXME the new deque doesn't let us count items. We can just
// push a number of items, but that doesn't consider active
// stealers elsewhere.
for _ in 0..count {
if let Some(it) = iter.next() {
worker.push(it);
} else {
self.done.store(true, Ordering::SeqCst);
break;
}
}
}
None => {
// someone else has the mutex, just sit tight until it's ready
yield_now(); //TODO: use a thread=pool-aware yield? (#548)
}
}
}
}
Steal::Retry => (),
}
}
}
} | } |
group_test.go | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the Apache v2.0 License.
package group
import (
"testing"
"github.com/microsoft/moc-sdk-for-go/services/cloud"
wssdcloud "github.com/microsoft/moc/rpc/cloudagent/cloud"
)
var (
name = "test"
Id = "1234"
)
func Test_getWssdGroup(t *testing.T) {
grp := &cloud.Group{
Name: &name,
ID: &Id,
}
wssdcloudGroup := getWssdGroup(grp)
if *grp.ID != wssdcloudGroup.Id {
t.Errorf("ID doesnt match post conversion")
}
if *grp.Name != wssdcloudGroup.Name {
t.Errorf("Name doesnt match post conversion")
}
}
func | (t *testing.T) {
wssdcloudGroup := &wssdcloud.Group{
Name: name,
Id: Id,
}
grp := getGroup(wssdcloudGroup)
if *grp.ID != wssdcloudGroup.Id {
t.Errorf("ID doesnt match post conversion")
}
if *grp.Name != wssdcloudGroup.Name {
t.Errorf("Name doesnt match post conversion")
}
}
| Test_getGroup |
native.py | from __future__ import absolute_import
import ast
import re
import operator as op
import pyparsing
from ..exceptions import CloudflareSolveError
from . import JavaScriptInterpreter
# ------------------------------------------------------------------------------- #
_OP_MAP = {
ast.Add: op.add,
ast.Sub: op.sub,
ast.Mult: op.mul,
ast.Div: op.truediv,
ast.Invert: op.neg,
}
# ------------------------------------------------------------------------------- #
class Calc(ast.NodeVisitor):
def visit_BinOp(self, node):
|
# ------------------------------------------------------------------------------- #
def visit_Num(self, node):
return node.n
# ------------------------------------------------------------------------------- #
def visit_Expr(self, node):
return self.visit(node.value)
# ------------------------------------------------------------------------------- #
@classmethod
def doMath(cls, expression):
tree = ast.parse(expression)
calc = cls()
return calc.visit(tree.body[0])
# ------------------------------------------------------------------------------- #
class Parentheses(object):
def fix(self, s):
res = []
self.visited = set([s])
self.dfs(s, self.invalid(s), res)
return res
# ------------------------------------------------------------------------------- #
def dfs(self, s, n, res):
if n == 0:
res.append(s)
return
for i in range(len(s)):
if s[i] in ['(', ')']:
s_new = s[:i] + s[i + 1:]
if s_new not in self.visited and self.invalid(s_new) < n:
self.visited.add(s_new)
self.dfs(s_new, self.invalid(s_new), res)
# ------------------------------------------------------------------------------- #
def invalid(self, s):
plus = minus = 0
memo = {"(": 1, ")": -1}
for c in s:
plus += memo.get(c, 0)
minus += 1 if plus < 0 else 0
plus = max(0, plus)
return plus + minus
# ------------------------------------------------------------------------------- #
class ChallengeInterpreter(JavaScriptInterpreter):
def __init__(self):
super(ChallengeInterpreter, self).__init__('native')
# ------------------------------------------------------------------------------- #
def eval(self, body, domain):
operators = {
'+': op.add,
'-': op.sub,
'*': op.mul,
'/': op.truediv
}
# ------------------------------------------------------------------------------- #
def flatten(lists):
return sum(map(flatten, lists), []) if isinstance(lists, list) else [lists]
# ------------------------------------------------------------------------------- #
def jsfuckToNumber(jsFuck):
# "Clean Up" JSFuck
jsFuck = jsFuck.replace('!+[]', '1').replace('!![]', '1').replace('[]', '0')
jsFuck = jsFuck.lstrip('+').replace('(+', '(').replace(' ', '')
jsFuck = Parentheses().fix(jsFuck)[0]
# Hackery Parser for Math
stack = []
bstack = []
for i in flatten(pyparsing.nestedExpr().parseString(jsFuck).asList()):
if i == '+':
stack.append(bstack)
bstack = []
continue
bstack.append(i)
stack.append(bstack)
return int(''.join([str(Calc.doMath(''.join(i))) for i in stack]))
# ------------------------------------------------------------------------------- #
def divisorMath(payload, needle, domain):
jsfuckMath = payload.split('/')
if needle in jsfuckMath[1]:
expression = re.findall(r"^(.*?)(.)\(function", jsfuckMath[1])[0]
expression_value = operators[expression[1]](
float(jsfuckToNumber(expression[0])),
float(ord(domain[jsfuckToNumber(jsfuckMath[1][
jsfuckMath[1].find('"("+p+")")}') + len('"("+p+")")}'):-2
])]))
)
else:
expression_value = jsfuckToNumber(jsfuckMath[1])
expression_value = jsfuckToNumber(jsfuckMath[0]) / float(expression_value)
return expression_value
# ------------------------------------------------------------------------------- #
def challengeSolve(body, domain):
jschl_answer = 0
try:
jsfuckChallenge = re.search(
r"setTimeout\(function\(\){\s+var.*?f,\s*(?P<variable>\w+).*?:(?P<init>\S+)};"
r".*?\('challenge-form'\);.*?;(?P<challenge>.*?a\.value)\s*=\s*\S+\.toFixed\(10\);",
body,
re.DOTALL | re.MULTILINE
).groupdict()
except AttributeError:
raise CloudflareSolveError('There was an issue extracting "jsfuckChallenge" from the Cloudflare challenge.')
kJSFUCK = re.search(r'(;|)\s*k.=(?P<kJSFUCK>\S+);', jsfuckChallenge['challenge'], re.S | re.M)
if kJSFUCK:
try:
kJSFUCK = jsfuckToNumber(kJSFUCK.group('kJSFUCK'))
except IndexError:
raise CloudflareSolveError('There was an issue extracting "kJSFUCK" from the Cloudflare challenge.')
try:
kID = re.search(r"\s*k\s*=\s*'(?P<kID>\S+)';", body).group('kID')
except IndexError:
raise CloudflareSolveError('There was an issue extracting "kID" from the Cloudflare challenge.')
try:
r = re.compile(r'<div id="{}(?P<id>\d+)">\s*(?P<jsfuck>[^<>]*)</div>'.format(kID))
kValues = {}
for m in r.finditer(body):
kValues[int(m.group('id'))] = m.group('jsfuck')
jsfuckChallenge['k'] = kValues[kJSFUCK]
except (AttributeError, IndexError):
raise CloudflareSolveError('There was an issue extracting "kValues" from the Cloudflare challenge.')
jsfuckChallenge['challenge'] = re.finditer(
r'{}.*?([+\-*/])=(.*?);(?=a\.value|{})'.format(
jsfuckChallenge['variable'],
jsfuckChallenge['variable']
),
jsfuckChallenge['challenge']
)
# ------------------------------------------------------------------------------- #
if '/' in jsfuckChallenge['init']:
val = jsfuckChallenge['init'].split('/')
jschl_answer = jsfuckToNumber(val[0]) / float(jsfuckToNumber(val[1]))
else:
jschl_answer = jsfuckToNumber(jsfuckChallenge['init'])
# ------------------------------------------------------------------------------- #
for expressionMatch in jsfuckChallenge['challenge']:
oper, expression = expressionMatch.groups()
if '/' in expression:
expression_value = divisorMath(expression, 'function(p)', domain)
else:
if 'Element' in expression:
expression_value = divisorMath(jsfuckChallenge['k'], '"("+p+")")}', domain)
else:
expression_value = jsfuckToNumber(expression)
jschl_answer = operators[oper](jschl_answer, expression_value)
# ------------------------------------------------------------------------------- #
# if not jsfuckChallenge['k'] and '+ t.length' in body:
# jschl_answer += len(domain)
# ------------------------------------------------------------------------------- #
return '{0:.10f}'.format(jschl_answer)
# ------------------------------------------------------------------------------- #
return challengeSolve(body, domain)
# ------------------------------------------------------------------------------- #
ChallengeInterpreter()
| return _OP_MAP[type(node.op)](self.visit(node.left), self.visit(node.right)) |
htmlexample_module.py | # Example module for Online Python Tutor
# Philip Guo
# 2013-08-03
# To get the Online Python Tutor backend to import this custom module,
# add its filename ('htmlexample_module') to the CUSTOM_MODULE_IMPORTS
# tuple in pg_logger.py
# To see an example of this module at work, write the following code in
# http://pythontutor.com/visualize.html
'''
from htmlexample_module import ColorTable
t = ColorTable(3, 4)
t.set_color(0, 0, 'red')
t.render_HTML()
t.set_color(1, 1, 'green')
t.render_HTML()
t.set_color(2, 2, 'blue')
t.render_HTML()
for i in range(3):
for j in range(4):
t.set_color(i, j, 'gray')
t.render_HTML()
'''
# defines a simple table where you can set colors for individual rows and columns
class ColorTable:
def __init__(self, num_rows, num_columns):
self.num_rows = num_rows
self.num_columns = num_columns
# create a 2D matrix of empty strings
self.table = []
for i in range(self.num_rows):
new_lst = ['' for e in range(self.num_columns)]
self.table.append(new_lst)
# color must be a legal HTML color string
def set_color(self, row, column, color):
assert 0 <= row < self.num_rows
assert 0 <= column < self.num_columns
self.table[row][column] = color
# call this function whenever you want to render this table in HTML
def render_HTML(self):
# incrementally build up an HTML table string
html_string = '<table>'
for i in range(self.num_rows):
html_string += '<tr>'
for j in range(self.num_columns):
color = self.table[i][j]
if not color:
color = "white" | html_string += '''<td style="width: 30px; height: 30px; border: 1px solid black;
background-color: %s;"></td>''' % color
html_string += '</tr>'
html_string += '</table>'
# then call the magic setHTML function
setHTML(html_string) |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.