file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
form_editable.js | /* ------------------------------------------------------------------------------
*
* # Editable component
*
* Specific JS code additions for form_editable.html page
*
* Version: 1.0
* Latest update: Aug 1, 2015
*
* ---------------------------------------------------------------------------- */
$(function() {
// Override defaults
// ------------------------------
// Disable highlight
$.fn.editable.defaults.highlight = false;
// Output template
$.fn.editableform.template = '<form class="editableform">' +
'<div class="control-group">' +
'<div class="editable-input"></div> <div class="editable-buttons"></div>' +
'<div class="editable-error-block"></div>' +
'</div> ' +
'</form>'
// Set popup mode as default
$.fn.editable.defaults.mode = 'popup';
// Buttons
$.fn.editableform.buttons =
'<button type="submit" class="btn btn-primary btn-icon editable-submit"><i class="icon-check"></i></button>' +
'<button type="button" class="btn btn-default btn-icon editable-cancel"><i class="icon-x"></i></button>';
// Demo settings
// ------------------------------
// Toggle editable state
var toggleState = document.querySelector('.switchery');
var toggleStateInit = new Switchery(toggleState);
toggleState.onchange = function() {
if(toggleState.checked) {
$('.editable').editable('enable');
}
else {
$('.editable').editable('disable');
}
};
// Write log in console
function log(settings, response) {
var s = [], str;
s.push(settings.type.toUpperCase() + ' url = "' + settings.url + '"');
for(var a in settings.data) {
if(settings.data[a] && typeof settings.data[a] === 'object') {
str = [];
for(var j in settings.data[a]) {str.push(j+': "'+settings.data[a][j]+'"');}
str = '{ '+str.join(', ')+' }';
}
else {
str = '"'+settings.data[a]+'"';
}
s.push(a + ' = ' + str);
}
s.push('RESPONSE: status = ' + response.status);
if(response.responseText) {
if($.isArray(response.responseText)) {
s.push('[');
$.each(response.responseText, function(i, v){
s.push('{value: ' + v.value+', text: "'+v.text+'"}');
});
s.push(']');
}
else {
s.push($.trim(response.responseText));
}
}
s.push('--------------------------------------\n');
};
// Basic editable components
// ------------------------------
// Editable text field
$('#text-field').editable();
// Disable clear button
$('#disabled-clear').editable({
clear: false
});
// With helper text
$('#text-field-help').editable();
$('#text-field-help').on('shown', function(e, editable) {
$('<span class="help-block">This is a help block</div>').insertAfter(editable.input.$input);
});
// Empty field
$('#empty-field').editable();
// Required text field
$('#empty-field-validate').editable({
validate: function(value) {
if($.trim(value) == '') return 'This field is required';
}
});
//
// Textareas
//
// Textarea
$('#textarea').editable({
rows: 2,
showbuttons: 'bottom'
});
// Elastic textarea
$('#textarea-elastic').editable({
rows: 2,
showbuttons: 'bottom'
});
$('#textarea-elastic').on('shown', function(e, editable) {
editable.input.$input.addClass('elastic').autosize();
});
//
// Buttons and icons
//
// Button variations
$('#button-variation').editable();
$('#button-variation').on('shown', function(e, editable) {
editable.input.$input.parents('.editable-input').parent().find('.editable-submit').removeClass('btn-success').addClass('btn-danger');
editable.input.$input.parents('.editable-input').parent().find('.editable-cancel').removeClass('btn-danger').addClass('btn-success');
});
// Icon variations
$('#icon-variation').editable();
$('#icon-variation').on('shown', function(e, editable) {
editable.input.$input.parents('.editable-input').parent().find('.editable-submit').children().removeClass('icon-check').addClass('icon-task');
editable.input.$input.parents('.editable-input').parent().find('.editable-cancel').children().removeClass('icon-x').addClass('icon-menu6');
});
//
// Dates
//
// Date field
$('#date').editable({
showbuttons: 'bottom'
});
// Date picker
$('#datepicker').editable({
showbuttons: 'bottom',
datepicker: {
isRTL: true,
autoSize: true
}
});
// Date time
$('#datetime').editable({
combodate: {
firstItem: 'name'
},
showbuttons: 'bottom'
});
//
// Multiple fields
//
// Simulate ajax requests
$.mockjax({
url: '/address',
response: function(settings) {
log(settings, this);
}
});
// Initialize
$('#multiple-fields').editable({
url: '/address',
showbuttons: 'bottom',
value: {
city: "Moscow",
street: "Lenina",
building: "12"
},
tpl: '<div class="editable-address form-group"><label>City: </label><input type="text" name="city" class="form-control"></div>'+
'<div class="editable-address form-group"><label>Street: </label><input type="text" name="street" class="form-control"></div>'+
'<div class="editable-address form-group"><label>Building: </label><input type="text" name="building" class="form-control"></div>',
validate: function(value) {
if(value.city == '') return 'city is required!';
},
display: function(value) {
if(!value) {
$(this).empty();
return;
}
var html = '<b>' + $('<div>').text(value.city).html() + '</b>, ' + $('<div>').text(value.street).html() + ' st., bld. ' + $('<div>').text(value.building).html();
$(this).html(html);
}
});
//
// Input groups
//
// Addons
$('#input-group-addon').editable();
$('#input-group-addon').on('shown', function (e, editable) {
editable.input.$input.wrap('<div class="input-group"></div>');
$('<span class="input-group-addon"><i class="icon-mention"></i></span>').insertBefore(editable.input.$input);
});
// Buttons
$('#input-group-button').editable();
$('#input-group-button').on('shown', function (e, editable) {
editable.input.$input.wrap('<div class="input-group"></div>');
$('<div class="input-group-btn"><button type="button" class="btn btn-default">Button</button></div>').insertBefore(editable.input.$input);
});
// Dropdown
$('#input-group-dropdown').editable();
$('#input-group-dropdown').on('shown', function (e, editable) {
editable.input.$input.wrap('<div class="input-group"></div>');
$('<div class="input-group-btn"><button type="button" class="btn btn-default btn-icon" data-toggle="dropdown"><i class="icon-cog5"></i> <span class="caret"></span></button> <ul class="dropdown-menu"> <li><a href="#">Action</a></li> <li><a href="#">Another action</a></li> <li><a href="#">Something else here</a></li> </ul> </div>').insertBefore(editable.input.$input);
});
// Basic selects
// ------------------------------
// Basic select
$('#select-default').editable({
prepend: "Not selected",
source: [
{value: 1, text: 'Male'},
{value: 2, text: 'Female'}
],
display: function(value, sourceData) {
var colors = {"": "gray", 1: "green", 2: "blue"},
elem = $.grep(sourceData, function(o){return o.value == value;});
if(elem.length) {
$(this).text(elem[0].text).css("color", colors[value]);
}
else {
$(this).empty();
}
}
});
//
// Dependent select
//
// Simulate ajax requests
$.mockjax({
url: '/default-list',
responseTime: 400,
response: function(settings) {
if(settings.data.value == 'err') {
this.status = 500;
this.responseText = 'Validation error!';
}
else {
this.responseText = '';
}
}
});
// Data
var sources = {
1: [{value: 11, text: 11}, {value: 111, text: 111}],
2: [{value: 22, text: 22}, {value: 222, text: 222}]
};
// Initialize first list
$('#default-list').editable({
url: '/default-list',
pk: 1,
source: [{value: 1, text: 'text1'}, {value: 2, text: 'text2'}],
title: 'Select1',
success: function(response, newValue) {
$('#dependent-list').editable('option', 'source', sources[newValue]);
$('#dependent-list').editable('setValue', null);
}
});
// Initialize dependent list
$('#dependent-list').editable({
url: '/default-list',
pk: 1,
title: 'Select2',
sourceError: 'Please, select value in first list'
});
//
// Select with remote source
//
// Simulate ajax requests
$.mockjax({
url: '/remote',
response: function(settings) {
this.responseText = [
{value: 0, text: 'Guest'},
{value: 1, text: 'Service'},
{value: 2, text: 'Customer'},
{value: 3, text: 'Operator'},
{value: 4, text: 'Support'},
{value: 5, text: 'Admin'}
];
log(settings, this);
}
});
// Initialize
$('#select-default-remote').editable({
source: '/remote',
showbuttons: false
});
//
// Select with loading error
//
// Simulate ajax requests
$.mockjax({
url: '/error',
status: 500,
response: function(settings) {
this.responseText = 'Internal Server Error';
}
});
// Initialize
$('#select-default-error').editable({
source: '/error'
});
// Checkboxes and radios
// ------------------------------
// Single unstyled checkbox
$('#single-unstyled-checkbox').editable({
source: {'1': 'Enabled'},
emptytext: 'Disabled',
showbuttons: 'bottom',
tpl: '<div class="checkbox"></div>'
});
// Single styled checkbox
$('#single-styled-checkbox').editable({
source: {'1': 'Enabled'},
emptytext: 'Disabled',
showbuttons: 'bottom',
tpl: '<div class="checkbox"></div>'
});
$('#single-styled-checkbox').on('shown', function(e, editable) {
editable.input.$input.uniform();
});
// Initialize uniform
$(".styled, .multiselect-container input, .file-input > :file").uniform({
radioClass: 'choice',
fileButtonHtml: '<i class="icon-googleplus"></i>'
});
//
// Checklists
//
// Unstyled checklist
$('#unstyled-checklist').editable({
source: [
{value: 1, text: 'banana'},
{value: 2, text: 'peach'},
{value: 3, text: 'apple'},
{value: 4, text: 'watermelon'},
{value: 5, text: 'orange'}
],
showbuttons: 'bottom',
tpl: '<div class="checkbox"></div>'
});
// Styled checklist
$('#styled-checklist').editable({
source: [
{value: 1, text: 'banana'},
{value: 2, text: 'peach'},
{value: 3, text: 'apple'},
{value: 4, text: 'watermelon'},
{value: 5, text: 'orange'}
],
showbuttons: 'bottom',
tpl: '<div class="checkbox"></div>'
});
// Update uniform dynamically
$('#styled-checklist').on('shown', function(e, editable) {
editable.input.$input.uniform();
});
//
// Single switchery toggle
//
// Initialize plugin
$('#switchery-checkbox').editable({
source: {'1': 'Enabled'},
emptytext: 'Disabled',
showbuttons: 'bottom',
tpl: '<div class="checkbox checkbox-switchery switchery-xs"></div>'
});
// Initialize plugin and insert in editable popup on show
$('#switchery-checkbox').on('shown', function (e, editable) {
editable.input.$input.addClass('switcher-single');
var elem = document.querySelector('.switcher-single');
var init = new Switchery(elem);
});
//
// Switchery checklist
//
// Initialize plugin
$('#switchery-checklist').editable({
source: [
{value: 1, text: 'banana'},
{value: 2, text: 'peach'},
{value: 3, text: 'apple'},
{value: 4, text: 'watermelon'},
{value: 5, text: 'orange'}
],
showbuttons: 'bottom',
tpl: '<div class="checkbox checkbox-switchery switchery-xs"></div>'
});
// Initialize plugin and insert in editable popup on show
$('#switchery-checklist').on('shown', function(e, editable) {
editable.input.$input.addClass('switcher');
var elems = Array.prototype.slice.call(document.querySelectorAll('.switcher'));
elems.forEach(function(html) {
var switchery = new Switchery(html);
});
});
//
// Unordered checkbox list
//
// Initialize editable
$.mockjax({
url: '/checkbox-unordered-list',
status: 200,
responseTime: 200
});
// Add data
$.mockjax({
url: '/source-ul',
status: 200,
responseTime: 400,
response: function(settings) {
this.responseText = [
{value: 0, text: 'Guest'},
{value: 1, text: 'Service'},
{value: 2, text: 'Customer'},
{value: 3, text: 'Operator'},
{value: 4, text: 'Support'},
{value: 5, text: 'Admin'}
];
}
});
// Initialize plugin
$('#checkbox-unordered-list').editable({
source: '/source-ul',
url: '/checkbox-unordered-list',
display: function(value, sourceData) {
var $el = $('#list'),
checked, html = '';
if(!value) {
$el.empty();
return;
}
checked = $.grep(sourceData, function(o){
return $.grep(value, function(v){
return v == o.value;
}).length;
});
$.each(checked, function(i, v) {
html+= '<li>'+$.fn.editableutils.escape(v.text)+'</li>';
});
if(html) html = '<ul class="list-inline" style="margin-top: 10px;">'+html+'</ul>';
$el.html(html);
},
showbuttons: 'bottom',
tpl: '<div class="checkbox"></div>'
});
// Initialize plugin and insert in editable popup on show
$('#checkbox-unordered-list').on('shown', function(e, editable) {
editable.input.$input.uniform();
});
// Advanced initialization
// ------------------------------
//
// Autotext option
//
// Simulate ajax requests
$.mockjax({
url: '/autotext-url',
status: 200,
responseTime: 200
});
// Add data
$.mockjax({
url: '/groups',
status: 200,
responseTime: 400,
response: function(settings) {
this.responseText = [
{value: 0, text: 'Guest'},
{value: 1, text: 'Service'},
{value: 2, text: 'Customer'},
{value: 3, text: 'Operator'},
{value: 4, text: 'Support'},
{value: 5, text: 'Admin'}
];
}
});
// Initialize
$('#editable-autotext').editable({
url: '/autotext-url'
});
//
// PUT method submit
//
// Simulate ajax requests
$.mockjax({
url: '/editable-put-submit',
responseTime: 200,
response: function(settings) {
console.log(settings);
}
});
// Initialize
$('#editable-put-submit').editable({
url: '/editable-put-submit',
ajaxOptions: {
type: 'put'
}
});
//
// Render server response
//
// Simulate ajax requests
$.mockjax({
url: '/editable-render-response',
responseTime: 400,
response: function(settings) {
this.responseText = 'New value: <b>'+settings.data.value+'</b>';
}
});
// Initialize
$('#editable-render-response').editable({
url: '/editable-render-response',
display: function(value, response) {
$(this).html(response);
}
});
//
// Process JSON response
//
// Simulate ajax requests
$.mockjax({
url: '/editable-json-response',
responseTime: 200,
response: function(settings) {
if(settings.data.value) {
this.responseText = '{"success": true}';
}
else {
this.responseText = '{"success": false, "msg": "required"}';
}
}
});
// Initialize
$('#editable-json-response').editable({
url: '/editable-json-response',
ajaxOptions: {
dataType: 'json'
},
success: function(response, newValue) {
if(!response) {
return "Unknown error!";
}
if(response.success === false) {
return response.msg;
}
}
});
//
// Input types
//
// Simulate ajax requests
$.mockjax({
url: '/post-fields', | log(settings, this);
}
});
// Password
$('#type-password').editable({
url: '/post-fields',
title: 'Enter your password'
});
// Email
$('#type-email').editable({
url: '/post-fields',
title: 'Enter your email'
});
// Url
$('#type-url').editable({
url: '/post-fields',
title: 'Enter URL'
});
// Tel
$('#type-tel').editable({
url: '/post-fields',
title: 'Enter phone number'
});
// Number
$('#type-number').editable({
url: '/post-fields',
title: 'Enter any number'
});
// Range
$('#type-range').editable({
url: '/post-fields',
title: 'Number range'
});
// Time
$('#type-time').editable({
url: '/post-fields',
title: 'Time'
});
// Plugins and extensions
// ------------------------------
//
// Tag inputs
//
// Text tags input
$('#input-tags-text').editable({
showbuttons: 'bottom',
clear: false,
display: function(value) {
$(this).html(value).addClass('clearfix display-inline-block');
$(this).each(function() {
var text = $(this).text().split(',');
for( var i = 0, len = text.length; i < len; i++ ) {
text[i] = '<span class="text-left">' + text[i] + '</span>';
}
$(this).html(text.join('<span class="text-left">, </span>'));
});
}
});
$('#input-tags-text').on('shown', function(e, editable) {
editable.input.$input.tagsinput({
maxTags: 5
});
});
// Label tags input
$('#input-tags-labels').editable({
showbuttons: 'bottom',
clear: false,
display: function(value) {
$(this).html(value);
$(this).each(function() {
var text = $(this).text().split(',');
for( var i = 0, len = text.length; i < len; i++ ) {
text[i] = '<span class="label label-primary">' + text[i] + '</span>';
}
$(this).html(text.join(' '));
});
}
});
$('#input-tags-labels').on('shown', function(e, editable) {
editable.input.$input.tagsinput({
maxTags: 5
});
});
//
// Typeahead
//
// Initialize editable
$('#editable-typeahead').editable({
value: 'California'
});
// Typeahead
$('#editable-typeahead').on('shown', function(e, editable) {
var substringMatcher = function(strs) {
return function findMatches(q, cb) {
var matches, substringRegex;
// an array that will be populated with substring matches
matches = [];
// regex used to determine if a string contains the substring `q`
substrRegex = new RegExp(q, 'i');
// iterate through the pool of strings and for any string that
// contains the substring `q`, add it to the `matches` array
$.each(strs, function(i, str) {
if (substrRegex.test(str)) {
// the typeahead jQuery plugin expects suggestions to a
// JavaScript object, refer to typeahead docs for more info
matches.push({ value: str });
}
});
cb(matches);
};
};
// Data
var states = ['Alabama', 'Alaska', 'Arizona', 'Arkansas', 'California',
'Colorado', 'Connecticut', 'Delaware', 'Florida', 'Georgia', 'Hawaii',
'Idaho', 'Illinois', 'Indiana', 'Iowa', 'Kansas', 'Kentucky', 'Louisiana',
'Maine', 'Maryland', 'Massachusetts', 'Michigan', 'Minnesota',
'Mississippi', 'Missouri', 'Montana', 'Nebraska', 'Nevada', 'New Hampshire',
'New Jersey', 'New Mexico', 'New York', 'North Carolina', 'North Dakota',
'Ohio', 'Oklahoma', 'Oregon', 'Pennsylvania', 'Rhode Island',
'South Carolina', 'South Dakota', 'Tennessee', 'Texas', 'Utah', 'Vermont',
'Virginia', 'Washington', 'West Virginia', 'Wisconsin', 'Wyoming'
];
// Initialize typeahead
editable.input.$input.typeahead(
{
hint: true,
highlight: true,
minLength: 1
},
{
name: 'states',
displayKey: 'value',
source: substringMatcher(states)
}
).attr('dir', 'rtl');
});
//
// Touchspin spinners
//
// Basic
$('#input-touchspin-basic').editable({
clear: false
});
$('#input-touchspin-basic').on('shown', function(e, editable) {
editable.input.$input.TouchSpin({
min: 0,
max: 100,
step: 0.1,
decimals: 2
}).parent().parent().addClass('editable-touchspin');
});
// Advanced
$('#input-touchspin-advanced').editable({
clear: false
});
$('#input-touchspin-advanced').on('shown', function(e, editable) {
editable.input.$input.TouchSpin({
prefix: '<i class="icon-accessibility"></i>',
postfix: '<i class="icon-paragraph-justify2"></i>'
}).parent().parent().addClass('editable-touchspin');
});
//
// Input mask
//
// Initialize editable
$('#input-mask').editable({
emptytext: 'Your credit card number'
});
// Initialize plugin and insert in editable popup on show
$('#input-mask').on('shown', function(e, editable) {
editable.input.$input.inputmask({
mask: '9999-9999-9999-9999'
});
$('<span class="help-block">9999-9999-9999-9999</div>').insertAfter(editable.input.$input);
});
//
// Input formatter
//
// Initialize editable
$('#input-formatter').editable({
emptytext: 'Your birth date'
});
// Initialize plugin and insert in editable popup on show
$('#input-formatter').on('shown', function(e, editable) {
editable.input.$input.formatter({
pattern: '{{99}}/{{99}}/{{9999}}'
});
$('<span class="help-block">99/99/9999</div>').insertAfter(editable.input.$input);
});
// Select2 select
// ------------------------------
// Data
var select2_countries = [];
$.each(
{
"BD": "Bangladesh",
"BE": "Belgium",
"BF": "Burkina Faso",
"BG": "Bulgaria",
"BA": "Bosnia and Herzegovina",
"BB": "Barbados",
"WF": "Wallis and Futuna",
"BL": "Saint Bartelemey",
"BM": "Bermuda",
"BN": "Brunei Darussalam",
"BO": "Bolivia",
"BH": "Bahrain",
"BI": "Burundi",
"BJ": "Benin",
"BT": "Bhutan",
"JM": "Jamaica",
"BV": "Bouvet Island",
"BW": "Botswana",
"WS": "Samoa",
"BR": "Brazil",
"BS": "Bahamas",
"JE": "Jersey",
"BY": "Belarus",
"O1": "Other Country",
"LV": "Latvia",
"RW": "Rwanda",
"RS": "Serbia",
"TL": "Timor-Leste",
"RE": "Reunion",
"LU": "Luxembourg",
"TJ": "Tajikistan",
"RO": "Romania",
"PG": "Papua New Guinea",
"GW": "Guinea-Bissau",
"GU": "Guam",
"GT": "Guatemala",
"GS": "South Georgia and the South Sandwich Islands",
"GR": "Greece",
}, function(k, v) {
select2_countries.push({id: k, text: v});
}
);
// Single select
$('#select2-single').editable({
source: select2_countries,
select2: {
width: 200,
placeholder: 'Select country',
allowClear: true
}
});
// Multiple select
$('#select2-multiple').editable({
showbuttons: 'bottom',
select2: {
width: 300,
tags: ['html', 'javascript', 'css', 'ajax'],
tokenSeparators: [",", " "],
multiple: true,
placeholder: 'Select something'
}
});
//
// Remote source
//
// Initialize
$('#select2-single-remote').editable({
select2: {
width: '300',
placeholder: 'Select Country',
minimumInputLength: 1,
ajax: { // instead of writing the function to execute the request we use Select2's convenient helper
url: "http://api.rottentomatoes.com/api/public/v1.0/movies.json",
dataType: 'jsonp',
data: function (term, page) {
return {
q: term, // search term
page_limit: 10,
apikey: "ju6z9mjyajq2djue3gbvv26t" // please do not use so this example keeps working
};
},
results: function (data, page) { // parse the results into the format expected by Select2.
// since we are using custom formatting functions we do not need to alter remote JSON data
return {results: data.movies};
}
},
initSelection: function(element, callback) {
// the input tag has a value attribute preloaded that points to a preselected movie's id
// this function resolves that id attribute to an object that select2 can render
// using its formatResult renderer - that way the movie name is shown preselected
var id=$(element).val();
if (id!=="") {
$.ajax("http://api.rottentomatoes.com/api/public/v1.0/movies/"+id+".json", {
data: {
apikey: "ju6z9mjyajq2djue3gbvv26t"
},
dataType: "jsonp"
}).done(function(data) { callback(data); });
}
},
formatResult: movieFormatResult, // omitted for brevity, see the source of this page
formatSelection: movieFormatSelection, // omitted for brevity, see the source of this page
dropdownCssClass: "bigdrop", // apply css that makes the dropdown taller
escapeMarkup: function (m) { return m; } // we do not want to escape markup since we are displaying html in results
}
});
// Format results
function movieFormatResult(movie) {
var markup = "<table class='movie-result'><tr>";
if (movie.posters !== undefined && movie.posters.thumbnail !== undefined) {
markup += "<td class='movie-image'><img src='" + movie.posters.thumbnail + "'/></td>";
}
markup += "<td class='movie-info'><div class='movie-title'>" + movie.title + "</div>";
if (movie.critics_consensus !== undefined) {
markup += "<div class='movie-synopsis'>" + movie.critics_consensus + "</div>";
}
else if (movie.synopsis !== undefined) {
markup += "<div class='movie-synopsis'>" + movie.synopsis + "</div>";
}
markup += "</td></tr></table>";
return markup;
}
// Format selection
function movieFormatSelection(movie) {
return movie.title;
}
}); | response: function(settings) { |
convert_all.py | #! /usr/bin/env python2
import os
import sys
import subprocess
import select
from optparse import OptionParser
# Setup of the command-line arguments parser
text = "Usage: %prog [options] <root-folder>\n\nConvert (in-place) all the BLP files in <root-folder> and its subdirectories"
parser = OptionParser(text, version="%prog 1.0")
parser.add_option("--converter", action="store", default="BLPConverter", type="string",
dest="converter", metavar="CONVERTER",
help="Path to the BLPConverter executable")
parser.add_option("--remove", action="store_true", default=False,
dest="remove", help="Remove the BLP files successfully converted")
parser.add_option("--verbose", action="store_true", default=False,
dest="verbose", help="Verbose output") | # Handling of the arguments
(options, args) = parser.parse_args()
# Check the parameters
if len(args) != 1:
print "No root folder provided"
sys.exit(-1)
root_folder = args[0]
if root_folder[-1] != os.path.sep:
root_folder += os.path.sep
try:
subprocess.Popen('%s --help' % options.converter, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
except:
print "Can't execute BLPConverter at '%s'" % options.converter
sys.exit(-1)
# Walk the root folder
counter_success_total = 0
failed_total = []
for root, dirs, files in os.walk(root_folder):
if root == root_folder:
print "Processing '.'..."
else:
print "Processing '%s'..." % root[len(root_folder):]
blps = filter(lambda x: x.lower().endswith('.blp'), files)
counter_failed = 0
if len(blps) > 0:
current = os.getcwd()
os.chdir(root)
to_convert = blps
while len(to_convert) > 0:
p = subprocess.Popen('%s %s' % (options.converter, ' '.join([ '"%s"' % image for image in to_convert[0:10] ])), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
p.wait()
output = p.stdout.read()
failed = filter(lambda x: not(x.endswith(': OK')) and (len(x) > 0), output.split('\n'))
counter_failed += len(failed)
failed_total.extend(failed)
if options.verbose:
print ' * ' + output[:-1].replace('\n', '\n * ')
if options.remove:
failed2 = map(lambda x: x[0:x.find(':')], failed)
done = filter(lambda x: (x not in failed2) and (len(x) > 0), to_convert[0:10])
p = subprocess.Popen('rm -f %s' % (' '.join([ '"%s"' % image for image in done ])), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
p.wait()
to_convert = to_convert[10:]
os.chdir(current)
if counter_failed > 0:
print '%d images converted, %d images not converted' % (len(blps) - counter_failed, counter_failed)
else:
print '%d images converted' % (len(blps) - counter_failed)
print
counter_success_total += len(blps) - counter_failed
print '----------------------------------------------------------'
if len(failed_total) > 0:
print 'TOTAL: %d images converted, %d images not converted' % (counter_success_total, len(failed_total))
print
print 'Images not converted:'
for image in failed_total:
print ' * ' + image
else:
print 'TOTAL: %d images converted' % counter_success_total | |
bot_unsubscribe_test.go | package telegram
import (
"github.com/erikstmartin/go-testdb"
"github.com/jmoiron/sqlx"
"github.com/stretchr/testify/assert"
"github.com/tucnak/telebot"
"database/sql/driver"
"fmt"
"testing"
)
func TestUnsubscribeNotFound(t *testing.T) {
defer testdb.Reset()
var sendedMessage string
bot := bot{
connect: sqlx.MustOpen("testdb", ""),
telebot: &testSender{
send: func(recipient telebot.Recipient, message string, _ *telebot.SendOptions) error {
sendedMessage = message
return nil
},
},
}
testdb.SetQueryWithArgsFunc(func(query string, args []driver.Value) (driver.Rows, error) {
return testdb.RowsFromSlice(
[]string{"user_id", "telegram_id"},
[][]driver.Value{},
), nil
})
bot.unsubscribe(telebot.Message{
Sender: telebot.User{
Username: "test",
},
})
if assert.NotEmpty(t, sendedMessage) {
assert.Equal(t, fmt.Sprintf(UsernameNotFound, "test"), sendedMessage)
}
}
func TestSubscribeNotSubscribed(t *testing.T) {
defer testdb.Reset()
var sendedMessage string
bot := bot{
connect: sqlx.MustOpen("testdb", ""),
telebot: &testSender{
send: func(recipient telebot.Recipient, message string, _ *telebot.SendOptions) error {
sendedMessage = message
return nil
},
},
}
testdb.SetQueryWithArgsFunc(func(query string, args []driver.Value) (driver.Rows, error) {
return testdb.RowsFromSlice(
[]string{"user_id", "telegram_id"}, | [][]driver.Value{
{
"1",
"0",
},
},
), nil
})
bot.unsubscribe(telebot.Message{
Sender: telebot.User{
Username: "test",
},
})
if assert.NotEmpty(t, sendedMessage) {
assert.Equal(t, NotSubscribed, sendedMessage)
}
}
func TestUnsubscribeSenderIDNotMatching(t *testing.T) {
defer testdb.Reset()
var sendedMessage string
bot := bot{
connect: sqlx.MustOpen("testdb", ""),
telebot: &testSender{
send: func(recipient telebot.Recipient, message string, _ *telebot.SendOptions) error {
sendedMessage = message
return nil
},
},
}
testdb.SetQueryWithArgsFunc(func(query string, args []driver.Value) (driver.Rows, error) {
return testdb.RowsFromSlice(
[]string{"user_id", "telegram_id"},
[][]driver.Value{
{
"1",
"15",
},
},
), nil
})
bot.unsubscribe(telebot.Message{
Sender: telebot.User{
ID: 90,
Username: "test",
},
})
if assert.NotEmpty(t, sendedMessage) {
assert.Equal(t, "Sender ID is not matched", sendedMessage)
}
}
func TestUnsubscribeOk(t *testing.T) {
defer testdb.Reset()
var sendedMessage string
bot := bot{
connect: sqlx.MustOpen("testdb", ""),
telebot: &testSender{
send: func(recipient telebot.Recipient, message string, _ *telebot.SendOptions) error {
sendedMessage = message
return nil
},
},
}
testdb.SetQueryWithArgsFunc(func(query string, args []driver.Value) (driver.Rows, error) {
return testdb.RowsFromSlice(
[]string{"user_id", "telegram_id"},
[][]driver.Value{
{
"1",
"42",
},
},
), nil
})
testdb.SetExecWithArgsFunc(func(query string, args []driver.Value) (driver.Result, error) {
if assert.Len(t, args, 3) {
assert.Equal(t, int64(1), args[0].(int64))
assert.Equal(t, "test", args[1].(string))
assert.Equal(t, int64(0), args[2].(int64))
}
return testdb.NewResult(0, nil, 0, nil), nil
})
bot.unsubscribe(telebot.Message{
Sender: telebot.User{
ID: 42,
Username: "test",
},
})
if assert.NotEmpty(t, sendedMessage) {
assert.Equal(t, "Ok, unsubscribed", sendedMessage)
}
} | |
algorithms.py | import numpy as np
class | (object):
def __init__(self, problem):
self.problem = problem
self.debug = False
self.inv_step = None
@property
def output(self):
"""
Return the 'interesting' part of the problem arguments.
In the regression case, this is the tuple (beta, r).
"""
return self.problem.output
def fit(self):
"""
Abstract method.
"""
raise NotImplementedError
class FISTA(algorithm):
"""
The FISTA generalized gradient algorithm
"""
def fit(self,
max_its=10000,
min_its=5,
tol=1e-5,
FISTA=True,
backtrack=True,
alpha=1.1,
start_inv_step=1.,
restart=np.inf,
coef_stop=False,
return_objective_hist = True,
prox_tol = None,
prox_max_its = None,
prox_debug = None,
monotonicity_restart=True):
#Specify convergence criteria for proximal problem
# This is a bit inconsistent: simple prox functions don't accept tolerance parameters, but when the prox function
# is an optimization (like primal_prox) then it accepts some control paramters. This checks whether the user
# gave the parameters before passing them on
if (prox_tol is not None) or (prox_max_its is not None) or (prox_debug is not None):
set_prox_control = True
if prox_tol is None:
prox_tol = 1e-14
if prox_max_its is None:
prox_max_its = 5000
if prox_debug is None:
prox_debug=False
prox_control = {'tol':prox_tol,
'max_its':prox_max_its,
'debug':prox_debug}
else:
set_prox_control = False
objective_hist = np.zeros(max_its)
if self.inv_step is None:
#If available, use Lipschitz constant from last fit
self.inv_step = start_inv_step
else:
self.inv_step *= 1/alpha
r = self.problem.coefs
t_old = 1.
beta = self.problem.coefs
current_f = self.problem.smooth_eval(r,mode='func')
current_obj = current_f + self.problem.obj_rough(r)
itercount = 0
badstep = 0
while itercount < max_its:
#Restart every 'restart' iterations
if np.mod(itercount+1,restart)==0:
if self.debug:
print "Restarting"
r = self.problem.coefs
t_old = 1.
objective_hist[itercount] = current_obj
# Backtracking loop
if backtrack:
if np.mod(itercount+1,100)==0:
self.inv_step *= 1/alpha
current_f, grad = self.problem.smooth_eval(r,mode='both')
stop = False
while not stop:
if set_prox_control:
beta = self.problem.proximal(r, grad, self.inv_step, prox_control=prox_control)
else:
beta = self.problem.proximal(r, grad, self.inv_step)
trial_f = self.problem.smooth_eval(beta,mode='func')
if np.fabs(trial_f - current_f)/np.max([1.,trial_f]) > 1e-10:
stop = trial_f <= current_f + np.dot(beta-r,grad) + 0.5*self.inv_step*np.linalg.norm(beta-r)**2
else:
trial_grad = self.problem.smooth_eval(beta,mode='grad')
stop = np.fabs(np.dot(beta-r,grad-trial_grad)) <= 0.5*self.inv_step*np.linalg.norm(beta-r)**2
if not stop:
self.inv_step *= alpha
if self.debug:
print "Increasing inv_step", self.inv_step
else:
#Use specified Lipschitz constant
grad = self.problem.smooth_eval(r,mode='grad')
self.inv_step = self.problem.L
if set_prox_control:
beta = self.problem.proximal(r, grad, self.inv_step, prox_control=prox_control)
else:
beta = self.problem.proximal(r, grad, self.inv_step)
trial_f = self.problem.smooth_eval(beta,mode='func')
trial_obj = trial_f + self.problem.obj_rough(beta)
obj_change = np.fabs(trial_obj - current_obj)
obj_rel_change = obj_change/np.fabs(current_obj)
if self.debug:
print itercount, current_obj, self.inv_step, obj_rel_change, np.linalg.norm(self.problem.coefs - beta) / np.max([1.,np.linalg.norm(beta)]), tol
if itercount >= min_its:
if coef_stop:
if np.linalg.norm(self.problem.coefs - beta) / np.max([1.,np.linalg.norm(beta)]) < tol:
self.problem.coefs = beta
break
else:
if obj_rel_change < tol or obj_change < tol:
self.problem.coefs = beta
break
if FISTA:
#Use Nesterov weights
t_new = 0.5 * (1 + np.sqrt(1+4*(t_old**2)))
r = beta + ((t_old-1)/(t_new)) * (beta - self.problem.coefs)
else:
#Just do ISTA
t_new = 1.
r = beta
if current_obj < trial_obj and obj_rel_change > 1e-12 and current_obj > 1e-12 and monotonicity_restart:
#Adaptive restarting: restart if monotonicity violated
if self.debug:
print "\tRestarting", current_obj, trial_obj
current_f = self.problem.smooth_eval(self.problem.coefs,mode='func')
current_obj = current_f + self.problem.obj_rough(self.problem.coefs)
if not set_prox_control and t_old == 1.:
#Gradient step didn't decrease objective: tolerance problems or incorrect prox op... time to give up?
badstep += 1
if badstep > 3:
break
itercount += 1
t_old = 1.
r = self.problem.coefs
else:
self.problem.coefs = beta
t_old = t_new
itercount += 1
current_obj = trial_obj
if self.debug:
print "FISTA used", itercount, "iterations"
if return_objective_hist:
return objective_hist[:itercount]
| algorithm |
bit.rs | /// Module for bit operations (checking, set/reset, etc.)
use jeebie::core::cpu::CPU;
use jeebie::core::registers::Register8::*;
use jeebie::core::registers::Register16::HL;
// 'BIT 0,A' CB 47 8
pub fn BIT_0_A(cpu: &mut CPU) -> i32 {
cpu.bit_check(0, A);
8
}
// 'BIT 0,B' CB 40 8
pub fn BIT_0_B(cpu: &mut CPU) -> i32 {
cpu.bit_check(0, B);
8
}
// 'BIT 0,C' CB 41 8
pub fn BIT_0_C(cpu: &mut CPU) -> i32 {
cpu.bit_check(0, C);
8
}
// 'BIT 0,D' CB 42 8
pub fn BIT_0_D(cpu: &mut CPU) -> i32 {
cpu.bit_check(0, D);
8
}
// 'BIT 0,E' CB 43 8
pub fn BIT_0_E(cpu: &mut CPU) -> i32 {
cpu.bit_check(0, E);
8
}
// 'BIT 0,H' CB 44 8
pub fn BIT_0_H(cpu: &mut CPU) -> i32 {
cpu.bit_check(0, H);
8
}
// 'BIT 0,L' CB 45 8
pub fn BIT_0_L(cpu: &mut CPU) -> i32 {
cpu.bit_check(0, L);
8
}
// 'BIT 0,(HL)' CB 46 16
pub fn BIT_0_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_check(0, RegisterAddress(HL));
16
}
// 'BIT 1,A' CB 4F 8
pub fn BIT_1_A(cpu: &mut CPU) -> i32 {
cpu.bit_check(1, A);
8
}
// 'BIT 1,B' CB 48 8
pub fn BIT_1_B(cpu: &mut CPU) -> i32 {
cpu.bit_check(1, B);
8
}
// 'BIT 1,C' CB 49 8
pub fn BIT_1_C(cpu: &mut CPU) -> i32 {
cpu.bit_check(1, C);
8
}
// 'BIT 1,D' CB 4A 8
pub fn BIT_1_D(cpu: &mut CPU) -> i32 {
cpu.bit_check(1, D);
8
}
// 'BIT 1,E' CB 4B 8
pub fn BIT_1_E(cpu: &mut CPU) -> i32 {
cpu.bit_check(1, E);
8
}
// 'BIT 1,H' CB 4C 8
pub fn BIT_1_H(cpu: &mut CPU) -> i32 {
cpu.bit_check(1, H);
8
}
// 'BIT 1,L' CB 4D 8
pub fn BIT_1_L(cpu: &mut CPU) -> i32 {
cpu.bit_check(1, L);
8
}
// 'BIT 1,(HL)' CB 4E 16
pub fn BIT_1_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_check(1, RegisterAddress(HL));
16
}
// 'BIT 2,A' CB 57 8
pub fn BIT_2_A(cpu: &mut CPU) -> i32 {
cpu.bit_check(2, A);
8
}
// 'BIT 2,B' CB 50 8
pub fn BIT_2_B(cpu: &mut CPU) -> i32 {
cpu.bit_check(2, B);
8
}
// 'BIT 2,C' CB 51 8
pub fn BIT_2_C(cpu: &mut CPU) -> i32 {
cpu.bit_check(2, C);
8
}
// 'BIT 2,D' CB 52 8
pub fn BIT_2_D(cpu: &mut CPU) -> i32 {
cpu.bit_check(2, D);
8
}
// 'BIT 2,E' CB 53 8
pub fn BIT_2_E(cpu: &mut CPU) -> i32 {
cpu.bit_check(2, E);
8
}
// 'BIT 2,H' CB 54 8
pub fn BIT_2_H(cpu: &mut CPU) -> i32 {
cpu.bit_check(2, H);
8
}
// 'BIT 2,L' CB 55 8
pub fn BIT_2_L(cpu: &mut CPU) -> i32 {
cpu.bit_check(2, L);
8
}
// 'BIT 2,(HL)' CB 56 16
pub fn BIT_2_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_check(2, RegisterAddress(HL));
16
}
// 'BIT 3,A' CB 5F 8
pub fn BIT_3_A(cpu: &mut CPU) -> i32 {
cpu.bit_check(3, A);
8
}
// 'BIT 3,B' CB 58 8
pub fn BIT_3_B(cpu: &mut CPU) -> i32 {
cpu.bit_check(3, B);
8
}
// 'BIT 3,C' CB 59 8
pub fn BIT_3_C(cpu: &mut CPU) -> i32 {
cpu.bit_check(3, C);
8
}
// 'BIT 3,D' CB 5A 8
pub fn BIT_3_D(cpu: &mut CPU) -> i32 {
cpu.bit_check(3, D);
8
}
// 'BIT 3,E' CB 5B 8
pub fn BIT_3_E(cpu: &mut CPU) -> i32 {
cpu.bit_check(3, E);
8
}
// 'BIT 3,H' CB 5C 8
pub fn BIT_3_H(cpu: &mut CPU) -> i32 |
// 'BIT 3,L' CB 5D 8
pub fn BIT_3_L(cpu: &mut CPU) -> i32 {
cpu.bit_check(3, L);
8
}
// 'BIT 3,(HL)' CB 5E 16
pub fn BIT_3_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_check(3, RegisterAddress(HL));
16
}
// 'BIT 4,A' CB 67 8
pub fn BIT_4_A(cpu: &mut CPU) -> i32 {
cpu.bit_check(4, A);
8
}
// 'BIT 4,B' CB 60 8
pub fn BIT_4_B(cpu: &mut CPU) -> i32 {
cpu.bit_check(4, B);
8
}
// 'BIT 4,C' CB 61 8
pub fn BIT_4_C(cpu: &mut CPU) -> i32 {
cpu.bit_check(4, C);
8
}
// 'BIT 4,D' CB 62 8
pub fn BIT_4_D(cpu: &mut CPU) -> i32 {
cpu.bit_check(4, D);
8
}
// 'BIT 4,E' CB 63 8
pub fn BIT_4_E(cpu: &mut CPU) -> i32 {
cpu.bit_check(4, E);
8
}
// 'BIT 4,H' CB 64 8
pub fn BIT_4_H(cpu: &mut CPU) -> i32 {
cpu.bit_check(4, H);
8
}
// 'BIT 4,L' CB 65 8
pub fn BIT_4_L(cpu: &mut CPU) -> i32 {
cpu.bit_check(4, L);
8
}
// 'BIT 4,(HL)' CB 66 16
pub fn BIT_4_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_check(4, RegisterAddress(HL));
16
}
// 'BIT 5,A' CB 6F 8
pub fn BIT_5_A(cpu: &mut CPU) -> i32 {
cpu.bit_check(5, A);
8
}
// 'BIT 5,B' CB 68 8
pub fn BIT_5_B(cpu: &mut CPU) -> i32 {
cpu.bit_check(5, B);
8
}
// 'BIT 5,C' CB 69 8
pub fn BIT_5_C(cpu: &mut CPU) -> i32 {
cpu.bit_check(5, C);
8
}
// 'BIT 5,D' CB 6A 8
pub fn BIT_5_D(cpu: &mut CPU) -> i32 {
cpu.bit_check(5, D);
8
}
// 'BIT 5,E' CB 6B 8
pub fn BIT_5_E(cpu: &mut CPU) -> i32 {
cpu.bit_check(5, E);
8
}
// 'BIT 5,H' CB 6C 8
pub fn BIT_5_H(cpu: &mut CPU) -> i32 {
cpu.bit_check(5, H);
8
}
// 'BIT 5,L' CB 6D 8
pub fn BIT_5_L(cpu: &mut CPU) -> i32 {
cpu.bit_check(5, L);
8
}
// 'BIT 5,(HL)' CB 6E 16
pub fn BIT_5_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_check(5, RegisterAddress(HL));
16
}
// 'BIT 6,A' CB 77 8
pub fn BIT_6_A(cpu: &mut CPU) -> i32 {
cpu.bit_check(6, A);
8
}
// 'BIT 6,B' CB 70 8
pub fn BIT_6_B(cpu: &mut CPU) -> i32 {
cpu.bit_check(6, B);
8
}
// 'BIT 6,C' CB 71 8
pub fn BIT_6_C(cpu: &mut CPU) -> i32 {
cpu.bit_check(6, C);
8
}
// 'BIT 6,D' CB 72 8
pub fn BIT_6_D(cpu: &mut CPU) -> i32 {
cpu.bit_check(6, D);
8
}
// 'BIT 6,E' CB 73 8
pub fn BIT_6_E(cpu: &mut CPU) -> i32 {
cpu.bit_check(6, E);
8
}
// 'BIT 6,H' CB 74 8
pub fn BIT_6_H(cpu: &mut CPU) -> i32 {
cpu.bit_check(6, H);
8
}
// 'BIT 6,L' CB 75 8
pub fn BIT_6_L(cpu: &mut CPU) -> i32 {
cpu.bit_check(6, L);
8
}
// 'BIT 6,(HL)' CB 76 16
pub fn BIT_6_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_check(6, RegisterAddress(HL));
16
}
// 'BIT 7,A' CB 7F 8
pub fn BIT_7_A(cpu: &mut CPU) -> i32 {
cpu.bit_check(7, A);
8
}
// 'BIT 7,B' CB 78 8
pub fn BIT_7_B(cpu: &mut CPU) -> i32 {
cpu.bit_check(7, B);
8
}
// 'BIT 7,C' CB 79 8
pub fn BIT_7_C(cpu: &mut CPU) -> i32 {
cpu.bit_check(7, C);
8
}
// 'BIT 7,D' CB 7A 8
pub fn BIT_7_D(cpu: &mut CPU) -> i32 {
cpu.bit_check(7, D);
8
}
// 'BIT 7,E' CB 7B 8
pub fn BIT_7_E(cpu: &mut CPU) -> i32 {
cpu.bit_check(7, E);
8
}
// 'BIT 7,H' CB 7C 8
pub fn BIT_7_H(cpu: &mut CPU) -> i32 {
cpu.bit_check(7, H);
8
}
// 'BIT 7,L' CB 7D 8
pub fn BIT_7_L(cpu: &mut CPU) -> i32 {
cpu.bit_check(7, L);
8
}
// 'BIT 7,(HL)' CB 7E 16
pub fn BIT_7_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_check(7, RegisterAddress(HL));
16
}
// 'RES 0,A' CB 87 8
pub fn RES_0_A(cpu: &mut CPU) -> i32 {
cpu.bit_reset(0, A);
8
}
// 'RES 0,B' CB 80 8
pub fn RES_0_B(cpu: &mut CPU) -> i32 {
cpu.bit_reset(0, B);
8
}
// 'RES 0,C' CB 81 8
pub fn RES_0_C(cpu: &mut CPU) -> i32 {
cpu.bit_reset(0, C);
8
}
// 'RES 0,D' CB 82 8
pub fn RES_0_D(cpu: &mut CPU) -> i32 {
cpu.bit_reset(0, D);
8
}
// 'RES 0,E' CB 83 8
pub fn RES_0_E(cpu: &mut CPU) -> i32 {
cpu.bit_reset(0, E);
8
}
// 'RES 0,H' CB 84 8
pub fn RES_0_H(cpu: &mut CPU) -> i32 {
cpu.bit_reset(0, H);
8
}
// 'RES 0,L' CB 85 8
pub fn RES_0_L(cpu: &mut CPU) -> i32 {
cpu.bit_reset(0, L);
8
}
// 'RES 0,(HL)' CB 86 16
pub fn RES_0_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_reset(0, RegisterAddress(HL));
16
}
// 'RES 1,A' CB 8F 8
pub fn RES_1_A(cpu: &mut CPU) -> i32 {
cpu.bit_reset(1, A);
8
}
// 'RES 1,B' CB 88 8
pub fn RES_1_B(cpu: &mut CPU) -> i32 {
cpu.bit_reset(1, B);
8
}
// 'RES 1,C' CB 89 8
pub fn RES_1_C(cpu: &mut CPU) -> i32 {
cpu.bit_reset(1, C);
8
}
// 'RES 1,D' CB 8A 8
pub fn RES_1_D(cpu: &mut CPU) -> i32 {
cpu.bit_reset(1, D);
8
}
// 'RES 1,E' CB 8B 8
pub fn RES_1_E(cpu: &mut CPU) -> i32 {
cpu.bit_reset(1, E);
8
}
// 'RES 1,H' CB 8C 8
pub fn RES_1_H(cpu: &mut CPU) -> i32 {
cpu.bit_reset(1, H);
8
}
// 'RES 1,L' CB 8D 8
pub fn RES_1_L(cpu: &mut CPU) -> i32 {
cpu.bit_reset(1, L);
8
}
// 'RES 1,(HL)' CB 8E 16
pub fn RES_1_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_reset(1, RegisterAddress(HL));
16
}
// 'RES 2,A' CB 97 8
pub fn RES_2_A(cpu: &mut CPU) -> i32 {
cpu.bit_reset(2, A);
8
}
// 'RES 2,B' CB 90 8
pub fn RES_2_B(cpu: &mut CPU) -> i32 {
cpu.bit_reset(2, B);
8
}
// 'RES 2,C' CB 91 8
pub fn RES_2_C(cpu: &mut CPU) -> i32 {
cpu.bit_reset(2, C);
8
}
// 'RES 2,D' CB 92 8
pub fn RES_2_D(cpu: &mut CPU) -> i32 {
cpu.bit_reset(2, D);
8
}
// 'RES 2,E' CB 93 8
pub fn RES_2_E(cpu: &mut CPU) -> i32 {
cpu.bit_reset(2, E);
8
}
// 'RES 2,H' CB 94 8
pub fn RES_2_H(cpu: &mut CPU) -> i32 {
cpu.bit_reset(2, H);
8
}
// 'RES 2,L' CB 95 8
pub fn RES_2_L(cpu: &mut CPU) -> i32 {
cpu.bit_reset(2, L);
8
}
// 'RES 2,(HL)' CB 96 16
pub fn RES_2_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_reset(2, RegisterAddress(HL));
16
}
// 'RES 3,A' CB 9F 8
pub fn RES_3_A(cpu: &mut CPU) -> i32 {
cpu.bit_reset(3, A);
8
}
// 'RES 3,B' CB 98 8
pub fn RES_3_B(cpu: &mut CPU) -> i32 {
cpu.bit_reset(3, B);
8
}
// 'RES 3,C' CB 99 8
pub fn RES_3_C(cpu: &mut CPU) -> i32 {
cpu.bit_reset(3, C);
8
}
// 'RES 3,D' CB 9A 8
pub fn RES_3_D(cpu: &mut CPU) -> i32 {
cpu.bit_reset(3, D);
8
}
// 'RES 3,E' CB 9B 8
pub fn RES_3_E(cpu: &mut CPU) -> i32 {
cpu.bit_reset(3, E);
8
}
// 'RES 3,H' CB 9C 8
pub fn RES_3_H(cpu: &mut CPU) -> i32 {
cpu.bit_reset(3, H);
8
}
// 'RES 3,L' CB 9D 8
pub fn RES_3_L(cpu: &mut CPU) -> i32 {
cpu.bit_reset(3, L);
8
}
// 'RES 3,(HL)' CB 9E 16
pub fn RES_3_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_reset(3, RegisterAddress(HL));
16
}
// 'RES 4,A' CB A7 8
pub fn RES_4_A(cpu: &mut CPU) -> i32 {
cpu.bit_reset(4, A);
8
}
// 'RES 4,B' CB A0 8
pub fn RES_4_B(cpu: &mut CPU) -> i32 {
cpu.bit_reset(4, B);
8
}
// 'RES 4,C' CB A1 8
pub fn RES_4_C(cpu: &mut CPU) -> i32 {
cpu.bit_reset(4, C);
8
}
// 'RES 4,D' CB A2 8
pub fn RES_4_D(cpu: &mut CPU) -> i32 {
cpu.bit_reset(4, D);
8
}
// 'RES 4,E' CB A3 8
pub fn RES_4_E(cpu: &mut CPU) -> i32 {
cpu.bit_reset(4, E);
8
}
// 'RES 4,H' CB A4 8
pub fn RES_4_H(cpu: &mut CPU) -> i32 {
cpu.bit_reset(4, H);
8
}
// 'RES 4,L' CB A5 8
pub fn RES_4_L(cpu: &mut CPU) -> i32 {
cpu.bit_reset(4, L);
8
}
// 'RES 4,(HL)' CB A6 16
pub fn RES_4_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_reset(4, RegisterAddress(HL));
16
}
// 'RES 5,A' CB AF 8
pub fn RES_5_A(cpu: &mut CPU) -> i32 {
cpu.bit_reset(5, A);
8
}
// 'RES 5,B' CB A8 8
pub fn RES_5_B(cpu: &mut CPU) -> i32 {
cpu.bit_reset(5, B);
8
}
// 'RES 5,C' CB A9 8
pub fn RES_5_C(cpu: &mut CPU) -> i32 {
cpu.bit_reset(5, C);
8
}
// 'RES 5,D' CB AA 8
pub fn RES_5_D(cpu: &mut CPU) -> i32 {
cpu.bit_reset(5, D);
8
}
// 'RES 5,E' CB AB 8
pub fn RES_5_E(cpu: &mut CPU) -> i32 {
cpu.bit_reset(5, E);
8
}
// 'RES 5,H' CB AC 8
pub fn RES_5_H(cpu: &mut CPU) -> i32 {
cpu.bit_reset(5, H);
8
}
// 'RES 5,L' CB AD 8
pub fn RES_5_L(cpu: &mut CPU) -> i32 {
cpu.bit_reset(5, L);
8
}
// 'RES 5,(HL)' CB AE 16
pub fn RES_5_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_reset(5, RegisterAddress(HL));
16
}
// 'RES 6,A' CB B7 8
pub fn RES_6_A(cpu: &mut CPU) -> i32 {
cpu.bit_reset(6, A);
8
}
// 'RES 6,B' CB B0 8
pub fn RES_6_B(cpu: &mut CPU) -> i32 {
cpu.bit_reset(6, B);
8
}
// 'RES 6,C' CB B1 8
pub fn RES_6_C(cpu: &mut CPU) -> i32 {
cpu.bit_reset(6, C);
8
}
// 'RES 6,D' CB B2 8
pub fn RES_6_D(cpu: &mut CPU) -> i32 {
cpu.bit_reset(6, D);
8
}
// 'RES 6,E' CB B3 8
pub fn RES_6_E(cpu: &mut CPU) -> i32 {
cpu.bit_reset(6, E);
8
}
// 'RES 6,H' CB B4 8
pub fn RES_6_H(cpu: &mut CPU) -> i32 {
cpu.bit_reset(6, H);
8
}
// 'RES 6,L' CB B5 8
pub fn RES_6_L(cpu: &mut CPU) -> i32 {
cpu.bit_reset(6, L);
8
}
// 'RES 6,(HL)' CB B6 16
pub fn RES_6_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_reset(6, RegisterAddress(HL));
16
}
// 'RES 7,A' CB BF 8
pub fn RES_7_A(cpu: &mut CPU) -> i32 {
cpu.bit_reset(7, A);
8
}
// 'RES 7,B' CB B8 8
pub fn RES_7_B(cpu: &mut CPU) -> i32 {
cpu.bit_reset(7, B);
8
}
// 'RES 7,C' CB B9 8
pub fn RES_7_C(cpu: &mut CPU) -> i32 {
cpu.bit_reset(7, C);
8
}
// 'RES 7,D' CB BA 8
pub fn RES_7_D(cpu: &mut CPU) -> i32 {
cpu.bit_reset(7, D);
8
}
// 'RES 7,E' CB BB 8
pub fn RES_7_E(cpu: &mut CPU) -> i32 {
cpu.bit_reset(7, E);
8
}
// 'RES 7,H' CB BC 8
pub fn RES_7_H(cpu: &mut CPU) -> i32 {
cpu.bit_reset(7, H);
8
}
// 'RES 7,L' CB BD 8
pub fn RES_7_L(cpu: &mut CPU) -> i32 {
cpu.bit_reset(7, L);
8
}
// 'RES 7,(HL)' CB BE 16
pub fn RES_7_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_reset(7, RegisterAddress(HL));
16
}
// 'SET 0,A' CB C7 8
pub fn SET_0_A(cpu: &mut CPU) -> i32 {
cpu.bit_set(0, A);
8
}
// 'SET 0,B' CB C0 8
pub fn SET_0_B(cpu: &mut CPU) -> i32 {
cpu.bit_set(0, B);
8
}
// 'SET 0,C' CB C1 8
pub fn SET_0_C(cpu: &mut CPU) -> i32 {
cpu.bit_set(0, C);
8
}
// 'SET 0,D' CB C2 8
pub fn SET_0_D(cpu: &mut CPU) -> i32 {
cpu.bit_set(0, D);
8
}
// 'SET 0,E' CB C3 8
pub fn SET_0_E(cpu: &mut CPU) -> i32 {
cpu.bit_set(0, E);
8
}
// 'SET 0,H' CB C4 8
pub fn SET_0_H(cpu: &mut CPU) -> i32 {
cpu.bit_set(0, H);
8
}
// 'SET 0,L' CB C5 8
pub fn SET_0_L(cpu: &mut CPU) -> i32 {
cpu.bit_set(0, L);
8
}
// 'SET 0,(HL)' CB C6 16
pub fn SET_0_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_set(0, RegisterAddress(HL));
16
}
// 'SET 1,A' CB CF 8
pub fn SET_1_A(cpu: &mut CPU) -> i32 {
cpu.bit_set(1, A);
8
}
// 'SET 1,B' CB C8 8
pub fn SET_1_B(cpu: &mut CPU) -> i32 {
cpu.bit_set(1, B);
8
}
// 'SET 1,C' CB C9 8
pub fn SET_1_C(cpu: &mut CPU) -> i32 {
cpu.bit_set(1, C);
8
}
// 'SET 1,D' CB CA 8
pub fn SET_1_D(cpu: &mut CPU) -> i32 {
cpu.bit_set(1, D);
8
}
// 'SET 1,E' CB CB 8
pub fn SET_1_E(cpu: &mut CPU) -> i32 {
cpu.bit_set(1, E);
8
}
// 'SET 1,H' CB CC 8
pub fn SET_1_H(cpu: &mut CPU) -> i32 {
cpu.bit_set(1, H);
8
}
// 'SET 1,L' CB CD 8
pub fn SET_1_L(cpu: &mut CPU) -> i32 {
cpu.bit_set(1, L);
8
}
// 'SET 1,(HL)' CB CE 16
pub fn SET_1_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_set(1, RegisterAddress(HL));
16
}
// 'SET 2,A' CB D7 8
pub fn SET_2_A(cpu: &mut CPU) -> i32 {
cpu.bit_set(2, A);
8
}
// 'SET 2,B' CB D0 8
pub fn SET_2_B(cpu: &mut CPU) -> i32 {
cpu.bit_set(2, B);
8
}
// 'SET 2,C' CB D1 8
pub fn SET_2_C(cpu: &mut CPU) -> i32 {
cpu.bit_set(2, C);
8
}
// 'SET 2,D' CB D2 8
pub fn SET_2_D(cpu: &mut CPU) -> i32 {
cpu.bit_set(2, D);
8
}
// 'SET 2,E' CB D3 8
pub fn SET_2_E(cpu: &mut CPU) -> i32 {
cpu.bit_set(2, E);
8
}
// 'SET 2,H' CB D4 8
pub fn SET_2_H(cpu: &mut CPU) -> i32 {
cpu.bit_set(2, H);
8
}
// 'SET 2,L' CB D5 8
pub fn SET_2_L(cpu: &mut CPU) -> i32 {
cpu.bit_set(2, L);
8
}
// 'SET 2,(HL)' CB D6 16
pub fn SET_2_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_set(2, RegisterAddress(HL));
16
}
// 'SET 3,A' CB DF 8
pub fn SET_3_A(cpu: &mut CPU) -> i32 {
cpu.bit_set(3, A);
8
}
// 'SET 3,B' CB D8 8
pub fn SET_3_B(cpu: &mut CPU) -> i32 {
cpu.bit_set(3, B);
8
}
// 'SET 3,C' CB D9 8
pub fn SET_3_C(cpu: &mut CPU) -> i32 {
cpu.bit_set(3, C);
8
}
// 'SET 3,D' CB DA 8
pub fn SET_3_D(cpu: &mut CPU) -> i32 {
cpu.bit_set(3, D);
8
}
// 'SET 3,E' CB DB 8
pub fn SET_3_E(cpu: &mut CPU) -> i32 {
cpu.bit_set(3, E);
8
}
// 'SET 3,H' CB DC 8
pub fn SET_3_H(cpu: &mut CPU) -> i32 {
cpu.bit_set(3, H);
8
}
// 'SET 3,L' CB DD 8
pub fn SET_3_L(cpu: &mut CPU) -> i32 {
cpu.bit_set(3, L);
8
}
// 'SET 3,(HL)' CB DE 16
pub fn SET_3_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_set(3, RegisterAddress(HL));
16
}
// 'SET 4,A' CB E7 8
pub fn SET_4_A(cpu: &mut CPU) -> i32 {
cpu.bit_set(4, A);
8
}
// 'SET 4,B' CB E0 8
pub fn SET_4_B(cpu: &mut CPU) -> i32 {
cpu.bit_set(4, B);
8
}
// 'SET 4,C' CB E1 8
pub fn SET_4_C(cpu: &mut CPU) -> i32 {
cpu.bit_set(4, C);
8
}
// 'SET 4,D' CB E2 8
pub fn SET_4_D(cpu: &mut CPU) -> i32 {
cpu.bit_set(4, D);
8
}
// 'SET 4,E' CB E3 8
pub fn SET_4_E(cpu: &mut CPU) -> i32 {
cpu.bit_set(4, E);
8
}
// 'SET 4,H' CB E4 8
pub fn SET_4_H(cpu: &mut CPU) -> i32 {
cpu.bit_set(4, H);
8
}
// 'SET 4,L' CB E5 8
pub fn SET_4_L(cpu: &mut CPU) -> i32 {
cpu.bit_set(4, L);
8
}
// 'SET 4,(HL)' CB E6 16
pub fn SET_4_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_set(4, RegisterAddress(HL));
16
}
// 'SET 5,A' CB EF 8
pub fn SET_5_A(cpu: &mut CPU) -> i32 {
cpu.bit_set(5, A);
8
}
// 'SET 5,B' CB E8 8
pub fn SET_5_B(cpu: &mut CPU) -> i32 {
cpu.bit_set(5, B);
8
}
// 'SET 5,C' CB E9 8
pub fn SET_5_C(cpu: &mut CPU) -> i32 {
cpu.bit_set(5, C);
8
}
// 'SET 5,D' CB EA 8
pub fn SET_5_D(cpu: &mut CPU) -> i32 {
cpu.bit_set(5, D);
8
}
// 'SET 5,E' CB EB 8
pub fn SET_5_E(cpu: &mut CPU) -> i32 {
cpu.bit_set(5, E);
8
}
// 'SET 5,H' CB EC 8
pub fn SET_5_H(cpu: &mut CPU) -> i32 {
cpu.bit_set(5, H);
8
}
// 'SET 5,L' CB ED 8
pub fn SET_5_L(cpu: &mut CPU) -> i32 {
cpu.bit_set(5, L);
8
}
// 'SET 5,(HL)' CB EE 16
pub fn SET_5_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_set(5, RegisterAddress(HL));
16
}
// 'SET 6,A' CB F7 8
pub fn SET_6_A(cpu: &mut CPU) -> i32 {
cpu.bit_set(6, A);
8
}
// 'SET 6,B' CB F0 8
pub fn SET_6_B(cpu: &mut CPU) -> i32 {
cpu.bit_set(6, B);
8
}
// 'SET 6,C' CB F1 8
pub fn SET_6_C(cpu: &mut CPU) -> i32 {
cpu.bit_set(6, C);
8
}
// 'SET 6,D' CB F2 8
pub fn SET_6_D(cpu: &mut CPU) -> i32 {
cpu.bit_set(6, D);
8
}
// 'SET 6,E' CB F3 8
pub fn SET_6_E(cpu: &mut CPU) -> i32 {
cpu.bit_set(6, E);
8
}
// 'SET 6,H' CB F4 8
pub fn SET_6_H(cpu: &mut CPU) -> i32 {
cpu.bit_set(6, H);
8
}
// 'SET 6,L' CB F5 8
pub fn SET_6_L(cpu: &mut CPU) -> i32 {
cpu.bit_set(6, L);
8
}
// 'SET 6,(HL)' CB F6 16
pub fn SET_6_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_set(6, RegisterAddress(HL));
16
}
// 'SET 7,A' CB FF 8
pub fn SET_7_A(cpu: &mut CPU) -> i32 {
cpu.bit_set(7, A);
8
}
// 'SET 7,B' CB F8 8
pub fn SET_7_B(cpu: &mut CPU) -> i32 {
cpu.bit_set(7, B);
8
}
// 'SET 7,C' CB F9 8
pub fn SET_7_C(cpu: &mut CPU) -> i32 {
cpu.bit_set(7, C);
8
}
// 'SET 7,D' CB FA 8
pub fn SET_7_D(cpu: &mut CPU) -> i32 {
cpu.bit_set(7, D);
8
}
// 'SET 7,E' CB FB 8
pub fn SET_7_E(cpu: &mut CPU) -> i32 {
cpu.bit_set(7, E);
8
}
// 'SET 7,H' CB FC 8
pub fn SET_7_H(cpu: &mut CPU) -> i32 {
cpu.bit_set(7, H);
8
}
// 'SET 7,L' CB FD 8
pub fn SET_7_L(cpu: &mut CPU) -> i32 {
cpu.bit_set(7, L);
8
}
// 'SET 7,(HL)' CB FE 16
pub fn SET_7_HLm(cpu: &mut CPU) -> i32 {
cpu.bit_set(7, RegisterAddress(HL));
16
}
| {
cpu.bit_check(3, H);
8
} |
__init__.py | # Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from django.conf import settings
from django.utils.timezone import (
get_default_timezone, localtime, is_naive, make_aware)
from datetime import datetime
from uw_sws import SWS_DAO, sws_now
from abc import ABC, abstractmethod
def __update_get(self, url, response):
pass
# Replace the SWS _update_get method to prevent tampering with mocked resources
SWS_DAO._update_get = __update_get
def current_datetime():
override_dt = getattr(settings, "CURRENT_DATETIME_OVERRIDE", None)
if override_dt is not None:
return datetime.strptime(override_dt, "%Y-%m-%d %H:%M:%S")
else:
return sws_now()
def display_datetime(dt):
if is_naive(dt):
dt = make_aware(dt, get_default_timezone())
else:
dt = localtime(dt)
return dt.strftime("%B %d at %l:%M %p %Z")
class GradeImportSource(ABC):
true_values = ["1", "y", "yes", "true"]
@abstractmethod
def grades_for_section(self, section, instructor, **kwargs):
pass
def is_true(self, val):
| return (val is not None and val.lower() in self.true_values) |
|
PaymentRateManager.py | from lbrynet.core.Strategy import get_default_strategy, OnlyFreeStrategy
from lbrynet import conf
from decimal import Decimal
class BasePaymentRateManager(object):
def __init__(self, rate=None, info_rate=None):
self.min_blob_data_payment_rate = rate if rate is not None else conf.settings['data_rate']
self.min_blob_info_payment_rate = (
info_rate if info_rate is not None else conf.settings['min_info_rate'])
class PaymentRateManager(object):
def __init__(self, base, rate=None):
"""
@param base: a BasePaymentRateManager
@param rate: the min blob data payment rate
"""
self.base = base
self.min_blob_data_payment_rate = rate
self.points_paid = 0.0
def get_rate_blob_data(self, peer):
return self.get_effective_min_blob_data_payment_rate()
def accept_rate_blob_data(self, peer, payment_rate):
return payment_rate >= self.get_effective_min_blob_data_payment_rate()
def get_effective_min_blob_data_payment_rate(self):
if self.min_blob_data_payment_rate is None:
return self.base.min_blob_data_payment_rate
return self.min_blob_data_payment_rate
def record_points_paid(self, amount):
self.points_paid += amount
class NegotiatedPaymentRateManager(object):
def __init__(self, base, availability_tracker, generous=None):
"""
@param base: a BasePaymentRateManager
@param availability_tracker: a BlobAvailabilityTracker
@param rate: the min blob data payment rate
"""
self.base = base
self.min_blob_data_payment_rate = base.min_blob_data_payment_rate
self.points_paid = 0.0
self.blob_tracker = availability_tracker
self.generous = generous if generous is not None else conf.settings['is_generous_host']
self.strategy = get_default_strategy(self.blob_tracker,
base_price=self.base.min_blob_data_payment_rate,
is_generous=generous)
def get_rate_blob_data(self, peer, blobs):
response = self.strategy.make_offer(peer, blobs)
return response.rate
def accept_rate_blob_data(self, peer, blobs, offer):
offer = self.strategy.respond_to_offer(offer, peer, blobs)
self.strategy.update_accepted_offers(peer, offer)
return offer.is_accepted
def reply_to_offer(self, peer, blobs, offer):
reply = self.strategy.respond_to_offer(offer, peer, blobs)
self.strategy.update_accepted_offers(peer, reply)
return reply
def | (self, peer):
return self.strategy.accepted_offers.get(peer, False)
def record_points_paid(self, amount):
self.points_paid += amount
def record_offer_reply(self, peer, offer):
self.strategy.update_accepted_offers(peer, offer)
def price_limit_reached(self, peer):
if peer in self.strategy.pending_sent_offers:
offer = self.strategy.pending_sent_offers[peer]
return (offer.is_too_low and
round(Decimal.from_float(offer.rate), 5) >= round(self.strategy.max_rate, 5))
return False
class OnlyFreePaymentsManager(object):
def __init__(self, **kwargs):
"""
A payment rate manager that will only ever accept and offer a rate of 0.0,
Used for testing
"""
self.base = BasePaymentRateManager(0.0, 0.0)
self.points_paid = 0.0
self.generous = True
self.strategy = OnlyFreeStrategy()
def get_rate_blob_data(self, peer, blobs):
response = self.strategy.make_offer(peer, blobs)
return response.rate
def accept_rate_blob_data(self, peer, blobs, offer):
offer = self.strategy.respond_to_offer(offer, peer, blobs)
self.strategy.update_accepted_offers(peer, offer)
return offer.is_accepted
def reply_to_offer(self, peer, blobs, offer):
reply = self.strategy.respond_to_offer(offer, peer, blobs)
self.strategy.update_accepted_offers(peer, reply)
return reply
def get_rate_for_peer(self, peer):
return self.strategy.accepted_offers.get(peer, False)
def record_points_paid(self, amount):
self.points_paid += amount
def record_offer_reply(self, peer, offer):
self.strategy.update_accepted_offers(peer, offer)
def price_limit_reached(self, peer):
if peer in self.strategy.pending_sent_offers:
offer = self.strategy.pending_sent_offers[peer]
if offer.rate > 0.0:
return True
return False
| get_rate_for_peer |
ethernet_dma.rs |
::bobbin_mcu::periph!( ETHERNET_DMA, EthernetDma, ETHERNET_DMA_PERIPH, EthernetDmaPeriph, ETHERNET_DMA_OWNED, ETHERNET_DMA_REF_COUNT, 0x40029000, 0x00, 0x08);
#[doc="Ethernet: DMA controller operation"]
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct EthernetDmaPeriph(pub usize);
impl EthernetDmaPeriph {
#[doc="Get the DMABMR Register."]
#[inline] pub fn dmabmr_reg(&self) -> ::bobbin_mcu::register::Register<Dmabmr> {
::bobbin_mcu::register::Register::new(self.0 as *mut Dmabmr, 0x0)
}
#[doc="Get the *mut pointer for the DMABMR register."]
#[inline] pub fn dmabmr_mut(&self) -> *mut Dmabmr {
self.dmabmr_reg().ptr()
}
#[doc="Get the *const pointer for the DMABMR register."]
#[inline] pub fn dmabmr_ptr(&self) -> *const Dmabmr {
self.dmabmr_reg().ptr()
}
#[doc="Read the DMABMR register."]
#[inline] pub fn dmabmr(&self) -> Dmabmr {
self.dmabmr_reg().read()
}
#[doc="Write the DMABMR register."]
#[inline] pub fn write_dmabmr(&self, value: Dmabmr) -> &Self {
self.dmabmr_reg().write(value);
self
}
#[doc="Set the DMABMR register."]
#[inline] pub fn set_dmabmr<F: FnOnce(Dmabmr) -> Dmabmr>(&self, f: F) -> &Self {
self.dmabmr_reg().set(f);
self
}
#[doc="Modify the DMABMR register."]
#[inline] pub fn with_dmabmr<F: FnOnce(Dmabmr) -> Dmabmr>(&self, f: F) -> &Self {
self.dmabmr_reg().with(f);
self
}
#[doc="Get the DMATPDR Register."]
#[inline] pub fn dmatpdr_reg(&self) -> ::bobbin_mcu::register::Register<Dmatpdr> {
::bobbin_mcu::register::Register::new(self.0 as *mut Dmatpdr, 0x4)
}
#[doc="Get the *mut pointer for the DMATPDR register."]
#[inline] pub fn dmatpdr_mut(&self) -> *mut Dmatpdr {
self.dmatpdr_reg().ptr()
}
#[doc="Get the *const pointer for the DMATPDR register."]
#[inline] pub fn dmatpdr_ptr(&self) -> *const Dmatpdr {
self.dmatpdr_reg().ptr()
}
#[doc="Read the DMATPDR register."]
#[inline] pub fn dmatpdr(&self) -> Dmatpdr {
self.dmatpdr_reg().read()
}
#[doc="Write the DMATPDR register."]
#[inline] pub fn write_dmatpdr(&self, value: Dmatpdr) -> &Self {
self.dmatpdr_reg().write(value);
self
}
#[doc="Set the DMATPDR register."]
#[inline] pub fn set_dmatpdr<F: FnOnce(Dmatpdr) -> Dmatpdr>(&self, f: F) -> &Self {
self.dmatpdr_reg().set(f);
self
}
#[doc="Modify the DMATPDR register."]
#[inline] pub fn with_dmatpdr<F: FnOnce(Dmatpdr) -> Dmatpdr>(&self, f: F) -> &Self {
self.dmatpdr_reg().with(f);
self
}
#[doc="Get the DMARPDR Register."]
#[inline] pub fn dmarpdr_reg(&self) -> ::bobbin_mcu::register::Register<Dmarpdr> |
#[doc="Get the *mut pointer for the DMARPDR register."]
#[inline] pub fn dmarpdr_mut(&self) -> *mut Dmarpdr {
self.dmarpdr_reg().ptr()
}
#[doc="Get the *const pointer for the DMARPDR register."]
#[inline] pub fn dmarpdr_ptr(&self) -> *const Dmarpdr {
self.dmarpdr_reg().ptr()
}
#[doc="Read the DMARPDR register."]
#[inline] pub fn dmarpdr(&self) -> Dmarpdr {
self.dmarpdr_reg().read()
}
#[doc="Write the DMARPDR register."]
#[inline] pub fn write_dmarpdr(&self, value: Dmarpdr) -> &Self {
self.dmarpdr_reg().write(value);
self
}
#[doc="Set the DMARPDR register."]
#[inline] pub fn set_dmarpdr<F: FnOnce(Dmarpdr) -> Dmarpdr>(&self, f: F) -> &Self {
self.dmarpdr_reg().set(f);
self
}
#[doc="Modify the DMARPDR register."]
#[inline] pub fn with_dmarpdr<F: FnOnce(Dmarpdr) -> Dmarpdr>(&self, f: F) -> &Self {
self.dmarpdr_reg().with(f);
self
}
#[doc="Get the DMARDLAR Register."]
#[inline] pub fn dmardlar_reg(&self) -> ::bobbin_mcu::register::Register<Dmardlar> {
::bobbin_mcu::register::Register::new(self.0 as *mut Dmardlar, 0xc)
}
#[doc="Get the *mut pointer for the DMARDLAR register."]
#[inline] pub fn dmardlar_mut(&self) -> *mut Dmardlar {
self.dmardlar_reg().ptr()
}
#[doc="Get the *const pointer for the DMARDLAR register."]
#[inline] pub fn dmardlar_ptr(&self) -> *const Dmardlar {
self.dmardlar_reg().ptr()
}
#[doc="Read the DMARDLAR register."]
#[inline] pub fn dmardlar(&self) -> Dmardlar {
self.dmardlar_reg().read()
}
#[doc="Write the DMARDLAR register."]
#[inline] pub fn write_dmardlar(&self, value: Dmardlar) -> &Self {
self.dmardlar_reg().write(value);
self
}
#[doc="Set the DMARDLAR register."]
#[inline] pub fn set_dmardlar<F: FnOnce(Dmardlar) -> Dmardlar>(&self, f: F) -> &Self {
self.dmardlar_reg().set(f);
self
}
#[doc="Modify the DMARDLAR register."]
#[inline] pub fn with_dmardlar<F: FnOnce(Dmardlar) -> Dmardlar>(&self, f: F) -> &Self {
self.dmardlar_reg().with(f);
self
}
#[doc="Get the DMATDLAR Register."]
#[inline] pub fn dmatdlar_reg(&self) -> ::bobbin_mcu::register::Register<Dmatdlar> {
::bobbin_mcu::register::Register::new(self.0 as *mut Dmatdlar, 0x10)
}
#[doc="Get the *mut pointer for the DMATDLAR register."]
#[inline] pub fn dmatdlar_mut(&self) -> *mut Dmatdlar {
self.dmatdlar_reg().ptr()
}
#[doc="Get the *const pointer for the DMATDLAR register."]
#[inline] pub fn dmatdlar_ptr(&self) -> *const Dmatdlar {
self.dmatdlar_reg().ptr()
}
#[doc="Read the DMATDLAR register."]
#[inline] pub fn dmatdlar(&self) -> Dmatdlar {
self.dmatdlar_reg().read()
}
#[doc="Write the DMATDLAR register."]
#[inline] pub fn write_dmatdlar(&self, value: Dmatdlar) -> &Self {
self.dmatdlar_reg().write(value);
self
}
#[doc="Set the DMATDLAR register."]
#[inline] pub fn set_dmatdlar<F: FnOnce(Dmatdlar) -> Dmatdlar>(&self, f: F) -> &Self {
self.dmatdlar_reg().set(f);
self
}
#[doc="Modify the DMATDLAR register."]
#[inline] pub fn with_dmatdlar<F: FnOnce(Dmatdlar) -> Dmatdlar>(&self, f: F) -> &Self {
self.dmatdlar_reg().with(f);
self
}
#[doc="Get the DMASR Register."]
#[inline] pub fn dmasr_reg(&self) -> ::bobbin_mcu::register::Register<Dmasr> {
::bobbin_mcu::register::Register::new(self.0 as *mut Dmasr, 0x14)
}
#[doc="Get the *mut pointer for the DMASR register."]
#[inline] pub fn dmasr_mut(&self) -> *mut Dmasr {
self.dmasr_reg().ptr()
}
#[doc="Get the *const pointer for the DMASR register."]
#[inline] pub fn dmasr_ptr(&self) -> *const Dmasr {
self.dmasr_reg().ptr()
}
#[doc="Read the DMASR register."]
#[inline] pub fn dmasr(&self) -> Dmasr {
self.dmasr_reg().read()
}
#[doc="Write the DMASR register."]
#[inline] pub fn write_dmasr(&self, value: Dmasr) -> &Self {
self.dmasr_reg().write(value);
self
}
#[doc="Set the DMASR register."]
#[inline] pub fn set_dmasr<F: FnOnce(Dmasr) -> Dmasr>(&self, f: F) -> &Self {
self.dmasr_reg().set(f);
self
}
#[doc="Modify the DMASR register."]
#[inline] pub fn with_dmasr<F: FnOnce(Dmasr) -> Dmasr>(&self, f: F) -> &Self {
self.dmasr_reg().with(f);
self
}
    // --- DMAOMR (operation mode) accessors; register at byte offset 0x18 ---
    #[doc="Get the DMAOMR Register."]
    #[inline] pub fn dmaomr_reg(&self) -> ::bobbin_mcu::register::Register<Dmaomr> {
        // self.0 holds the peripheral base address; the Register wrapper adds
        // the 0x18 offset.
        ::bobbin_mcu::register::Register::new(self.0 as *mut Dmaomr, 0x18)
    }
    #[doc="Get the *mut pointer for the DMAOMR register."]
    #[inline] pub fn dmaomr_mut(&self) -> *mut Dmaomr {
        self.dmaomr_reg().ptr()
    }
    #[doc="Get the *const pointer for the DMAOMR register."]
    #[inline] pub fn dmaomr_ptr(&self) -> *const Dmaomr {
        // Same address as dmaomr_mut(); only the pointer mutability differs.
        self.dmaomr_reg().ptr()
    }
    #[doc="Read the DMAOMR register."]
    #[inline] pub fn dmaomr(&self) -> Dmaomr {
        self.dmaomr_reg().read()
    }
    #[doc="Write the DMAOMR register."]
    #[inline] pub fn write_dmaomr(&self, value: Dmaomr) -> &Self {
        self.dmaomr_reg().write(value);
        self
    }
    #[doc="Set the DMAOMR register."]
    #[inline] pub fn set_dmaomr<F: FnOnce(Dmaomr) -> Dmaomr>(&self, f: F) -> &Self {
        self.dmaomr_reg().set(f);
        self
    }
    #[doc="Modify the DMAOMR register."]
    #[inline] pub fn with_dmaomr<F: FnOnce(Dmaomr) -> Dmaomr>(&self, f: F) -> &Self {
        self.dmaomr_reg().with(f);
        self
    }
    // --- DMAIER (interrupt enable) accessors; register at byte offset 0x1c ---
    #[doc="Get the DMAIER Register."]
    #[inline] pub fn dmaier_reg(&self) -> ::bobbin_mcu::register::Register<Dmaier> {
        ::bobbin_mcu::register::Register::new(self.0 as *mut Dmaier, 0x1c)
    }
    #[doc="Get the *mut pointer for the DMAIER register."]
    #[inline] pub fn dmaier_mut(&self) -> *mut Dmaier {
        self.dmaier_reg().ptr()
    }
    #[doc="Get the *const pointer for the DMAIER register."]
    #[inline] pub fn dmaier_ptr(&self) -> *const Dmaier {
        self.dmaier_reg().ptr()
    }
    #[doc="Read the DMAIER register."]
    #[inline] pub fn dmaier(&self) -> Dmaier {
        self.dmaier_reg().read()
    }
    #[doc="Write the DMAIER register."]
    #[inline] pub fn write_dmaier(&self, value: Dmaier) -> &Self {
        self.dmaier_reg().write(value);
        self
    }
    #[doc="Set the DMAIER register."]
    #[inline] pub fn set_dmaier<F: FnOnce(Dmaier) -> Dmaier>(&self, f: F) -> &Self {
        self.dmaier_reg().set(f);
        self
    }
    #[doc="Modify the DMAIER register."]
    #[inline] pub fn with_dmaier<F: FnOnce(Dmaier) -> Dmaier>(&self, f: F) -> &Self {
        self.dmaier_reg().with(f);
        self
    }
    // --- DMAMFBOCR (missed frame / buffer overflow counter) accessors;
    //     register at byte offset 0x20 ---
    #[doc="Get the DMAMFBOCR Register."]
    #[inline] pub fn dmamfbocr_reg(&self) -> ::bobbin_mcu::register::Register<Dmamfbocr> {
        ::bobbin_mcu::register::Register::new(self.0 as *mut Dmamfbocr, 0x20)
    }
    #[doc="Get the *mut pointer for the DMAMFBOCR register."]
    #[inline] pub fn dmamfbocr_mut(&self) -> *mut Dmamfbocr {
        self.dmamfbocr_reg().ptr()
    }
    #[doc="Get the *const pointer for the DMAMFBOCR register."]
    #[inline] pub fn dmamfbocr_ptr(&self) -> *const Dmamfbocr {
        // Same address as dmamfbocr_mut(); only the pointer mutability differs.
        self.dmamfbocr_reg().ptr()
    }
    #[doc="Read the DMAMFBOCR register."]
    #[inline] pub fn dmamfbocr(&self) -> Dmamfbocr {
        self.dmamfbocr_reg().read()
    }
    #[doc="Write the DMAMFBOCR register."]
    #[inline] pub fn write_dmamfbocr(&self, value: Dmamfbocr) -> &Self {
        self.dmamfbocr_reg().write(value);
        self
    }
    #[doc="Set the DMAMFBOCR register."]
    #[inline] pub fn set_dmamfbocr<F: FnOnce(Dmamfbocr) -> Dmamfbocr>(&self, f: F) -> &Self {
        self.dmamfbocr_reg().set(f);
        self
    }
    #[doc="Modify the DMAMFBOCR register."]
    #[inline] pub fn with_dmamfbocr<F: FnOnce(Dmamfbocr) -> Dmamfbocr>(&self, f: F) -> &Self {
        self.dmamfbocr_reg().with(f);
        self
    }
    // --- DMARSWTR (receive status watchdog timer) accessors; register at
    //     byte offset 0x24 ---
    #[doc="Get the DMARSWTR Register."]
    #[inline] pub fn dmarswtr_reg(&self) -> ::bobbin_mcu::register::Register<Dmarswtr> {
        ::bobbin_mcu::register::Register::new(self.0 as *mut Dmarswtr, 0x24)
    }
    #[doc="Get the *mut pointer for the DMARSWTR register."]
    #[inline] pub fn dmarswtr_mut(&self) -> *mut Dmarswtr {
        self.dmarswtr_reg().ptr()
    }
    #[doc="Get the *const pointer for the DMARSWTR register."]
    #[inline] pub fn dmarswtr_ptr(&self) -> *const Dmarswtr {
        self.dmarswtr_reg().ptr()
    }
    #[doc="Read the DMARSWTR register."]
    #[inline] pub fn dmarswtr(&self) -> Dmarswtr {
        self.dmarswtr_reg().read()
    }
    #[doc="Write the DMARSWTR register."]
    #[inline] pub fn write_dmarswtr(&self, value: Dmarswtr) -> &Self {
        self.dmarswtr_reg().write(value);
        self
    }
    #[doc="Set the DMARSWTR register."]
    #[inline] pub fn set_dmarswtr<F: FnOnce(Dmarswtr) -> Dmarswtr>(&self, f: F) -> &Self {
        self.dmarswtr_reg().set(f);
        self
    }
    #[doc="Modify the DMARSWTR register."]
    #[inline] pub fn with_dmarswtr<F: FnOnce(Dmarswtr) -> Dmarswtr>(&self, f: F) -> &Self {
        self.dmarswtr_reg().with(f);
        self
    }
    // --- Current host descriptor / buffer address registers (offsets
    //     0x48..0x54). Only read accessors are generated for these four
    //     registers — no write_/set_/with_ variants exist, i.e. they are
    //     treated as read-only status registers. ---
    #[doc="Get the DMACHTDR Register."]
    #[inline] pub fn dmachtdr_reg(&self) -> ::bobbin_mcu::register::Register<Dmachtdr> {
        ::bobbin_mcu::register::Register::new(self.0 as *mut Dmachtdr, 0x48)
    }
    #[doc="Get the *mut pointer for the DMACHTDR register."]
    #[inline] pub fn dmachtdr_mut(&self) -> *mut Dmachtdr {
        // Same address as dmachtdr_ptr(); only the pointer mutability differs.
        self.dmachtdr_reg().ptr()
    }
    #[doc="Get the *const pointer for the DMACHTDR register."]
    #[inline] pub fn dmachtdr_ptr(&self) -> *const Dmachtdr {
        self.dmachtdr_reg().ptr()
    }
    #[doc="Read the DMACHTDR register."]
    #[inline] pub fn dmachtdr(&self) -> Dmachtdr {
        self.dmachtdr_reg().read()
    }
    #[doc="Get the DMACHRDR Register."]
    #[inline] pub fn dmachrdr_reg(&self) -> ::bobbin_mcu::register::Register<Dmachrdr> {
        ::bobbin_mcu::register::Register::new(self.0 as *mut Dmachrdr, 0x4c)
    }
    #[doc="Get the *mut pointer for the DMACHRDR register."]
    #[inline] pub fn dmachrdr_mut(&self) -> *mut Dmachrdr {
        self.dmachrdr_reg().ptr()
    }
    #[doc="Get the *const pointer for the DMACHRDR register."]
    #[inline] pub fn dmachrdr_ptr(&self) -> *const Dmachrdr {
        self.dmachrdr_reg().ptr()
    }
    #[doc="Read the DMACHRDR register."]
    #[inline] pub fn dmachrdr(&self) -> Dmachrdr {
        self.dmachrdr_reg().read()
    }
    #[doc="Get the DMACHTBAR Register."]
    #[inline] pub fn dmachtbar_reg(&self) -> ::bobbin_mcu::register::Register<Dmachtbar> {
        ::bobbin_mcu::register::Register::new(self.0 as *mut Dmachtbar, 0x50)
    }
    #[doc="Get the *mut pointer for the DMACHTBAR register."]
    #[inline] pub fn dmachtbar_mut(&self) -> *mut Dmachtbar {
        self.dmachtbar_reg().ptr()
    }
    #[doc="Get the *const pointer for the DMACHTBAR register."]
    #[inline] pub fn dmachtbar_ptr(&self) -> *const Dmachtbar {
        self.dmachtbar_reg().ptr()
    }
    #[doc="Read the DMACHTBAR register."]
    #[inline] pub fn dmachtbar(&self) -> Dmachtbar {
        self.dmachtbar_reg().read()
    }
    #[doc="Get the DMACHRBAR Register."]
    #[inline] pub fn dmachrbar_reg(&self) -> ::bobbin_mcu::register::Register<Dmachrbar> {
        ::bobbin_mcu::register::Register::new(self.0 as *mut Dmachrbar, 0x54)
    }
    #[doc="Get the *mut pointer for the DMACHRBAR register."]
    #[inline] pub fn dmachrbar_mut(&self) -> *mut Dmachrbar {
        self.dmachrbar_reg().ptr()
    }
    #[doc="Get the *const pointer for the DMACHRBAR register."]
    #[inline] pub fn dmachrbar_ptr(&self) -> *const Dmachrbar {
        self.dmachrbar_reg().ptr()
    }
    #[doc="Read the DMACHRBAR register."]
    #[inline] pub fn dmachrbar(&self) -> Dmachrbar {
        self.dmachrbar_reg().read()
    }
}
#[doc="Ethernet DMA bus mode register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmabmr(pub u32);
// Field layout (bit positions taken from the accessors below):
//   SR[0] DA[1] DSL[6:2] EDFE[7] PBL[13:8] RTPR[15:14] FB[16]
//   RDP[22:17] USP[23] FPM[24] AAB[25] MB[26]
impl Dmabmr {
    #[doc="no description available"]
    #[inline] pub fn sr(&self) -> ::bobbin_bits::U1 {
        // All getters follow the same pattern: shift the field down to bit 0,
        // mask to the field width, and transmute the result into the
        // matching-width ::bobbin_bits wrapper type.
        unsafe { ::core::mem::transmute(((self.0 >> 0) & 0x1) as u8) } // [0]
    }
    #[doc="Returns true if SR != 0"]
    #[inline] pub fn test_sr(&self) -> bool {
        self.sr() != 0
    }
    #[doc="Sets the SR field."]
    #[inline] pub fn set_sr<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        // All setters clear the field's bits, then OR in the (range-checked)
        // new value; they take and return `self` by value for builder-style
        // chaining.
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 0);
        self.0 |= value << 0;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn da(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 1) & 0x1) as u8) } // [1]
    }
    #[doc="Returns true if DA != 0"]
    #[inline] pub fn test_da(&self) -> bool {
        self.da() != 0
    }
    #[doc="Sets the DA field."]
    #[inline] pub fn set_da<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 1);
        self.0 |= value << 1;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn dsl(&self) -> ::bobbin_bits::U5 {
        unsafe { ::core::mem::transmute(((self.0 >> 2) & 0x1f) as u8) } // [6:2]
    }
    #[doc="Returns true if DSL != 0"]
    #[inline] pub fn test_dsl(&self) -> bool {
        self.dsl() != 0
    }
    #[doc="Sets the DSL field."]
    #[inline] pub fn set_dsl<V: Into<::bobbin_bits::U5>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U5 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1f << 2);
        self.0 |= value << 2;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn edfe(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 7) & 0x1) as u8) } // [7]
    }
    #[doc="Returns true if EDFE != 0"]
    #[inline] pub fn test_edfe(&self) -> bool {
        self.edfe() != 0
    }
    #[doc="Sets the EDFE field."]
    #[inline] pub fn set_edfe<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 7);
        self.0 |= value << 7;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn pbl(&self) -> ::bobbin_bits::U6 {
        unsafe { ::core::mem::transmute(((self.0 >> 8) & 0x3f) as u8) } // [13:8]
    }
    #[doc="Returns true if PBL != 0"]
    #[inline] pub fn test_pbl(&self) -> bool {
        self.pbl() != 0
    }
    #[doc="Sets the PBL field."]
    #[inline] pub fn set_pbl<V: Into<::bobbin_bits::U6>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U6 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x3f << 8);
        self.0 |= value << 8;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn rtpr(&self) -> ::bobbin_bits::U2 {
        unsafe { ::core::mem::transmute(((self.0 >> 14) & 0x3) as u8) } // [15:14]
    }
    #[doc="Returns true if RTPR != 0"]
    #[inline] pub fn test_rtpr(&self) -> bool {
        self.rtpr() != 0
    }
    #[doc="Sets the RTPR field."]
    #[inline] pub fn set_rtpr<V: Into<::bobbin_bits::U2>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U2 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x3 << 14);
        self.0 |= value << 14;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn fb(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 16) & 0x1) as u8) } // [16]
    }
    #[doc="Returns true if FB != 0"]
    #[inline] pub fn test_fb(&self) -> bool {
        self.fb() != 0
    }
    #[doc="Sets the FB field."]
    #[inline] pub fn set_fb<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 16);
        self.0 |= value << 16;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn rdp(&self) -> ::bobbin_bits::U6 {
        unsafe { ::core::mem::transmute(((self.0 >> 17) & 0x3f) as u8) } // [22:17]
    }
    #[doc="Returns true if RDP != 0"]
    #[inline] pub fn test_rdp(&self) -> bool {
        self.rdp() != 0
    }
    #[doc="Sets the RDP field."]
    #[inline] pub fn set_rdp<V: Into<::bobbin_bits::U6>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U6 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x3f << 17);
        self.0 |= value << 17;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn usp(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 23) & 0x1) as u8) } // [23]
    }
    #[doc="Returns true if USP != 0"]
    #[inline] pub fn test_usp(&self) -> bool {
        self.usp() != 0
    }
    #[doc="Sets the USP field."]
    #[inline] pub fn set_usp<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 23);
        self.0 |= value << 23;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn fpm(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 24) & 0x1) as u8) } // [24]
    }
    #[doc="Returns true if FPM != 0"]
    #[inline] pub fn test_fpm(&self) -> bool {
        self.fpm() != 0
    }
    #[doc="Sets the FPM field."]
    #[inline] pub fn set_fpm<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 24);
        self.0 |= value << 24;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn aab(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 25) & 0x1) as u8) } // [25]
    }
    #[doc="Returns true if AAB != 0"]
    #[inline] pub fn test_aab(&self) -> bool {
        self.aab() != 0
    }
    #[doc="Sets the AAB field."]
    #[inline] pub fn set_aab<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 25);
        self.0 |= value << 25;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn mb(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 26) & 0x1) as u8) } // [26]
    }
    #[doc="Returns true if MB != 0"]
    #[inline] pub fn test_mb(&self) -> bool {
        self.mb() != 0
    }
    #[doc="Sets the MB field."]
    #[inline] pub fn set_mb<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 26);
        self.0 |= value << 26;
        self
    }
}
impl From<u32> for Dmabmr {
    #[inline]
    fn from(other: u32) -> Self {
        Dmabmr(other)
    }
}
impl ::core::fmt::Display for Dmabmr {
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        // Display delegates to the raw u32 value.
        self.0.fmt(f)
    }
}
impl ::core::fmt::Debug for Dmabmr {
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        // Prints the raw register value in hex, followed by the name of every
        // non-zero field (and its value, for multi-bit fields).
        try!(write!(f, "[0x{:08x}", self.0));
        if self.sr() != 0 { try!(write!(f, " sr"))}
        if self.da() != 0 { try!(write!(f, " da"))}
        if self.dsl() != 0 { try!(write!(f, " dsl=0x{:x}", self.dsl()))}
        if self.edfe() != 0 { try!(write!(f, " edfe"))}
        if self.pbl() != 0 { try!(write!(f, " pbl=0x{:x}", self.pbl()))}
        if self.rtpr() != 0 { try!(write!(f, " rtpr=0x{:x}", self.rtpr()))}
        if self.fb() != 0 { try!(write!(f, " fb"))}
        if self.rdp() != 0 { try!(write!(f, " rdp=0x{:x}", self.rdp()))}
        if self.usp() != 0 { try!(write!(f, " usp"))}
        if self.fpm() != 0 { try!(write!(f, " fpm"))}
        if self.aab() != 0 { try!(write!(f, " aab"))}
        if self.mb() != 0 { try!(write!(f, " mb"))}
        try!(write!(f, "]"));
        Ok(())
    }
}
#[doc="Ethernet DMA transmit poll demand register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmatpdr(pub u32);
impl Dmatpdr {
    #[doc="TPD value, occupying bits [31:0]."]
    #[inline] pub fn tpd(&self) -> ::bobbin_bits::U32 {
        // Extract the full 32-bit field and reinterpret it as the
        // ::bobbin_bits wrapper type.
        let bits = ((self.0 >> 0) & 0xffffffff) as u32;
        unsafe { ::core::mem::transmute(bits) }
    }
    #[doc="Returns true if TPD != 0"]
    #[inline] pub fn test_tpd(&self) -> bool {
        self.tpd() != 0
    }
    #[doc="Sets the TPD field."]
    #[inline] pub fn set_tpd<V: Into<::bobbin_bits::U32>>(mut self, value: V) -> Self {
        let checked: ::bobbin_bits::U32 = value.into();
        let raw: u32 = checked.into();
        // Clear bits [31:0] and merge in the new value in one expression.
        self.0 = (self.0 & !(0xffffffff << 0)) | (raw << 0);
        self
    }
}
impl From<u32> for Dmatpdr {
    #[inline]
    fn from(other: u32) -> Self {
        Dmatpdr(other)
    }
}
impl ::core::fmt::Display for Dmatpdr {
    // Display shows the raw register value exactly as the inner u32 would.
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Display::fmt(&self.0, f)
    }
}
impl ::core::fmt::Debug for Dmatpdr {
    // Debug prints the raw value as zero-padded hex in brackets.
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        try!(write!(f, "[0x{:08x}", self.0));
        try!(write!(f, "]"));
        Ok(())
    }
}
#[doc="Ethernet DMA receive poll demand register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmarpdr(pub u32);
impl Dmarpdr {
    #[doc="RPD value, occupying bits [31:0]."]
    #[inline] pub fn rpd(&self) -> ::bobbin_bits::U32 {
        // Extract the full 32-bit field and reinterpret it as the
        // ::bobbin_bits wrapper type.
        let bits = ((self.0 >> 0) & 0xffffffff) as u32;
        unsafe { ::core::mem::transmute(bits) }
    }
    #[doc="Returns true if RPD != 0"]
    #[inline] pub fn test_rpd(&self) -> bool {
        self.rpd() != 0
    }
    #[doc="Sets the RPD field."]
    #[inline] pub fn set_rpd<V: Into<::bobbin_bits::U32>>(mut self, value: V) -> Self {
        let checked: ::bobbin_bits::U32 = value.into();
        let raw: u32 = checked.into();
        // Clear bits [31:0] and merge in the new value in one expression.
        self.0 = (self.0 & !(0xffffffff << 0)) | (raw << 0);
        self
    }
}
impl From<u32> for Dmarpdr {
    #[inline]
    fn from(other: u32) -> Self {
        Dmarpdr(other)
    }
}
impl ::core::fmt::Display for Dmarpdr {
    // Display shows the raw register value exactly as the inner u32 would.
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Display::fmt(&self.0, f)
    }
}
impl ::core::fmt::Debug for Dmarpdr {
    // Debug prints the raw value as zero-padded hex in brackets.
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        try!(write!(f, "[0x{:08x}", self.0));
        try!(write!(f, "]"));
        Ok(())
    }
}
#[doc="Ethernet DMA receive descriptor list address register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmardlar(pub u32);
impl Dmardlar {
    #[doc="SRL value, occupying bits [31:0]."]
    #[inline] pub fn srl(&self) -> ::bobbin_bits::U32 {
        // Extract the full 32-bit field and reinterpret it as the
        // ::bobbin_bits wrapper type.
        let bits = ((self.0 >> 0) & 0xffffffff) as u32;
        unsafe { ::core::mem::transmute(bits) }
    }
    #[doc="Returns true if SRL != 0"]
    #[inline] pub fn test_srl(&self) -> bool {
        self.srl() != 0
    }
    #[doc="Sets the SRL field."]
    #[inline] pub fn set_srl<V: Into<::bobbin_bits::U32>>(mut self, value: V) -> Self {
        let checked: ::bobbin_bits::U32 = value.into();
        let raw: u32 = checked.into();
        // Clear bits [31:0] and merge in the new value in one expression.
        self.0 = (self.0 & !(0xffffffff << 0)) | (raw << 0);
        self
    }
}
impl From<u32> for Dmardlar {
    #[inline]
    fn from(other: u32) -> Self {
        Dmardlar(other)
    }
}
impl ::core::fmt::Display for Dmardlar {
    // Display shows the raw register value exactly as the inner u32 would.
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Display::fmt(&self.0, f)
    }
}
impl ::core::fmt::Debug for Dmardlar {
    // Debug prints the raw value as zero-padded hex in brackets.
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        try!(write!(f, "[0x{:08x}", self.0));
        try!(write!(f, "]"));
        Ok(())
    }
}
#[doc="Ethernet DMA transmit descriptor list address register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmatdlar(pub u32);
impl Dmatdlar {
    #[doc="STL value, occupying bits [31:0]."]
    #[inline] pub fn stl(&self) -> ::bobbin_bits::U32 {
        // Extract the full 32-bit field and reinterpret it as the
        // ::bobbin_bits wrapper type.
        let bits = ((self.0 >> 0) & 0xffffffff) as u32;
        unsafe { ::core::mem::transmute(bits) }
    }
    #[doc="Returns true if STL != 0"]
    #[inline] pub fn test_stl(&self) -> bool {
        self.stl() != 0
    }
    #[doc="Sets the STL field."]
    #[inline] pub fn set_stl<V: Into<::bobbin_bits::U32>>(mut self, value: V) -> Self {
        let checked: ::bobbin_bits::U32 = value.into();
        let raw: u32 = checked.into();
        // Clear bits [31:0] and merge in the new value in one expression.
        self.0 = (self.0 & !(0xffffffff << 0)) | (raw << 0);
        self
    }
}
impl From<u32> for Dmatdlar {
    #[inline]
    fn from(other: u32) -> Self {
        Dmatdlar(other)
    }
}
impl ::core::fmt::Display for Dmatdlar {
    // Display shows the raw register value exactly as the inner u32 would.
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Display::fmt(&self.0, f)
    }
}
impl ::core::fmt::Debug for Dmatdlar {
    // Debug prints the raw value as zero-padded hex in brackets.
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        try!(write!(f, "[0x{:08x}", self.0));
        try!(write!(f, "]"));
        Ok(())
    }
}
#[doc="Ethernet DMA status register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmasr(pub u32);
// Field layout (bit positions taken from the accessors below):
//   TS[0] TPSS[1] TBUS[2] TJTS[3] ROS[4] TUS[5] RS[6] RBUS[7] RPSS[8]
//   PWTS[9] ETS[10] FBES[13] ERS[14] AIS[15] NIS[16] RPS[19:17]
//   TPS[22:20] EBS[25:23] MMCS[27] PMTS[28] TSTS[29]
impl Dmasr {
    #[doc="no description available"]
    #[inline] pub fn ts(&self) -> ::bobbin_bits::U1 {
        // All getters shift the field down to bit 0, mask to the field width,
        // and transmute into the matching-width ::bobbin_bits wrapper;
        // all setters clear the field's bits and OR in the new value,
        // returning self by value for builder-style chaining.
        unsafe { ::core::mem::transmute(((self.0 >> 0) & 0x1) as u8) } // [0]
    }
    #[doc="Returns true if TS != 0"]
    #[inline] pub fn test_ts(&self) -> bool {
        self.ts() != 0
    }
    #[doc="Sets the TS field."]
    #[inline] pub fn set_ts<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 0);
        self.0 |= value << 0;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn tpss(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 1) & 0x1) as u8) } // [1]
    }
    #[doc="Returns true if TPSS != 0"]
    #[inline] pub fn test_tpss(&self) -> bool {
        self.tpss() != 0
    }
    #[doc="Sets the TPSS field."]
    #[inline] pub fn set_tpss<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 1);
        self.0 |= value << 1;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn tbus(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 2) & 0x1) as u8) } // [2]
    }
    #[doc="Returns true if TBUS != 0"]
    #[inline] pub fn test_tbus(&self) -> bool {
        self.tbus() != 0
    }
    #[doc="Sets the TBUS field."]
    #[inline] pub fn set_tbus<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 2);
        self.0 |= value << 2;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn tjts(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 3) & 0x1) as u8) } // [3]
    }
    #[doc="Returns true if TJTS != 0"]
    #[inline] pub fn test_tjts(&self) -> bool {
        self.tjts() != 0
    }
    #[doc="Sets the TJTS field."]
    #[inline] pub fn set_tjts<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 3);
        self.0 |= value << 3;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn ros(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 4) & 0x1) as u8) } // [4]
    }
    #[doc="Returns true if ROS != 0"]
    #[inline] pub fn test_ros(&self) -> bool {
        self.ros() != 0
    }
    #[doc="Sets the ROS field."]
    #[inline] pub fn set_ros<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 4);
        self.0 |= value << 4;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn tus(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 5) & 0x1) as u8) } // [5]
    }
    #[doc="Returns true if TUS != 0"]
    #[inline] pub fn test_tus(&self) -> bool {
        self.tus() != 0
    }
    #[doc="Sets the TUS field."]
    #[inline] pub fn set_tus<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 5);
        self.0 |= value << 5;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn rs(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 6) & 0x1) as u8) } // [6]
    }
    #[doc="Returns true if RS != 0"]
    #[inline] pub fn test_rs(&self) -> bool {
        self.rs() != 0
    }
    #[doc="Sets the RS field."]
    #[inline] pub fn set_rs<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 6);
        self.0 |= value << 6;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn rbus(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 7) & 0x1) as u8) } // [7]
    }
    #[doc="Returns true if RBUS != 0"]
    #[inline] pub fn test_rbus(&self) -> bool {
        self.rbus() != 0
    }
    #[doc="Sets the RBUS field."]
    #[inline] pub fn set_rbus<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 7);
        self.0 |= value << 7;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn rpss(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 8) & 0x1) as u8) } // [8]
    }
    #[doc="Returns true if RPSS != 0"]
    #[inline] pub fn test_rpss(&self) -> bool {
        self.rpss() != 0
    }
    #[doc="Sets the RPSS field."]
    #[inline] pub fn set_rpss<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 8);
        self.0 |= value << 8;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn pwts(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 9) & 0x1) as u8) } // [9]
    }
    #[doc="Returns true if PWTS != 0"]
    #[inline] pub fn test_pwts(&self) -> bool {
        self.pwts() != 0
    }
    #[doc="Sets the PWTS field."]
    #[inline] pub fn set_pwts<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 9);
        self.0 |= value << 9;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn ets(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 10) & 0x1) as u8) } // [10]
    }
    #[doc="Returns true if ETS != 0"]
    #[inline] pub fn test_ets(&self) -> bool {
        self.ets() != 0
    }
    #[doc="Sets the ETS field."]
    #[inline] pub fn set_ets<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 10);
        self.0 |= value << 10;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn fbes(&self) -> ::bobbin_bits::U1 {
        // Note: bits [12:11] have no accessor in this generated type.
        unsafe { ::core::mem::transmute(((self.0 >> 13) & 0x1) as u8) } // [13]
    }
    #[doc="Returns true if FBES != 0"]
    #[inline] pub fn test_fbes(&self) -> bool {
        self.fbes() != 0
    }
    #[doc="Sets the FBES field."]
    #[inline] pub fn set_fbes<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 13);
        self.0 |= value << 13;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn ers(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 14) & 0x1) as u8) } // [14]
    }
    #[doc="Returns true if ERS != 0"]
    #[inline] pub fn test_ers(&self) -> bool {
        self.ers() != 0
    }
    #[doc="Sets the ERS field."]
    #[inline] pub fn set_ers<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 14);
        self.0 |= value << 14;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn ais(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 15) & 0x1) as u8) } // [15]
    }
    #[doc="Returns true if AIS != 0"]
    #[inline] pub fn test_ais(&self) -> bool {
        self.ais() != 0
    }
    #[doc="Sets the AIS field."]
    #[inline] pub fn set_ais<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 15);
        self.0 |= value << 15;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn nis(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 16) & 0x1) as u8) } // [16]
    }
    #[doc="Returns true if NIS != 0"]
    #[inline] pub fn test_nis(&self) -> bool {
        self.nis() != 0
    }
    #[doc="Sets the NIS field."]
    #[inline] pub fn set_nis<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 16);
        self.0 |= value << 16;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn rps(&self) -> ::bobbin_bits::U3 {
        unsafe { ::core::mem::transmute(((self.0 >> 17) & 0x7) as u8) } // [19:17]
    }
    #[doc="Returns true if RPS != 0"]
    #[inline] pub fn test_rps(&self) -> bool {
        self.rps() != 0
    }
    #[doc="Sets the RPS field."]
    #[inline] pub fn set_rps<V: Into<::bobbin_bits::U3>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U3 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x7 << 17);
        self.0 |= value << 17;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn tps(&self) -> ::bobbin_bits::U3 {
        unsafe { ::core::mem::transmute(((self.0 >> 20) & 0x7) as u8) } // [22:20]
    }
    #[doc="Returns true if TPS != 0"]
    #[inline] pub fn test_tps(&self) -> bool {
        self.tps() != 0
    }
    #[doc="Sets the TPS field."]
    #[inline] pub fn set_tps<V: Into<::bobbin_bits::U3>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U3 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x7 << 20);
        self.0 |= value << 20;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn ebs(&self) -> ::bobbin_bits::U3 {
        unsafe { ::core::mem::transmute(((self.0 >> 23) & 0x7) as u8) } // [25:23]
    }
    #[doc="Returns true if EBS != 0"]
    #[inline] pub fn test_ebs(&self) -> bool {
        self.ebs() != 0
    }
    #[doc="Sets the EBS field."]
    #[inline] pub fn set_ebs<V: Into<::bobbin_bits::U3>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U3 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x7 << 23);
        self.0 |= value << 23;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn mmcs(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 27) & 0x1) as u8) } // [27]
    }
    #[doc="Returns true if MMCS != 0"]
    #[inline] pub fn test_mmcs(&self) -> bool {
        self.mmcs() != 0
    }
    #[doc="Sets the MMCS field."]
    #[inline] pub fn set_mmcs<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 27);
        self.0 |= value << 27;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn pmts(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 28) & 0x1) as u8) } // [28]
    }
    #[doc="Returns true if PMTS != 0"]
    #[inline] pub fn test_pmts(&self) -> bool {
        self.pmts() != 0
    }
    #[doc="Sets the PMTS field."]
    #[inline] pub fn set_pmts<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 28);
        self.0 |= value << 28;
        self
    }
    #[doc="no description available"]
    #[inline] pub fn tsts(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 29) & 0x1) as u8) } // [29]
    }
    #[doc="Returns true if TSTS != 0"]
    #[inline] pub fn test_tsts(&self) -> bool {
        self.tsts() != 0
    }
    #[doc="Sets the TSTS field."]
    #[inline] pub fn set_tsts<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 29);
        self.0 |= value << 29;
        self
    }
}
impl From<u32> for Dmasr {
    #[inline]
    fn from(other: u32) -> Self {
        Dmasr(other)
    }
}
impl ::core::fmt::Display for Dmasr {
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        // Display delegates to the raw u32 value.
        self.0.fmt(f)
    }
}
impl ::core::fmt::Debug for Dmasr {
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        // Prints the raw register value in hex, followed by the name of every
        // non-zero field (and its value, for multi-bit fields).
        try!(write!(f, "[0x{:08x}", self.0));
        if self.ts() != 0 { try!(write!(f, " ts"))}
        if self.tpss() != 0 { try!(write!(f, " tpss"))}
        if self.tbus() != 0 { try!(write!(f, " tbus"))}
        if self.tjts() != 0 { try!(write!(f, " tjts"))}
        if self.ros() != 0 { try!(write!(f, " ros"))}
        if self.tus() != 0 { try!(write!(f, " tus"))}
        if self.rs() != 0 { try!(write!(f, " rs"))}
        if self.rbus() != 0 { try!(write!(f, " rbus"))}
        if self.rpss() != 0 { try!(write!(f, " rpss"))}
        if self.pwts() != 0 { try!(write!(f, " pwts"))}
        if self.ets() != 0 { try!(write!(f, " ets"))}
        if self.fbes() != 0 { try!(write!(f, " fbes"))}
        if self.ers() != 0 { try!(write!(f, " ers"))}
        if self.ais() != 0 { try!(write!(f, " ais"))}
        if self.nis() != 0 { try!(write!(f, " nis"))}
        if self.rps() != 0 { try!(write!(f, " rps=0x{:x}", self.rps()))}
        if self.tps() != 0 { try!(write!(f, " tps=0x{:x}", self.tps()))}
        if self.ebs() != 0 { try!(write!(f, " ebs=0x{:x}", self.ebs()))}
        if self.mmcs() != 0 { try!(write!(f, " mmcs"))}
        if self.pmts() != 0 { try!(write!(f, " pmts"))}
        if self.tsts() != 0 { try!(write!(f, " tsts"))}
        try!(write!(f, "]"));
        Ok(())
    }
}
#[doc="Ethernet DMA operation mode register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmaomr(pub u32);
// Field layout (bit positions taken from the accessors below):
//   SR[1] OSF[2] RTC[4:3] FUGF[6] FEF[7] ST[13] TTC[16:14] FTF[20]
//   TSF[21] DFRF[24] RSF[25] ... (further fields follow below)
impl Dmaomr {
    #[doc="SR"]
    #[inline] pub fn sr(&self) -> ::bobbin_bits::U1 {
        // All getters shift the field down to bit 0, mask to the field width,
        // and transmute into the matching-width ::bobbin_bits wrapper;
        // all setters clear the field's bits and OR in the new value,
        // returning self by value for builder-style chaining.
        unsafe { ::core::mem::transmute(((self.0 >> 1) & 0x1) as u8) } // [1]
    }
    #[doc="Returns true if SR != 0"]
    #[inline] pub fn test_sr(&self) -> bool {
        self.sr() != 0
    }
    #[doc="Sets the SR field."]
    #[inline] pub fn set_sr<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 1);
        self.0 |= value << 1;
        self
    }
    #[doc="OSF"]
    #[inline] pub fn osf(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 2) & 0x1) as u8) } // [2]
    }
    #[doc="Returns true if OSF != 0"]
    #[inline] pub fn test_osf(&self) -> bool {
        self.osf() != 0
    }
    #[doc="Sets the OSF field."]
    #[inline] pub fn set_osf<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 2);
        self.0 |= value << 2;
        self
    }
    #[doc="RTC"]
    #[inline] pub fn rtc(&self) -> ::bobbin_bits::U2 {
        unsafe { ::core::mem::transmute(((self.0 >> 3) & 0x3) as u8) } // [4:3]
    }
    #[doc="Returns true if RTC != 0"]
    #[inline] pub fn test_rtc(&self) -> bool {
        self.rtc() != 0
    }
    #[doc="Sets the RTC field."]
    #[inline] pub fn set_rtc<V: Into<::bobbin_bits::U2>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U2 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x3 << 3);
        self.0 |= value << 3;
        self
    }
    #[doc="FUGF"]
    #[inline] pub fn fugf(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 6) & 0x1) as u8) } // [6]
    }
    #[doc="Returns true if FUGF != 0"]
    #[inline] pub fn test_fugf(&self) -> bool {
        self.fugf() != 0
    }
    #[doc="Sets the FUGF field."]
    #[inline] pub fn set_fugf<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 6);
        self.0 |= value << 6;
        self
    }
    #[doc="FEF"]
    #[inline] pub fn fef(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 7) & 0x1) as u8) } // [7]
    }
    #[doc="Returns true if FEF != 0"]
    #[inline] pub fn test_fef(&self) -> bool {
        self.fef() != 0
    }
    #[doc="Sets the FEF field."]
    #[inline] pub fn set_fef<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 7);
        self.0 |= value << 7;
        self
    }
    #[doc="ST"]
    #[inline] pub fn st(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 13) & 0x1) as u8) } // [13]
    }
    #[doc="Returns true if ST != 0"]
    #[inline] pub fn test_st(&self) -> bool {
        self.st() != 0
    }
    #[doc="Sets the ST field."]
    #[inline] pub fn set_st<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 13);
        self.0 |= value << 13;
        self
    }
    #[doc="TTC"]
    #[inline] pub fn ttc(&self) -> ::bobbin_bits::U3 {
        unsafe { ::core::mem::transmute(((self.0 >> 14) & 0x7) as u8) } // [16:14]
    }
    #[doc="Returns true if TTC != 0"]
    #[inline] pub fn test_ttc(&self) -> bool {
        self.ttc() != 0
    }
    #[doc="Sets the TTC field."]
    #[inline] pub fn set_ttc<V: Into<::bobbin_bits::U3>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U3 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x7 << 14);
        self.0 |= value << 14;
        self
    }
    #[doc="FTF"]
    #[inline] pub fn ftf(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 20) & 0x1) as u8) } // [20]
    }
    #[doc="Returns true if FTF != 0"]
    #[inline] pub fn test_ftf(&self) -> bool {
        self.ftf() != 0
    }
    #[doc="Sets the FTF field."]
    #[inline] pub fn set_ftf<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 20);
        self.0 |= value << 20;
        self
    }
    #[doc="TSF"]
    #[inline] pub fn tsf(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 21) & 0x1) as u8) } // [21]
    }
    #[doc="Returns true if TSF != 0"]
    #[inline] pub fn test_tsf(&self) -> bool {
        self.tsf() != 0
    }
    #[doc="Sets the TSF field."]
    #[inline] pub fn set_tsf<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 21);
        self.0 |= value << 21;
        self
    }
    #[doc="DFRF"]
    #[inline] pub fn dfrf(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 24) & 0x1) as u8) } // [24]
    }
    #[doc="Returns true if DFRF != 0"]
    #[inline] pub fn test_dfrf(&self) -> bool {
        self.dfrf() != 0
    }
    #[doc="Sets the DFRF field."]
    #[inline] pub fn set_dfrf<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 24);
        self.0 |= value << 24;
        self
    }
    #[doc="RSF"]
    #[inline] pub fn rsf(&self) -> ::bobbin_bits::U1 {
        unsafe { ::core::mem::transmute(((self.0 >> 25) & 0x1) as u8) } // [25]
    }
    #[doc="Returns true if RSF != 0"]
    #[inline] pub fn test_rsf(&self) -> bool {
        self.rsf() != 0
    }
    #[doc="Sets the RSF field."]
    #[inline] pub fn set_rsf<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
        let value: ::bobbin_bits::U1 = value.into();
        let value: u32 = value.into();
        self.0 &= !(0x1 << 25);
        self.0 |= value << 25;
        self
    }
#[doc="DTCEFD"]
#[inline] pub fn dtcefd(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 26) & 0x1) as u8) } // [26]
}
#[doc="Returns true if DTCEFD != 0"]
#[inline] pub fn test_dtcefd(&self) -> bool {
self.dtcefd() != 0
}
#[doc="Sets the DTCEFD field."]
#[inline] pub fn set_dtcefd<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 26);
self.0 |= value << 26;
self
}
}
impl From<u32> for Dmaomr {
#[inline]
fn from(other: u32) -> Self {
Dmaomr(other)
}
}
impl ::core::fmt::Display for Dmaomr {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
self.0.fmt(f)
}
}
impl ::core::fmt::Debug for Dmaomr {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
try!(write!(f, "[0x{:08x}", self.0));
if self.sr() != 0 { try!(write!(f, " sr"))}
if self.osf() != 0 { try!(write!(f, " osf"))}
if self.rtc() != 0 { try!(write!(f, " rtc=0x{:x}", self.rtc()))}
if self.fugf() != 0 { try!(write!(f, " fugf"))}
if self.fef() != 0 { try!(write!(f, " fef"))}
if self.st() != 0 { try!(write!(f, " st"))}
if self.ttc() != 0 { try!(write!(f, " ttc=0x{:x}", self.ttc()))}
if self.ftf() != 0 { try!(write!(f, " ftf"))}
if self.tsf() != 0 { try!(write!(f, " tsf"))}
if self.dfrf() != 0 { try!(write!(f, " dfrf"))}
if self.rsf() != 0 { try!(write!(f, " rsf"))}
if self.dtcefd() != 0 { try!(write!(f, " dtcefd"))}
try!(write!(f, "]"));
Ok(())
}
}
#[doc="Ethernet DMA interrupt enable register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmaier(pub u32);
impl Dmaier {
#[doc="no description available"]
#[inline] pub fn tie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 0) & 0x1) as u8) } // [0]
}
#[doc="Returns true if TIE != 0"]
#[inline] pub fn test_tie(&self) -> bool {
self.tie() != 0
}
#[doc="Sets the TIE field."]
#[inline] pub fn set_tie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 0);
self.0 |= value << 0;
self
}
#[doc="no description available"]
#[inline] pub fn tpsie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 1) & 0x1) as u8) } // [1]
}
#[doc="Returns true if TPSIE != 0"]
#[inline] pub fn test_tpsie(&self) -> bool {
self.tpsie() != 0
}
#[doc="Sets the TPSIE field."]
#[inline] pub fn set_tpsie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 1);
self.0 |= value << 1;
self
}
#[doc="no description available"]
#[inline] pub fn tbuie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 2) & 0x1) as u8) } // [2]
}
#[doc="Returns true if TBUIE != 0"]
#[inline] pub fn test_tbuie(&self) -> bool {
self.tbuie() != 0
}
#[doc="Sets the TBUIE field."]
#[inline] pub fn set_tbuie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 2);
self.0 |= value << 2;
self
}
#[doc="no description available"]
#[inline] pub fn tjtie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 3) & 0x1) as u8) } // [3]
}
#[doc="Returns true if TJTIE != 0"]
#[inline] pub fn test_tjtie(&self) -> bool {
self.tjtie() != 0
}
#[doc="Sets the TJTIE field."]
#[inline] pub fn set_tjtie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 3);
self.0 |= value << 3;
self
}
#[doc="no description available"]
#[inline] pub fn roie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 4) & 0x1) as u8) } // [4]
}
#[doc="Returns true if ROIE != 0"]
#[inline] pub fn test_roie(&self) -> bool {
self.roie() != 0
}
#[doc="Sets the ROIE field."]
#[inline] pub fn set_roie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 4);
self.0 |= value << 4;
self
}
#[doc="no description available"]
#[inline] pub fn tuie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 5) & 0x1) as u8) } // [5]
}
#[doc="Returns true if TUIE != 0"]
#[inline] pub fn test_tuie(&self) -> bool {
self.tuie() != 0
}
#[doc="Sets the TUIE field."]
#[inline] pub fn set_tuie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 5);
self.0 |= value << 5;
self
}
#[doc="no description available"]
#[inline] pub fn rie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 6) & 0x1) as u8) } // [6]
}
#[doc="Returns true if RIE != 0"]
#[inline] pub fn test_rie(&self) -> bool {
self.rie() != 0
}
#[doc="Sets the RIE field."]
#[inline] pub fn set_rie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 6);
self.0 |= value << 6;
self
}
#[doc="no description available"]
#[inline] pub fn rbuie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 7) & 0x1) as u8) } // [7]
}
#[doc="Returns true if RBUIE != 0"]
#[inline] pub fn test_rbuie(&self) -> bool {
self.rbuie() != 0
}
#[doc="Sets the RBUIE field."]
#[inline] pub fn set_rbuie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 7);
self.0 |= value << 7;
self
}
#[doc="no description available"]
#[inline] pub fn rpsie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 8) & 0x1) as u8) } // [8]
}
#[doc="Returns true if RPSIE != 0"]
#[inline] pub fn test_rpsie(&self) -> bool {
self.rpsie() != 0
}
#[doc="Sets the RPSIE field."]
#[inline] pub fn set_rpsie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 8);
self.0 |= value << 8;
self
}
#[doc="no description available"]
#[inline] pub fn rwtie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 9) & 0x1) as u8) } // [9]
}
#[doc="Returns true if RWTIE != 0"]
#[inline] pub fn test_rwtie(&self) -> bool {
self.rwtie() != 0
}
#[doc="Sets the RWTIE field."]
#[inline] pub fn set_rwtie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 9);
self.0 |= value << 9;
self
}
#[doc="no description available"]
#[inline] pub fn etie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 10) & 0x1) as u8) } // [10]
}
#[doc="Returns true if ETIE != 0"]
#[inline] pub fn test_etie(&self) -> bool {
self.etie() != 0
}
#[doc="Sets the ETIE field."]
#[inline] pub fn set_etie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 10);
self.0 |= value << 10;
self
}
#[doc="no description available"]
#[inline] pub fn fbeie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 13) & 0x1) as u8) } // [13]
}
#[doc="Returns true if FBEIE != 0"]
#[inline] pub fn test_fbeie(&self) -> bool {
self.fbeie() != 0
}
#[doc="Sets the FBEIE field."]
#[inline] pub fn set_fbeie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 13);
self.0 |= value << 13;
self
}
#[doc="no description available"]
#[inline] pub fn erie(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 14) & 0x1) as u8) } // [14]
}
#[doc="Returns true if ERIE != 0"]
#[inline] pub fn test_erie(&self) -> bool {
self.erie() != 0
}
#[doc="Sets the ERIE field."]
#[inline] pub fn set_erie<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 14);
self.0 |= value << 14;
self
}
#[doc="no description available"]
#[inline] pub fn aise(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 15) & 0x1) as u8) } // [15]
}
#[doc="Returns true if AISE != 0"]
#[inline] pub fn test_aise(&self) -> bool {
self.aise() != 0
}
#[doc="Sets the AISE field."]
#[inline] pub fn set_aise<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 15);
self.0 |= value << 15;
self
}
#[doc="no description available"]
#[inline] pub fn nise(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 16) & 0x1) as u8) } // [16]
}
#[doc="Returns true if NISE != 0"]
#[inline] pub fn test_nise(&self) -> bool {
self.nise() != 0
}
#[doc="Sets the NISE field."]
#[inline] pub fn set_nise<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 16);
self.0 |= value << 16;
self
}
}
impl From<u32> for Dmaier {
#[inline]
fn from(other: u32) -> Self {
Dmaier(other)
}
}
impl ::core::fmt::Display for Dmaier {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
self.0.fmt(f)
}
}
impl ::core::fmt::Debug for Dmaier {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
try!(write!(f, "[0x{:08x}", self.0));
if self.tie() != 0 { try!(write!(f, " tie"))}
if self.tpsie() != 0 { try!(write!(f, " tpsie"))}
if self.tbuie() != 0 { try!(write!(f, " tbuie"))}
if self.tjtie() != 0 { try!(write!(f, " tjtie"))}
if self.roie() != 0 { try!(write!(f, " roie"))}
if self.tuie() != 0 { try!(write!(f, " tuie"))}
if self.rie() != 0 { try!(write!(f, " rie"))}
if self.rbuie() != 0 { try!(write!(f, " rbuie"))}
if self.rpsie() != 0 { try!(write!(f, " rpsie"))}
if self.rwtie() != 0 { try!(write!(f, " rwtie"))}
if self.etie() != 0 { try!(write!(f, " etie"))}
if self.fbeie() != 0 { try!(write!(f, " fbeie"))}
if self.erie() != 0 { try!(write!(f, " erie"))}
if self.aise() != 0 { try!(write!(f, " aise"))}
if self.nise() != 0 { try!(write!(f, " nise"))}
try!(write!(f, "]"));
Ok(())
}
}
#[doc="Ethernet DMA missed frame and buffer overflow counter register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmamfbocr(pub u32);
impl Dmamfbocr {
#[doc="no description available"]
#[inline] pub fn mfc(&self) -> ::bobbin_bits::U16 {
unsafe { ::core::mem::transmute(((self.0 >> 0) & 0xffff) as u16) } // [15:0]
}
#[doc="Returns true if MFC != 0"]
#[inline] pub fn test_mfc(&self) -> bool {
self.mfc() != 0
}
#[doc="Sets the MFC field."]
#[inline] pub fn set_mfc<V: Into<::bobbin_bits::U16>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U16 = value.into();
let value: u32 = value.into();
self.0 &= !(0xffff << 0);
self.0 |= value << 0;
self
}
#[doc="no description available"]
#[inline] pub fn omfc(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 16) & 0x1) as u8) } // [16]
}
#[doc="Returns true if OMFC != 0"]
#[inline] pub fn test_omfc(&self) -> bool {
self.omfc() != 0
}
#[doc="Sets the OMFC field."]
#[inline] pub fn set_omfc<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 16);
self.0 |= value << 16;
self
}
#[doc="no description available"]
#[inline] pub fn mfa(&self) -> ::bobbin_bits::U11 {
unsafe { ::core::mem::transmute(((self.0 >> 17) & 0x7ff) as u16) } // [27:17]
}
#[doc="Returns true if MFA != 0"]
#[inline] pub fn test_mfa(&self) -> bool {
self.mfa() != 0
}
#[doc="Sets the MFA field."]
#[inline] pub fn set_mfa<V: Into<::bobbin_bits::U11>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U11 = value.into();
let value: u32 = value.into();
self.0 &= !(0x7ff << 17);
self.0 |= value << 17;
self
}
#[doc="no description available"]
#[inline] pub fn ofoc(&self) -> ::bobbin_bits::U1 {
unsafe { ::core::mem::transmute(((self.0 >> 28) & 0x1) as u8) } // [28]
}
#[doc="Returns true if OFOC != 0"]
#[inline] pub fn test_ofoc(&self) -> bool {
self.ofoc() != 0
}
#[doc="Sets the OFOC field."]
#[inline] pub fn set_ofoc<V: Into<::bobbin_bits::U1>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U1 = value.into();
let value: u32 = value.into();
self.0 &= !(0x1 << 28);
self.0 |= value << 28;
self
}
}
impl From<u32> for Dmamfbocr {
#[inline]
fn from(other: u32) -> Self {
Dmamfbocr(other)
}
}
impl ::core::fmt::Display for Dmamfbocr {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
self.0.fmt(f)
}
}
impl ::core::fmt::Debug for Dmamfbocr {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
try!(write!(f, "[0x{:08x}", self.0));
if self.mfc() != 0 { try!(write!(f, " mfc=0x{:x}", self.mfc()))}
if self.omfc() != 0 { try!(write!(f, " omfc"))}
if self.mfa() != 0 { try!(write!(f, " mfa=0x{:x}", self.mfa()))}
if self.ofoc() != 0 { try!(write!(f, " ofoc"))}
try!(write!(f, "]"));
Ok(())
}
}
#[doc="Ethernet DMA receive status watchdog timer register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmarswtr(pub u32);
impl Dmarswtr {
#[doc="RSWTC"]
#[inline] pub fn rswtc(&self) -> ::bobbin_bits::U8 {
unsafe { ::core::mem::transmute(((self.0 >> 0) & 0xff) as u8) } // [7:0]
}
#[doc="Returns true if RSWTC != 0"]
#[inline] pub fn test_rswtc(&self) -> bool {
self.rswtc() != 0
}
#[doc="Sets the RSWTC field."]
#[inline] pub fn set_rswtc<V: Into<::bobbin_bits::U8>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U8 = value.into();
let value: u32 = value.into();
self.0 &= !(0xff << 0);
self.0 |= value << 0;
self
}
}
impl From<u32> for Dmarswtr {
#[inline]
fn from(other: u32) -> Self {
Dmarswtr(other)
}
}
impl ::core::fmt::Display for Dmarswtr {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
self.0.fmt(f)
}
}
impl ::core::fmt::Debug for Dmarswtr {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
try!(write!(f, "[0x{:08x}", self.0));
if self.rswtc() != 0 { try!(write!(f, " rswtc=0x{:x}", self.rswtc()))}
try!(write!(f, "]"));
Ok(())
}
}
#[doc="Ethernet DMA current host transmit descriptor register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmachtdr(pub u32);
impl Dmachtdr {
#[doc="HTDAP"]
#[inline] pub fn htdap(&self) -> ::bobbin_bits::U32 {
unsafe { ::core::mem::transmute(((self.0 >> 0) & 0xffffffff) as u32) } // [31:0]
}
#[doc="Returns true if HTDAP != 0"]
#[inline] pub fn test_htdap(&self) -> bool {
self.htdap() != 0
}
#[doc="Sets the HTDAP field."]
#[inline] pub fn set_htdap<V: Into<::bobbin_bits::U32>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U32 = value.into();
let value: u32 = value.into();
self.0 &= !(0xffffffff << 0);
self.0 |= value << 0;
self
}
}
impl From<u32> for Dmachtdr {
#[inline]
fn from(other: u32) -> Self {
Dmachtdr(other)
}
}
impl ::core::fmt::Display for Dmachtdr {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
self.0.fmt(f)
}
}
impl ::core::fmt::Debug for Dmachtdr {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
try!(write!(f, "[0x{:08x}", self.0));
try!(write!(f, "]"));
Ok(())
}
}
#[doc="Ethernet DMA current host receive descriptor register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmachrdr(pub u32);
impl Dmachrdr {
#[doc="HRDAP"]
#[inline] pub fn hrdap(&self) -> ::bobbin_bits::U32 {
unsafe { ::core::mem::transmute(((self.0 >> 0) & 0xffffffff) as u32) } // [31:0]
}
#[doc="Returns true if HRDAP != 0"]
#[inline] pub fn test_hrdap(&self) -> bool {
self.hrdap() != 0
}
#[doc="Sets the HRDAP field."]
#[inline] pub fn set_hrdap<V: Into<::bobbin_bits::U32>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U32 = value.into();
let value: u32 = value.into();
self.0 &= !(0xffffffff << 0);
self.0 |= value << 0;
self
}
}
impl From<u32> for Dmachrdr {
#[inline]
fn from(other: u32) -> Self {
Dmachrdr(other)
}
}
impl ::core::fmt::Display for Dmachrdr {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
self.0.fmt(f)
}
}
impl ::core::fmt::Debug for Dmachrdr {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
try!(write!(f, "[0x{:08x}", self.0));
try!(write!(f, "]"));
Ok(())
}
}
#[doc="Ethernet DMA current host transmit buffer address register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmachtbar(pub u32);
impl Dmachtbar {
#[doc="no description available"]
#[inline] pub fn htbap(&self) -> ::bobbin_bits::U32 {
unsafe { ::core::mem::transmute(((self.0 >> 0) & 0xffffffff) as u32) } // [31:0]
}
#[doc="Returns true if HTBAP != 0"]
#[inline] pub fn test_htbap(&self) -> bool {
self.htbap() != 0
}
#[doc="Sets the HTBAP field."]
#[inline] pub fn set_htbap<V: Into<::bobbin_bits::U32>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U32 = value.into();
let value: u32 = value.into();
self.0 &= !(0xffffffff << 0);
self.0 |= value << 0;
self
}
}
impl From<u32> for Dmachtbar {
#[inline]
fn from(other: u32) -> Self {
Dmachtbar(other)
}
}
impl ::core::fmt::Display for Dmachtbar {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
self.0.fmt(f)
}
}
impl ::core::fmt::Debug for Dmachtbar {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
try!(write!(f, "[0x{:08x}", self.0));
try!(write!(f, "]"));
Ok(())
}
}
#[doc="Ethernet DMA current host receive buffer address register"]
#[derive(Default, Clone, Copy, PartialEq, Eq)]
pub struct Dmachrbar(pub u32);
impl Dmachrbar {
#[doc="no description available"]
#[inline] pub fn hrbap(&self) -> ::bobbin_bits::U32 {
unsafe { ::core::mem::transmute(((self.0 >> 0) & 0xffffffff) as u32) } // [31:0]
}
#[doc="Returns true if HRBAP != 0"]
#[inline] pub fn test_hrbap(&self) -> bool {
self.hrbap() != 0
}
#[doc="Sets the HRBAP field."]
#[inline] pub fn set_hrbap<V: Into<::bobbin_bits::U32>>(mut self, value: V) -> Self {
let value: ::bobbin_bits::U32 = value.into();
let value: u32 = value.into();
self.0 &= !(0xffffffff << 0);
self.0 |= value << 0;
self
}
}
impl From<u32> for Dmachrbar {
#[inline]
fn from(other: u32) -> Self {
Dmachrbar(other)
}
}
impl ::core::fmt::Display for Dmachrbar {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
self.0.fmt(f)
}
}
impl ::core::fmt::Debug for Dmachrbar {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
try!(write!(f, "[0x{:08x}", self.0));
try!(write!(f, "]"));
Ok(())
}
}
| {
::bobbin_mcu::register::Register::new(self.0 as *mut Dmarpdr, 0x8)
} |
generate_css_for_a_board_with_hexcodes.py | #! /usr/bin/python2
#config
themes = {
'grey': ['#fff', '#c4c4c4'],
'green': ['#ffffdd', '#86a666'],
'blue': ['#dee3e6', '#8ca2ad'],
'brown': ['#f0d9b5', '#b58863']
}
blackPattern = 'body.{name} #GameBoard td.blackSquare, body.{name} #GameBoard td.highlightBlackSquare, body.{name} div.lcs.black, #top div.lcs.black.{name} { background: {black}; }'
whitePattern = 'body.{name} #GameBoard td.whiteSquare, body.{name} #GameBoard td.highlightWhiteSquare, body.{name} div.lcs.white, #top div.lcs.white.{name}, body.{name} div.lichess_board { background: {white}; }'
for name in themes:
def formatCss(pattern):
|
print formatCss(whitePattern)
print formatCss(blackPattern)
| return pattern.replace('{name}', name).replace('{white}', themes[name][0]).replace('{black}', themes[name][1]) |
Products.styled.js | import styled from 'styled-components';
import { buttonLinkStyles } from '@components/styled';
import {
Container as GenericContainer, SPImage,
} from '@components';
import { queries } from '@utils';
export const List = styled.ul`
display: grid;
grid-template-columns: repeat(3, 1fr);
gap: 1.71875vw;
& + & {
margin-top: 4.9vw;
}
@media ${queries.xs} {
grid-template-columns: 1fr;
}
`;
export const Product = styled.li`
> a {
:hover {
div:last-child {
filter: brightness(1.2);
border-color: #fff;
color: #fff;
}
}
@media ${queries.xs} {
display: flex;
flex-wrap: wrap;
align-items: center;
}
@media ${queries.xxsplus} {
display: block;
}
}
`;
export const Image = styled(SPImage)`
width: 100%;
> img {
width: 100%;
}
@media ${queries.xs} {
width: 40%;
}
`;
export const Pipe = styled.span` | `;
export const Name = styled.h3`
padding: 0.78125vw 0 0.68vw;
color: ${({ theme }) => theme.getColor('accent')};
font-weight: 600;
font-size: clamp(16px, 1.25vw, 24px);
line-height: 1.208333;
@media ${queries.huge} {
font-size: 22px;
}
@media ${queries.xs} {
width: 60%;
padding-left: 2.5%;
}
@media ${queries.xxsplus} {
width: 100%;
padding-left: 0;
}
`;
export const Description = styled.p`
font-weight: 600;
font-size: 20px;
line-height: 1.2;
@media ${queries.huge} {
margin-top: 1em;
font-size: 18px;
}
`;
export const Text = styled.p`
padding: 0.9375vw 0 1.198vw;
font-size: 16px;
line-height: 1.5;
em,
i {
font-style: italic
}
b,
strong {
font-weight: 600;
}
@media ${queries.xs} {
width: 100%;
}
`;
export const ArrowWrapper = styled.div`
${buttonLinkStyles};
border-color: #fff;
background: ${({ theme }) => theme.getGradient()};
color: #fff;
transition: ${({ theme }) => theme.getTransitions(['filter'])};
> svg {
fill: currentColor;
}
@media ${queries.xs} {
margin-left: auto;
padding: 0.5em 1em;
> svg {
height: 1em;
}
}
`;
export const Container = styled(GenericContainer)`
@media ${queries.xxsplus} {
padding: 0;
}
`;
export const SectionHeading = styled.h2`
display: inline-flex;
align-items: center;
gap: 0.25em;
padding: 2vw 0;
color: ${({ theme }) => theme.getColor('accent')};
font-weight: 600;
font-size: 34px;
line-height: 1.208333;
::before {
content: '';
width: 0.55em;
height: 2px;
background-color: ${({ theme }) => theme.getColor('accent')};
}
@media ${queries.huge} {
font-size: 28px;
}
@media ${queries.xs} {
width: 60%;
padding-left: 2.5%;
}
@media ${queries.xxsplus} {
width: 100%;
padding-left: 0;
}
`; | color: ${({ theme }) => theme.getColor('main')}; |
sql.go | package main
import (
"database/sql"
"encoding/json"
"fmt"
"reflect"
"github.com/go-sql-driver/mysql"
)
func handErr(err error) {
if err != nil {
panic(err)
}
}
func main() {
// db, err := sql.Open("mysql", "root:root@/test?parseTime=true&loc="+url.QueryEscape("Asia/Shanghai"))
db, err := sql.Open("mysql", "root:root@/test2")
handErr(err)
rows, err := db.Query("select * from users")
handErr(err)
defer rows.Close()
cols, err := rows.Columns()
handErr(err)
ct, err := rows.ColumnTypes()
handErr(err)
arr := make([]interface{}, len(ct))
for i, v := range ct {
t := v.ScanType()
v := reflect.New(t).Interface()
arr[i] = v
fmt.Println(cols[i], t)
}
for rows.Next() {
err = rows.Scan(arr...)
handErr(err)
m := make(map[string]interface{})
for i, col := range cols {
if col == "template_info" || col == "state" |
v := arr[i]
switch vv := v.(type) {
case *int32:
m[col] = *vv
case *sql.NullString:
m[col] = *vv
case *sql.NullBool:
m[col] = *vv
case *sql.NullFloat64:
m[col] = *vv
case *sql.NullInt64:
m[col] = *vv
case *sql.RawBytes:
m[col] = string(*vv)
case *mysql.NullTime:
m[col] = *vv
default:
m[col] = vv
panic("unknow type")
}
}
if bts, err := json.MarshalIndent(m, "", " "); err != nil {
panic(err)
} else {
fmt.Println(string(bts))
}
}
err = rows.Err()
handErr(err)
}
| {
m[col] = ""
continue
} |
tsd_net.py | import torch
from torch import nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import math
from sequicity.config import global_config as cfg
import copy, random, time, logging
from torch.distributions import Categorical
from sequicity.reader import pad_sequences
import pdb
import simulator.dialog_config as dialog_config
import pdb
def cuda_(var):
return var.cuda() if cfg.cuda else var
def toss_(p):
return random.randint(0, 99) <= p
def nan(v):
if type(v) is float:
return v == float('nan')
return np.isnan(np.sum(v.data.cpu().numpy()))
def get_sparse_input_aug(x_input_np):
"""
sparse input of
:param x_input_np: [T,B]
:return: Numpy array: [B,T,aug_V]
"""
ignore_index = [0]
unk = 2
result = np.zeros((x_input_np.shape[0], x_input_np.shape[1], cfg.vocab_size + x_input_np.shape[0]),
dtype=np.float32)
result.fill(1e-10)
for t in range(x_input_np.shape[0]):
for b in range(x_input_np.shape[1]):
w = x_input_np[t][b]
if w not in ignore_index:
if w != unk:
result[t][b][x_input_np[t][b]] = 1.0
else:
result[t][b][cfg.vocab_size + t] = 1.0
result_np = result.transpose((1, 0, 2))
result = torch.from_numpy(result_np).float()
return result
def init_gru(gru):
gru.reset_parameters()
for _, hh, _, _ in gru.all_weights:
for i in range(0, hh.size(0), gru.hidden_size):
torch.nn.init.orthogonal_(hh[i:i + gru.hidden_size], gain=1)
class Attn(nn.Module):
def __init__(self, hidden_size):
super(Attn, self).__init__()
self.hidden_size = hidden_size
self.attn = nn.Linear(self.hidden_size * 2, hidden_size)
self.v = nn.Parameter(torch.zeros(hidden_size))
stdv = 1. / math.sqrt(self.v.size(0))
self.v.data.normal_(mean=0, std=stdv)
def forward(self, hidden, encoder_outputs, mask=False, inp_seqs=None, stop_tok=None, normalize=True):
encoder_outputs = encoder_outputs.transpose(0, 1) # [B,T,H]
attn_energies = self.score(hidden, encoder_outputs)
if True or not mask:
normalized_energy = F.softmax(attn_energies, dim=2) # [B,1,T]
else:
mask_idx = []
# inp_seqs: ndarray of [T,B]
# inp_seqs = inp_seqs.cpu().numpy()
for b in range(inp_seqs.shape[1]):
for t in range(inp_seqs.shape[0] + 1):
if t == inp_seqs.shape[0] or inp_seqs[t, b] in stop_tok:
mask_idx.append(t)
break
mask = []
for mask_len in mask_idx:
mask.append([1.] * mask_len + [0.] * (inp_seqs.shape[0] - mask_len))
mask = cuda_(Variable(torch.FloatTensor(mask))) # [B,T]
attn_energies = attn_energies * mask.unsqueeze(1)
normalized_energy = F.softmax(attn_energies, dim=2) # [B,1,T]
context = torch.bmm(normalized_energy, encoder_outputs) # [B,1,H]
return context.transpose(0, 1) # [1,B,H]
def score(self, hidden, encoder_outputs):
max_len = encoder_outputs.size(1)
H = hidden.repeat(max_len, 1, 1).transpose(0, 1)
# pdb.set_trace()
energy = torch.tanh(self.attn(torch.cat([H, encoder_outputs], 2))) # [B,T,2H]->[B,T,H]
energy = energy.transpose(2, 1) # [B,H,T]
v = self.v.repeat(encoder_outputs.size(0), 1).unsqueeze(1) # [B,1,H]
energy = torch.bmm(v, energy) # [B,1,T]
return energy
class SimpleDynamicEncoder(nn.Module):
def __init__(self, input_size, embed_size, hidden_size, n_layers, dropout):
super().__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.embed_size = embed_size
self.n_layers = n_layers
self.dropout = dropout
self.embedding = nn.Embedding(input_size, embed_size)
self.gru = nn.GRU(embed_size, hidden_size, n_layers, dropout=self.dropout, bidirectional=True)
init_gru(self.gru)
def forward(self, input_seqs, input_lens, hidden=None):
"""
forward procedure. No need for inputs to be sorted
:param input_seqs: Variable of [T,B]
:param hidden:
:param input_lens: *numpy array* of len for each input sequence
:return:
"""
# print("in encoder")
# print("input_seqs", input_seqs)
# print("hidden", hidden)
# print("input_lens", input_lens)
batch_size = input_seqs.size(1)
embedded = self.embedding(input_seqs)
import pdb
if torch.isnan(embedded).sum() > 0:
pdb.set_trace()
# pass
# print("embedded", embedded)
embedded = embedded.transpose(0, 1) # [B,T,E]
sort_idx = np.argsort(-input_lens)
unsort_idx = cuda_(torch.LongTensor(np.argsort(sort_idx)))
input_lens = input_lens[sort_idx]
sort_idx = cuda_(torch.LongTensor(sort_idx))
embedded = embedded[sort_idx].transpose(0, 1) # [T,B,E]
# print("embedded", embedded)
packed = torch.nn.utils.rnn.pack_padded_sequence(embedded, input_lens)
outputs, hidden = self.gru(packed, hidden)
# print('outputs', outputs)
outputs, _ = torch.nn.utils.rnn.pad_packed_sequence(outputs)
outputs = outputs[:, :, :self.hidden_size] + outputs[:, :, self.hidden_size:]
outputs = outputs.transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
hidden = hidden.transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
return outputs, hidden, embedded
class BSpanDecoder(nn.Module):
    """Belief-span (bspan) decoder with a CopyNet-style copy mechanism.

    At each step it can either generate a vocabulary token or copy a token
    from the user utterance and, when available, from the previous turn's
    bspan.  Returns per-step GRU output, the updated hidden state and a
    probability distribution over [vocab ; copy positions].
    """
    def __init__(self, embed_size, hidden_size, vocab_size, dropout_rate, vocab):
        super().__init__()
        self.emb = nn.Embedding(vocab_size, embed_size)
        if cfg.use_positional_embedding:
            # Sinusoidal position table (see position_encoding_init) added to
            # the token embedding; disabled by default via cfg.
            self.positional_embedding = nn.Embedding(cfg.max_ts + 1, embed_size)
            init_pos_emb = self.position_encoding_init(cfg.max_ts + 1, embed_size)
            self.positional_embedding.weight.data = init_pos_emb
        self.gru = nn.GRU(hidden_size + embed_size, hidden_size, dropout=dropout_rate)
        self.proj = nn.Linear(hidden_size * 2, vocab_size)
        self.attn_u = Attn(hidden_size)
        self.proj_copy1 = nn.Linear(hidden_size, hidden_size)  # copy scores over the user input
        self.proj_copy2 = nn.Linear(hidden_size, hidden_size)  # copy scores over the previous bspan
        self.dropout_rate = dropout_rate
        self.inp_dropout = nn.Dropout(self.dropout_rate)
        init_gru(self.gru)
        self.vocab = vocab
    def position_encoding_init(self, n_position, d_pos_vec):
        """Build a fixed sinusoidal position-encoding table [n_position, d_pos_vec].

        Row 0 is all zeros (padding position); even dims use sin, odd dims cos.
        """
        position_enc = np.array([[pos / np.power(10000, 2 * (j // 2) / d_pos_vec) for j in range(d_pos_vec)]
                                 if pos != 0 else np.zeros(d_pos_vec) for pos in range(n_position)])
        position_enc[1:, 0::2] = np.sin(position_enc[1:, 0::2])  # dim 2i
        position_enc[1:, 1::2] = np.cos(position_enc[1:, 1::2])  # dim 2i+1
        return torch.from_numpy(position_enc).type(torch.FloatTensor)
    def forward(self, u_enc_out, z_tm1, last_hidden, u_input_np, pv_z_enc_out, prev_z_input_np, u_emb, pv_z_emb,
                position):
        """One decoding step; `position` is the step index used only for the
        optional positional embedding.  `pv_z_enc_out` may be None on the
        first turn (no previous bspan to copy from)."""
        # Sparse [B, T, V+T] matrix mapping copy logits per time step to the
        # extended vocabulary (built from the raw numpy token indices).
        sparse_u_input = Variable(get_sparse_input_aug(u_input_np), requires_grad=False)
        if pv_z_enc_out is not None:
            # Attend over the previous bspan and the user input jointly.
            context = self.attn_u(last_hidden, torch.cat([pv_z_enc_out, u_enc_out], dim=0), mask=True,
                                  inp_seqs=np.concatenate([prev_z_input_np, u_input_np], 0),
                                  stop_tok=[self.vocab.encode('EOS_M')])
        else:
            context = self.attn_u(last_hidden, u_enc_out, mask=True, inp_seqs=u_input_np,
                                  stop_tok=[self.vocab.encode('EOS_M')])
        embed_z = self.emb(z_tm1)
        # embed_z = self.inp_dropout(embed_z)
        if cfg.use_positional_embedding:  # defaulty not used
            position_label = [position] * u_enc_out.size(1)  # [B]
            position_label = cuda_(Variable(torch.LongTensor(position_label))).view(1, -1)  # [1,B]
            pos_emb = self.positional_embedding(position_label)
            embed_z = embed_z + pos_emb
        gru_in = torch.cat([embed_z, context], 2)
        gru_out, last_hidden = self.gru(gru_in, last_hidden)
        gen_score = self.proj(torch.cat([gru_out, context], 2)).squeeze(0)
        u_copy_score = torch.tanh(self.proj_copy1(u_enc_out.transpose(0, 1)))  # [B,T,H]
        # stable version of copynet: subtract the per-row max before exp so the
        # log/exp round-trip through the sparse projection does not overflow.
        u_copy_score = torch.matmul(u_copy_score, gru_out.squeeze(0).unsqueeze(2)).squeeze(2)
        u_copy_score = u_copy_score.cpu()
        u_copy_score_max = torch.max(u_copy_score, dim=1, keepdim=True)[0]
        u_copy_score = torch.exp(u_copy_score - u_copy_score_max)  # [B,T]
        u_copy_score = torch.log(torch.bmm(u_copy_score.unsqueeze(1), sparse_u_input)).squeeze(
            1) + u_copy_score_max  # [B,V]
        u_copy_score = cuda_(u_copy_score)
        if pv_z_enc_out is None:
            # Joint softmax over [generate ; copy-from-user] logits.
            scores = F.softmax(torch.cat([gen_score, u_copy_score], dim=1), dim=1)
            gen_score, u_copy_score = scores[:, :cfg.vocab_size], \
                                      scores[:, cfg.vocab_size:]
            proba = gen_score + u_copy_score[:, :cfg.vocab_size]  # [B,V]
            proba = torch.cat([proba, u_copy_score[:, cfg.vocab_size:]], 1)
        else:
            # Same stabilized copy scoring over the previous bspan encoding.
            sparse_pv_z_input = Variable(get_sparse_input_aug(prev_z_input_np), requires_grad=False)
            pv_z_copy_score = torch.tanh(self.proj_copy2(pv_z_enc_out.transpose(0, 1)))  # [B,T,H]
            pv_z_copy_score = torch.matmul(pv_z_copy_score, gru_out.squeeze(0).unsqueeze(2)).squeeze(2)
            pv_z_copy_score = pv_z_copy_score.cpu()
            pv_z_copy_score_max = torch.max(pv_z_copy_score, dim=1, keepdim=True)[0]
            pv_z_copy_score = torch.exp(pv_z_copy_score - pv_z_copy_score_max)  # [B,T]
            pv_z_copy_score = torch.log(torch.bmm(pv_z_copy_score.unsqueeze(1), sparse_pv_z_input)).squeeze(
                1) + pv_z_copy_score_max  # [B,V]
            pv_z_copy_score = cuda_(pv_z_copy_score)
            # Joint softmax over [generate ; copy-from-user ; copy-from-prev-bspan].
            scores = F.softmax(torch.cat([gen_score, u_copy_score, pv_z_copy_score], dim=1), dim=1)
            gen_score, u_copy_score, pv_z_copy_score = scores[:, :cfg.vocab_size], \
                                                       scores[:,
                                                       cfg.vocab_size:2 * cfg.vocab_size + u_input_np.shape[0]], \
                                                       scores[:, 2 * cfg.vocab_size + u_input_np.shape[0]:]
            proba = gen_score + u_copy_score[:, :cfg.vocab_size] + pv_z_copy_score[:, :cfg.vocab_size]  # [B,V]
            proba = torch.cat([proba, pv_z_copy_score[:, cfg.vocab_size:], u_copy_score[:, cfg.vocab_size:]], 1)
        return gru_out, last_hidden, proba
class ResponseDecoder(nn.Module):
    """Token-level response decoder that attends over both the bspan decoder
    outputs and the user-utterance encoding, and can copy tokens from the
    decoded bspan (selective CopyNet)."""
    def __init__(self, embed_size, hidden_size, vocab_size, degree_size, dropout_rate, gru, proj, emb, vocab):
        super().__init__()
        # gru / proj / emb are shared modules owned by the parent model.
        self.emb = emb
        self.attn_z = Attn(hidden_size)
        self.attn_u = Attn(hidden_size)
        self.gru = gru
        init_gru(self.gru)
        self.proj = proj
        self.proj_copy1 = nn.Linear(hidden_size, hidden_size)
        self.proj_copy2 = nn.Linear(hidden_size, hidden_size)
        self.dropout_rate = dropout_rate
        self.vocab = vocab
    def get_sparse_selective_input(self, x_input_np):
        """Build a [B, T, V+T] selection matrix for copying from the bspan.

        Requestable-slot words are redirected to their '<word>_SLOT'
        placeholder token; <unk> (id 2) and OOV ids are boosted (5.0) at their
        copy position; all other tokens map to themselves.  The matrix is
        shifted by one step (row t+1 describes the token at t).
        """
        result = np.zeros((x_input_np.shape[0], x_input_np.shape[1], cfg.vocab_size + x_input_np.shape[0]),
                          dtype=np.float32)
        result.fill(1e-10)
        reqs = ['address', 'phone', 'postcode', 'pricerange', 'area']
        for t in range(x_input_np.shape[0] - 1):
            for b in range(x_input_np.shape[1]):
                w = x_input_np[t][b]
                word = self.vocab.decode(w)
                if word in reqs:
                    slot = self.vocab.encode(word + '_SLOT')
                    result[t + 1][b][slot] = 1.0
                else:
                    if w == 2 or w >= cfg.vocab_size:
                        result[t + 1][b][cfg.vocab_size + t] = 5.0
                    else:
                        result[t + 1][b][w] = 1.0
        result_np = result.transpose((1, 0, 2))
        result = torch.from_numpy(result_np).float()
        return result
    def forward(self, z_enc_out, u_enc_out, u_input_np, m_t_input, degree_input, last_hidden, z_input_np):
        """One response decoding step; returns (proba, last_hidden, gru_out)
        where proba covers [vocab ; bspan copy positions]."""
        sparse_z_input = Variable(self.get_sparse_selective_input(z_input_np), requires_grad=False)
        m_embed = self.emb(m_t_input)
        z_context = self.attn_z(last_hidden, z_enc_out, mask=True, stop_tok=[self.vocab.encode('EOS_Z2')],
                                inp_seqs=z_input_np)
        u_context = self.attn_u(last_hidden, u_enc_out, mask=True, stop_tok=[self.vocab.encode('EOS_M')],
                                inp_seqs=u_input_np)
        # Input: [token emb ; user context ; bspan context ; DB degree vector].
        gru_in = torch.cat([m_embed, u_context, z_context, degree_input.unsqueeze(0)], dim=2)
        gru_out, last_hidden = self.gru(gru_in, last_hidden)
        gen_score = self.proj(torch.cat([z_context, u_context, gru_out], 2)).squeeze(0)
        # Numerically-stable copy scoring over the bspan (max-subtracted exp/log).
        z_copy_score = torch.tanh(self.proj_copy2(z_enc_out.transpose(0, 1)))
        z_copy_score = torch.matmul(z_copy_score, gru_out.squeeze(0).unsqueeze(2)).squeeze(2)
        z_copy_score = z_copy_score.cpu()
        z_copy_score_max = torch.max(z_copy_score, dim=1, keepdim=True)[0]
        z_copy_score = torch.exp(z_copy_score - z_copy_score_max)  # [B,T]
        z_copy_score = torch.log(torch.bmm(z_copy_score.unsqueeze(1), sparse_z_input)).squeeze(
            1) + z_copy_score_max  # [B,V]
        z_copy_score = cuda_(z_copy_score)
        scores = F.softmax(torch.cat([gen_score, z_copy_score], dim=1), dim=1)
        gen_score, z_copy_score = scores[:, :cfg.vocab_size], \
                                  scores[:, cfg.vocab_size:]
        proba = gen_score + z_copy_score[:, :cfg.vocab_size]  # [B,V]
        proba = torch.cat([proba, z_copy_score[:, cfg.vocab_size:]], 1)
        return proba, last_hidden, gru_out
class ResponseDecoder_discrete(nn.Module):
    """Discrete-action response head.

    Instead of generating tokens, it maps the final user-encoder state plus a
    flat dialogue-state feature vector (np_state) to logits over a fixed
    action set via a small feed-forward stack.
    """
    def __init__(self, embed_size, hidden_size, vocab_size, degree_size, dropout_rate, gru, proj, emb, vocab):
        super().__init__()
        # gru / proj / emb are shared modules owned by the parent model; the
        # attention modules are kept for interface parity with ResponseDecoder.
        self.emb = emb
        self.attn_z = Attn(hidden_size)
        self.attn_u = Attn(hidden_size)
        self.gru = gru
        init_gru(self.gru)
        # Feed-forward stack over [u_context ; np_state]; STATE_DIM is the size
        # of the externally supplied dialogue-state vector.
        self.proj_0 = nn.Linear(hidden_size + dialog_config.STATE_DIM, hidden_size + dialog_config.STATE_DIM)
        self.proj_1 = nn.Linear(hidden_size + dialog_config.STATE_DIM, hidden_size + dialog_config.STATE_DIM)
        self.proj_2 = nn.Linear(hidden_size + dialog_config.STATE_DIM, hidden_size + dialog_config.STATE_DIM)
        self.proj = proj
        self.proj_copy1 = nn.Linear(hidden_size, hidden_size)
        self.proj_copy2 = nn.Linear(hidden_size, hidden_size)
        self.dropout_rate = dropout_rate
        self.vocab = vocab
    def get_sparse_selective_input(self, x_input_np):
        """Build a [B, T, V+T] selection matrix for copying from the bspan.

        Same construction as ResponseDecoder.get_sparse_selective_input; kept
        for parity although the discrete forward path does not use it.
        """
        result = np.zeros((x_input_np.shape[0], x_input_np.shape[1], cfg.vocab_size + x_input_np.shape[0]),
                          dtype=np.float32)
        result.fill(1e-10)
        reqs = ['address', 'phone', 'postcode', 'pricerange', 'area']
        for t in range(x_input_np.shape[0] - 1):
            for b in range(x_input_np.shape[1]):
                w = x_input_np[t][b]
                word = self.vocab.decode(w)
                if word in reqs:
                    slot = self.vocab.encode(word + '_SLOT')
                    result[t + 1][b][slot] = 1.0
                else:
                    if w == 2 or w >= cfg.vocab_size:
                        result[t + 1][b][cfg.vocab_size + t] = 5.0
                    else:
                        result[t + 1][b][w] = 1.0
        result_np = result.transpose((1, 0, 2))
        result = torch.from_numpy(result_np).float()
        return result
    def forward(self, z_enc_out, u_enc_out, np_state):
        """Return action logits [B, action_size].

        :param z_enc_out: bspan decoder outputs (unused in this head)
        :param u_enc_out: user encoder outputs [T,B,H]; the last step is used
        :param np_state: numpy dialogue-state vector, length STATE_DIM
        """
        # Last encoder step as the utterance summary.
        u_context = u_enc_out[-1, :, :]
        state_from_np = torch.from_numpy(np_state).float().unsqueeze(0)
        # torch.tanh/torch.sigmoid replace deprecated F.tanh/F.sigmoid.
        output0 = torch.tanh(self.proj_0(torch.cat([u_context, state_from_np], 1)))
        output1 = torch.sigmoid(self.proj_1(output0))
        output2 = torch.sigmoid(self.proj_2(output1))
        gen_score = self.proj(output2)
        # NOTE: an unreachable `return proba, last_hidden, gru_out` (undefined
        # names) that followed dead copynet code has been removed.
        return gen_score
class TSD(nn.Module):
    """End-to-end task-oriented dialogue model (Sequicity-style two-stage
    decoder): a user-utterance encoder, a belief-span decoder, and either a
    token-level response decoder or a discrete action head.
    """
    def __init__(self, embed_size, hidden_size, vocab_size, degree_size, layer_num, dropout_rate, z_length,
                 max_ts, action_size=dialog_config.SYS_ACTION_CARDINALITY, discrete_act=False, beam_search=False, teacher_force=100, **kwargs):
        super().__init__()
        # Required kwargs: 'vocab' and 'reader'; with beam_search also
        # 'beam_size' and 'eos_token_idx'.
        self.vocab = kwargs['vocab']
        self.reader = kwargs['reader']
        self.emb = nn.Embedding(vocab_size, embed_size)
        # Decoder GRU and embedding are shared by both response decoders.
        self.dec_gru = nn.GRU(degree_size + embed_size + hidden_size * 2, hidden_size, dropout=dropout_rate)
        self.proj = nn.Linear(hidden_size * 3, vocab_size)
        self.proj_discrete = nn.Linear(hidden_size + dialog_config.STATE_DIM, action_size)
        self.u_encoder = SimpleDynamicEncoder(vocab_size, embed_size, hidden_size, layer_num, dropout_rate)
        self.z_decoder = BSpanDecoder(embed_size, hidden_size, vocab_size, dropout_rate, self.vocab)
        self.m_decoder = ResponseDecoder(embed_size, hidden_size, vocab_size, degree_size, dropout_rate,
                                         self.dec_gru, self.proj, self.emb, self.vocab)
        self.m_decoder_discrete = ResponseDecoder_discrete(embed_size, hidden_size, vocab_size, degree_size, dropout_rate,
                                                           self.dec_gru, self.proj_discrete, self.emb, self.vocab)
        self.embed_size = embed_size
        self.z_length = z_length   # max bspan length when no gold bspan given
        self.max_ts = max_ts       # max response decoding steps
        self.discrete_act = discrete_act
        self.beam_search = beam_search
        self.teacher_force = teacher_force  # teacher-forcing rate (percent), see toss_
        self.pr_loss = nn.NLLLoss(ignore_index=0)   # bspan loss; 0 = padding
        self.dec_loss = nn.NLLLoss(ignore_index=0)  # response loss; 0 = padding
        self.saved_log_policy = []
        if self.beam_search:
            self.beam_size = kwargs['beam_size']
            self.eos_token_idx = kwargs['eos_token_idx']
    def forward(self, u_input, u_input_np, m_input, m_input_np, z_input, u_len, m_len, turn_states,
                degree_input, mode, np_state, **kwargs):
        """Dispatch on `mode`:

        - 'train'/'valid': supervised teacher-forced pass; returns
          (total_loss, bspan_loss, response_loss, turn_states).
        - 'test' with discrete actions: returns (action_logits, bspan_index,
          turn_states, bspan_proba); otherwise a 5-tuple that additionally
          carries per-step response probabilities.
        - 'rl': returns the REINFORCE loss from sampled rollouts.
        """
        if mode == 'train' or mode == 'valid':
            pz_proba, pm_dec_proba, turn_states = \
                self.forward_turn(u_input, u_len, m_input=m_input, m_len=m_len, z_input=z_input, mode='train',
                                  turn_states=turn_states, degree_input=degree_input, u_input_np=u_input_np,
                                  m_input_np=m_input_np, **kwargs)
            # NLLLoss consumes log-probabilities, hence the torch.log here.
            loss, pr_loss, m_loss = self.supervised_loss(torch.log(pz_proba), torch.log(pm_dec_proba),
                                                         z_input, m_input)
            return loss, pr_loss, m_loss, turn_states
        elif mode == 'test':
            if self.discrete_act:
                m_output_index, pz_index, turn_states, pz_proba = self.forward_turn(u_input, u_len=u_len, z_input=z_input,
                                                                                    mode='test',
                                                                                    turn_states=turn_states,
                                                                                    degree_input=degree_input,
                                                                                    u_input_np=u_input_np,
                                                                                    m_input_np=m_input_np,
                                                                                    np_state=np_state,
                                                                                    **kwargs
                                                                                    )
                return m_output_index, pz_index, turn_states, pz_proba
            else:
                m_output_index, pz_index, turn_states, pz_proba, mt_proba = self.forward_turn(u_input, u_len=u_len, z_input=z_input,
                                                                                              mode='test',
                                                                                              turn_states=turn_states,
                                                                                              degree_input=degree_input,
                                                                                              u_input_np=u_input_np, m_input_np=m_input_np,
                                                                                              **kwargs
                                                                                              )
                return m_output_index, pz_index, turn_states, pz_proba, mt_proba
        elif mode == 'rl':
            loss = self.forward_turn(u_input, u_len=u_len, is_train=False, mode='rl',
                                     turn_states=turn_states,
                                     degree_input=degree_input,
                                     u_input_np=u_input_np, m_input_np=m_input_np,
                                     **kwargs
                                     )
            return loss
def forward_turn(self, u_input, u_len, turn_states, mode, degree_input, u_input_np, m_input_np=None,
m_input=None, np_state=None, m_len=None, z_input=None, **kwargs):
"""
compute required outputs for a single dialogue turn. Turn state{Dict} will be updated in each call.
:param u_input_np:
:param m_input_np:
:param u_len:
:param turn_states:
:param is_train:
:param u_input: [T,B]
:param m_input: [T,B]
:param z_input: [T,B]
:return:
"""
prev_z_input = kwargs.get('prev_z_input', None)
prev_z_input_np = kwargs.get('prev_z_input_np', None)
prev_z_len = kwargs.get('prev_z_len', None)
pv_z_emb = None
batch_size = u_input.size(1)
pv_z_enc_out = None
if prev_z_input is not None:
pv_z_enc_out, _, pv_z_emb = self.u_encoder(prev_z_input, prev_z_len)
u_enc_out, u_enc_hidden, u_emb = self.u_encoder(u_input, u_len)
last_hidden = u_enc_hidden[:-1]
z_tm1 = cuda_(Variable(torch.ones(1, batch_size).long() * 3)) # GO_2 token
m_tm1 = cuda_(Variable(torch.ones(1, batch_size).long())) # GO token
if mode == 'train':
pz_dec_outs = []
pz_proba = []
z_length = z_input.size(0) if z_input is not None else self.z_length # GO token
hiddens = [None] * batch_size
for t in range(z_length):
pz_dec_out, last_hidden, proba = \
self.z_decoder(u_enc_out=u_enc_out, u_input_np=u_input_np,
z_tm1=z_tm1, last_hidden=last_hidden,
pv_z_enc_out=pv_z_enc_out, prev_z_input_np=prev_z_input_np,
u_emb=u_emb, pv_z_emb=pv_z_emb, position=t)
pz_proba.append(proba)
pz_dec_outs.append(pz_dec_out)
z_np = z_tm1.view(-1).cpu().data.numpy()
for i in range(batch_size):
if z_np[i] == self.vocab.encode('EOS_Z2'):
hiddens[i] = last_hidden[:, i, :]
z_tm1 = z_input[t].view(1, -1)
for i in range(batch_size):
if hiddens[i] is None:
hiddens[i] = last_hidden[:, i, :]
last_hidden = torch.stack(hiddens, dim=1)
z_input_np = z_input.cpu().data.numpy()
pz_dec_outs = torch.cat(pz_dec_outs, dim=0) # [Tz,B,H]
pz_proba = torch.stack(pz_proba, dim=0)
# P(m|z,u)
pm_dec_proba, m_dec_outs = [], []
m_length = m_input.size(0) # Tm
# last_hidden = u_enc_hidden[:-1]
for t in range(m_length):
teacher_forcing = toss_(self.teacher_force)
proba, last_hidden, dec_out = self.m_decoder(pz_dec_outs, u_enc_out, u_input_np, m_tm1,
degree_input, last_hidden, z_input_np)
if teacher_forcing:
m_tm1 = m_input[t].view(1, -1)
else:
_, m_tm1 = torch.topk(proba, 1)
m_tm1 = m_tm1.view(1, -1)
pm_dec_proba.append(proba)
m_dec_outs.append(dec_out)
pm_dec_proba = torch.stack(pm_dec_proba, dim=0) # [T,B,V]
return pz_proba, pm_dec_proba, None
else:
# assert z_input is not None
z_length = z_input.size(0) if z_input is not None else None # GO token
# print("z_input", z_input)
if z_input is None:
use_predicted_zt = True
else:
use_predicted_zt = False
pz_dec_outs, bspan_index, last_hidden, pz_proba = self.bspan_decoder(u_enc_out, z_tm1, last_hidden, u_input_np,
pv_z_enc_out=pv_z_enc_out,
prev_z_input_np=prev_z_input_np,
u_emb=u_emb, pv_z_emb=pv_z_emb,
z_length=z_length,
use_predicted_zt=use_predicted_zt,
z_input=z_input)
pz_proba = torch.stack(pz_proba, dim=0)
pz_dec_outs = torch.cat(pz_dec_outs, dim=0)
degree_input = self.reader.db_degree_handler(bspan_index, kwargs['dial_id'])
degree_input = cuda_(Variable(torch.from_numpy(degree_input).float()))
if mode == 'test':
if not self.discrete_act:
if not self.beam_search:
m_output_index, m_probas = self.greedy_decode(pz_dec_outs, u_enc_out, m_tm1, u_input_np, last_hidden,
degree_input, bspan_index)
# else:
# m_output_index = self.beam_search_decode(pz_dec_outs, u_enc_out, m_tm1, u_input_np, last_hidden,
# degree_input, bspan_index)
| else:
act_logits = self.action_decode(pz_dec_outs, u_enc_out, np_state)
return act_logits, bspan_index, None, pz_proba
elif mode == 'rl':
return self.sampling_decode(pz_dec_outs, u_enc_out, m_tm1, u_input_np, last_hidden,
degree_input, bspan_index)
def action_decode(self, pz_dec_outs, u_enc_out, np_state):
logits = self.m_decoder_discrete(pz_dec_outs, u_enc_out, np_state)
return logits
    def bspan_decoder(self, u_enc_out, z_tm1, last_hidden, u_input_np, pv_z_enc_out, prev_z_input_np, u_emb, pv_z_emb,
                      z_length=None, use_predicted_zt=True, z_input=None):
        """Decode the belief span step by step.

        When use_predicted_zt is False the gold bspan `z_input` is fed back
        (teacher forcing); otherwise the argmax prediction is fed back.
        Returns (per-step decoder outputs, decoded token ids per batch item,
        final hidden state, per-step probability distributions).
        """
        if not use_predicted_zt:
            assert z_input is not None
            assert z_length is not None
        pz_dec_outs = []
        pz_proba = []
        decoded = []
        batch_size = u_enc_out.size(1)
        hiddens = [None] * batch_size
        z_length = z_length if z_length is not None else cfg.z_length
        for t in range(z_length):
            pz_dec_out, last_hidden, proba = \
                self.z_decoder(u_enc_out=u_enc_out, u_input_np=u_input_np,
                               z_tm1=z_tm1, last_hidden=last_hidden, pv_z_enc_out=pv_z_enc_out,
                               prev_z_input_np=prev_z_input_np, u_emb=u_emb, pv_z_emb=pv_z_emb, position=t)
            pz_proba.append(proba)
            pz_dec_outs.append(pz_dec_out)
            z_proba, z_index = torch.topk(proba, 1)  # [B,1]
            z_index = z_index.data.view(-1)
            #####################################################
            # Map copy-mechanism indices (>= vocab_size) back to the source
            # token they point at: user input, optionally concatenated with
            # the previous turn's bspan.
            if prev_z_input_np is None:
                tmp = u_input_np  # [,B]
            else:
                tmp = np.concatenate((u_input_np, prev_z_input_np), axis=0)
            for i in range(z_index.size(0)):
                if z_index[i] >= cfg.vocab_size:
                    z_index[i] = torch.tensor(int(tmp[z_index[i] - cfg.vocab_size, i]))
            del tmp
            decoded.append(z_index.clone())
            #####################################################
            # Any index still out of range is replaced by <unk> before being
            # fed back as the next decoder input.
            for i in range(z_index.size(0)):
                if z_index[i] >= cfg.vocab_size:
                    z_index[i] = 2  # unk
            # Freeze each item's hidden state once its span terminator is seen.
            z_np = z_tm1.view(-1).cpu().data.numpy()
            for i in range(batch_size):
                if z_np[i] == self.vocab.encode('EOS_Z2'):
                    hiddens[i] = last_hidden[:, i, :]
            if use_predicted_zt:
                z_tm1 = cuda_(Variable(z_index).view(1, -1))
            else:
                z_tm1 = z_input[t].view(1, -1)
        for i in range(batch_size):
            if hiddens[i] is None:
                hiddens[i] = last_hidden[:, i, :]
        last_hidden = torch.stack(hiddens, dim=1)
        if not use_predicted_zt:
            z_input_np = z_input.cpu().data.numpy()
        decoded = torch.stack(decoded, dim=0).transpose(0, 1)
        decoded = list(decoded)
        decoded = [list(_) for _ in decoded]
        return pz_dec_outs, decoded, last_hidden, pz_proba
def greedy_decode(self, pz_dec_outs, u_enc_out, m_tm1, u_input_np, last_hidden, degree_input, bspan_index):
decoded = []
probas = []
bspan_index_np = pad_sequences(bspan_index).transpose((1, 0))
for t in range(self.max_ts):
proba, last_hidden, _ = self.m_decoder(pz_dec_outs, u_enc_out, u_input_np, m_tm1,
degree_input, last_hidden, bspan_index_np)
probas.append(proba)
mt_proba, mt_index = torch.topk(proba, 1) # [B,1]
mt_index = mt_index.data.view(-1)
decoded.append(mt_index.clone())
for i in range(mt_index.size(0)):
if mt_index[i] >= cfg.vocab_size:
mt_index[i] = 2 # unk
m_tm1 = cuda_(Variable(mt_index).view(1, -1))
decoded = torch.stack(decoded, dim=0).transpose(0, 1)
decoded = list(decoded)
return [list(_) for _ in decoded], probas
    def beam_search_decode_single(self, pz_dec_outs, u_enc_out, m_tm1, u_input_np, last_hidden, degree_input,
                                  bspan_index):
        """Beam-search decode one response for a single dialogue (batch size 1).

        A hypothesis is "finished" when it emits EOS_M; it is only accepted if
        it mentions every requested slot (beam_result_valid), otherwise it is
        kept in `failed` as a fallback.  Returns the best token-id sequence as
        a [1, T] tensor.
        """
        eos_token_id = self.vocab.encode(cfg.eos_m_token)
        batch_size = pz_dec_outs.size(1)
        if batch_size != 1:
            raise ValueError('"Beam search single" requires batch size to be 1')
        class BeamState:
            def __init__(self, score, last_hidden, decoded, length):
                """
                Beam state in beam decoding
                :param score: sum of log-probabilities
                :param last_hidden: last hidden
                :param decoded: list of *Variable[1*1]* of all decoded words
                :param length: current decoded sentence length
                """
                self.score = score
                self.last_hidden = last_hidden
                self.decoded = decoded
                self.length = length
            def update_clone(self, score_incre, last_hidden, decoded_t):
                # Shallow-copy the decoded list so sibling hypotheses stay independent.
                decoded = copy.copy(self.decoded)
                decoded.append(decoded_t)
                clone = BeamState(self.score + score_incre, last_hidden, decoded, self.length + 1)
                return clone
        def beam_result_valid(decoded_t, bspan_index):
            # A finished hypothesis must contain every requested slot token.
            decoded_t = [_.view(-1).data[0] for _ in decoded_t]
            req_slots = self.get_req_slots(bspan_index)
            decoded_sentence = self.vocab.sentence_decode(decoded_t, cfg.eos_m_token)
            for req in req_slots:
                if req not in decoded_sentence:
                    return False
            return True
        def score_bonus(state, decoded, bspan_index):
            # Constant per-token length bonus from config.
            bonus = cfg.beam_len_bonus
            return bonus
        def soft_score_incre(score, turn):
            return score
        finished, failed = [], []
        states = []  # sorted by score decreasingly
        dead_k = 0  # number of beams already finished
        states.append(BeamState(0, last_hidden, [m_tm1], 0))
        bspan_index_np = np.array(bspan_index).reshape(-1, 1)
        for t in range(self.max_ts):
            new_states = []
            k = 0
            while k < len(states) and k < self.beam_size - dead_k:
                state = states[k]
                last_hidden, m_tm1 = state.last_hidden, state.decoded[-1]
                proba, last_hidden, _ = self.m_decoder(pz_dec_outs, u_enc_out, u_input_np, m_tm1, degree_input,
                                                       last_hidden, bspan_index_np)
                proba = torch.log(proba)
                mt_proba, mt_index = torch.topk(proba, self.beam_size - dead_k)  # [1,K]
                for new_k in range(self.beam_size - dead_k):
                    score_incre = soft_score_incre(mt_proba[0][new_k].data[0], t) + score_bonus(state,
                                                                                               mt_index[0][new_k].data[
                                                                                                   0], bspan_index)
                    # Prune: candidates are ordered, so once one cannot beat the
                    # current worst kept hypothesis, none of the rest can.
                    if len(new_states) >= self.beam_size - dead_k and state.score + score_incre < new_states[-1].score:
                        break
                    decoded_t = mt_index[0][new_k]
                    if decoded_t.data[0] >= cfg.vocab_size:
                        decoded_t.data[0] = 2  # unk
                    if self.vocab.decode(decoded_t.data[0]) == cfg.eos_m_token:
                        if beam_result_valid(state.decoded, bspan_index):
                            finished.append(state)
                            dead_k += 1
                        else:
                            failed.append(state)
                    else:
                        decoded_t = decoded_t.view(1, -1)
                        new_state = state.update_clone(score_incre, last_hidden, decoded_t)
                        new_states.append(new_state)
                k += 1
                if self.beam_size - dead_k < 0:
                    break
            new_states = new_states[:self.beam_size - dead_k]
            new_states.sort(key=lambda x: -x.score)
            states = new_states
            if t == self.max_ts - 1 and not finished:
                # No hypothesis satisfied the slot constraint; fall back.
                finished = failed
                print('FAIL')
        if not finished:
            finished.append(states[0])
        finished.sort(key=lambda x: -x.score)
        decoded_t = finished[0].decoded
        decoded_t = [_.view(-1).data[0] for _ in decoded_t]
        decoded_sentence = self.vocab.sentence_decode(decoded_t, cfg.eos_m_token)
        generated = torch.cat(finished[0].decoded, dim=1).data  # [B=1, T]
        return generated
def beam_search_decode(self, pz_dec_outs, u_enc_out, m_tm1, u_input_np, last_hidden, degree_input, bspan_index):
vars = torch.split(pz_dec_outs, 1, dim=1), torch.split(u_enc_out, 1, dim=1), torch.split(
m_tm1, 1, dim=1), torch.split(last_hidden, 1, dim=1), torch.split(degree_input, 1, dim=0)
decoded = []
for i, (pz_dec_out_s, u_enc_out_s, m_tm1_s, last_hidden_s, degree_input_s) in enumerate(zip(*vars)):
decoded_s = self.beam_search_decode_single(pz_dec_out_s, u_enc_out_s, m_tm1_s,
u_input_np[:, i].reshape((-1, 1)),
last_hidden_s, degree_input_s, bspan_index[i])
decoded.append(decoded_s)
return [list(_.view(-1)) for _ in decoded]
def supervised_loss(self, pz_proba, pm_dec_proba, z_input, m_input):
pz_proba, pm_dec_proba = pz_proba[:, :, :cfg.vocab_size].contiguous(), pm_dec_proba[:, :,
:cfg.vocab_size].contiguous()
pr_loss = self.pr_loss(pz_proba.view(-1, pz_proba.size(2)), z_input.view(-1))
m_loss = self.dec_loss(pm_dec_proba.view(-1, pm_dec_proba.size(2)), m_input.view(-1))
loss = pr_loss + m_loss
return loss, pr_loss, m_loss
    def self_adjust(self, epoch):
        # Hook for per-epoch schedule adjustments; intentionally a no-op here.
        pass
# REINFORCEMENT fine-tuning with MC
def possible_reqs(self):
if cfg.dataset == 'camrest':
return ['address', 'phone', 'postcode', 'pricerange', 'area']
elif cfg.dataset == 'kvret':
req_by_intent = {
'weather': ['weather_attribute'],
'navigate': ['poi', 'traffic_info', 'address', 'distance'],
'schedule': ['event', 'date', 'time', 'party', 'agenda', 'room']
}
reqs = []
for value in req_by_intent.values():
reqs.extend(value)
return reqs
else:
raise ValueError('unknown dataset')
def get_req_slots(self, bspan_index):
reqs = self.possible_reqs()
reqs = set(self.vocab.sentence_decode(bspan_index).split()).intersection(reqs)
return [_ + '_SLOT' for _ in reqs]
    def reward(self, m_tm1, decoded, bspan_index):
        """
        The setting of the reward function is heuristic. It can be better optimized.

        Per-step reward for RL decoding: a small step penalty (camrest only),
        +1 when the previous token is a requested slot not yet emitted,
        -1 (camrest) when it repeats one.  `finished` is True once EOS_M is
        produced.
        :param m_tm1: previous token id (1-element tensor/array)
        :param decoded: tokens emitted so far
        :param bspan_index: decoded bspan (to derive requested slots)
        :return: (reward, finished)
        """
        req_slots = self.get_req_slots(bspan_index)
        m_tm1 = self.vocab.decode(m_tm1[0])
        finished = m_tm1 == 'EOS_M'
        decoded = [_.view(-1)[0] for _ in decoded]
        decoded_sentence = self.vocab.sentence_decode(decoded, cfg.eos_m_token).split()
        reward = -0.01 if cfg.dataset == 'camrest' else 0
        '''
        if not finished:
            if m_tm1 in req_slots:
                if decoded_sentence and m_tm1 not in decoded_sentence[:-1]:
                    reward = 1.0
        '''
        # some modification for reward function.
        if m_tm1 in req_slots:
            if decoded_sentence and m_tm1 not in decoded_sentence[:-1]:
                reward += 1.0
            else:
                reward -= 1.0 if cfg.dataset == 'camrest' else 0  # repeat
        return reward, finished
def sampling_decode(self, pz_dec_outs, u_enc_out, m_tm1, u_input_np, last_hidden, degree_input, bspan_index):
vars = torch.split(pz_dec_outs, 1, dim=1), torch.split(u_enc_out, 1, dim=1), torch.split(
m_tm1, 1, dim=1), torch.split(last_hidden, 1, dim=1), torch.split(degree_input, 1, dim=0)
batch_loss = []
sample_num = 1
for i, (pz_dec_out_s, u_enc_out_s, m_tm1_s, last_hidden_s, degree_input_s) in enumerate(zip(*vars)):
if not self.get_req_slots(bspan_index[i]):
continue
for j in range(sample_num):
loss = self.sampling_decode_single(pz_dec_out_s, u_enc_out_s, m_tm1_s,
u_input_np[:, i].reshape((-1, 1)),
last_hidden_s, degree_input_s, bspan_index[i])
batch_loss.append(loss)
if not batch_loss:
return None
else:
return sum(batch_loss) / len(batch_loss)
    def sampling_decode_single(self, pz_dec_outs, u_enc_out, m_tm1, u_input_np, last_hidden, degree_input, bspan_index):
        """Sample one response rollout (Categorical sampling per step) and
        return the REINFORCE loss from finish_episode().  The episode ends
        when reward() reports EOS_M or after max_ts steps."""
        decoded = []
        reward_sum = 0
        log_probs = []
        rewards = []
        bspan_index_np = np.array(bspan_index).reshape(-1, 1)
        for t in range(self.max_ts):
            # reward
            reward, finished = self.reward(m_tm1.data.view(-1), decoded, bspan_index)
            reward_sum += reward
            rewards.append(reward)
            if t == self.max_ts - 1:
                finished = True  # force termination at the step limit
            if finished:
                loss = self.finish_episode(log_probs, rewards)
                return loss
            # action
            proba, last_hidden, _ = self.m_decoder(pz_dec_outs, u_enc_out, u_input_np, m_tm1,
                                                   degree_input, last_hidden, bspan_index_np)
            proba = proba.squeeze(0)  # [B,V]
            dis = Categorical(proba)
            action = dis.sample()
            log_probs.append(dis.log_prob(action))
            mt_index = action.data.view(-1)
            decoded.append(mt_index.clone())
            # Feed sampled OOV/copy indices back as <unk>.
            for i in range(mt_index.size(0)):
                if mt_index[i] >= cfg.vocab_size:
                    mt_index[i] = 2  # unk
            m_tm1 = cuda_(Variable(mt_index).view(1, -1))
def finish_episode(self, log_probas, saved_rewards):
R = 0
policy_loss = []
rewards = []
for r in saved_rewards:
R = r + 0.8 * R
rewards.insert(0, R)
rewards = torch.Tensor(rewards)
# rewards = (rewards - rewards.mean()) / (rewards.std() + np.finfo(np.float32).eps)
for log_prob, reward in zip(log_probas, rewards):
policy_loss.append(-log_prob * reward)
l = len(policy_loss)
policy_loss = torch.cat(policy_loss).sum()
return policy_loss / l | #
return m_output_index, bspan_index, None, pz_proba, m_probas
|
multi_value_legacy_extended_property_request_builder.go | package item
import (
ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9 "github.com/microsoft/kiota/abstractions/go"
i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55 "github.com/microsoft/kiota/abstractions/go/serialization"
i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87 "github.com/microsoftgraph/msgraph-sdk-go/models/microsoft/graph"
)
// MultiValueLegacyExtendedPropertyRequestBuilder builds and executes requests for operations under \users\{user-id}\calendar\calendarView\{event-id}\multiValueExtendedProperties\{multiValueLegacyExtendedProperty-id}
// NOTE(review): kiota-generated code; field layout must match the generator's expectations.
type MultiValueLegacyExtendedPropertyRequestBuilder struct {
    // Path parameters for the request
    pathParameters map[string]string;
    // The request adapter to use to execute the requests.
    requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter;
    // Url template to use to build the URL for the current request builder
    urlTemplate string;
}
// MultiValueLegacyExtendedPropertyRequestBuilderDeleteOptions options for Delete
// Nil/empty fields are ignored when the request information is built.
type MultiValueLegacyExtendedPropertyRequestBuilderDeleteOptions struct {
    // Request headers
    H map[string]string;
    // Request options
    O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
    // Response handler to use in place of the default response handling provided by the core service
    ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// MultiValueLegacyExtendedPropertyRequestBuilderGetOptions options for Get
// Nil/empty fields are ignored when the request information is built.
type MultiValueLegacyExtendedPropertyRequestBuilderGetOptions struct {
    // Request headers
    H map[string]string;
    // Request options
    O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
    // Request query parameters
    Q *MultiValueLegacyExtendedPropertyRequestBuilderGetQueryParameters;
    // Response handler to use in place of the default response handling provided by the core service
    ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// MultiValueLegacyExtendedPropertyRequestBuilderGetQueryParameters the collection of multi-value extended properties defined for the event. Read-only. Nullable.
type MultiValueLegacyExtendedPropertyRequestBuilderGetQueryParameters struct {
    // Expand related entities
    Expand []string;
    // Select properties to be returned
    // (escaped name: presumably serialized as the `select` query parameter
    // from the `{?select,expand}` URL template — confirm against the adapter)
    Select_escaped []string;
}
// MultiValueLegacyExtendedPropertyRequestBuilderPatchOptions options for Patch
type MultiValueLegacyExtendedPropertyRequestBuilderPatchOptions struct {
    // The request body: the property to update.
    Body *i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.MultiValueLegacyExtendedProperty;
    // Request headers
    H map[string]string;
    // Request options
    O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
    // Response handler to use in place of the default response handling provided by the core service
    ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// NewMultiValueLegacyExtendedPropertyRequestBuilderInternal instantiates a new MultiValueLegacyExtendedPropertyRequestBuilder and sets the default values.
func | (pathParameters map[string]string, requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter)(*MultiValueLegacyExtendedPropertyRequestBuilder) {
m := &MultiValueLegacyExtendedPropertyRequestBuilder{
}
m.urlTemplate = "{+baseurl}/users/{user_id}/calendar/calendarView/{event_id}/multiValueExtendedProperties/{multiValueLegacyExtendedProperty_id}{?select,expand}";
urlTplParams := make(map[string]string)
for idx, item := range pathParameters {
urlTplParams[idx] = item
}
m.pathParameters = pathParameters;
m.requestAdapter = requestAdapter;
return m
}
// NewMultiValueLegacyExtendedPropertyRequestBuilder instantiates a new MultiValueLegacyExtendedPropertyRequestBuilder and sets the default values.
// The raw URL is forwarded via the reserved "request-raw-url" path parameter.
func NewMultiValueLegacyExtendedPropertyRequestBuilder(rawUrl string, requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter)(*MultiValueLegacyExtendedPropertyRequestBuilder) {
    pathParams := map[string]string{
        "request-raw-url": rawUrl,
    }
    return NewMultiValueLegacyExtendedPropertyRequestBuilderInternal(pathParams, requestAdapter)
}
// CreateDeleteRequestInformation the collection of multi-value extended properties defined for the event. Read-only. Nullable.
// Builds the DELETE request from the builder's template/parameters and any
// caller-supplied headers and request options.
func (m *MultiValueLegacyExtendedPropertyRequestBuilder) CreateDeleteRequestInformation(options *MultiValueLegacyExtendedPropertyRequestBuilderDeleteOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
    info := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
    info.UrlTemplate = m.urlTemplate
    info.PathParameters = m.pathParameters
    info.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.DELETE
    if options != nil {
        if options.H != nil {
            info.Headers = options.H
        }
        if len(options.O) != 0 {
            if err := info.AddRequestOptions(options.O...); err != nil {
                return nil, err
            }
        }
    }
    return info, nil
}
// CreateGetRequestInformation the collection of multi-value extended properties defined for the event. Read-only. Nullable.
// Builds a GET RequestInformation; query parameters(options.Q), headers and
// request options are copied from options when present.
func (m *MultiValueLegacyExtendedPropertyRequestBuilder) CreateGetRequestInformation(options *MultiValueLegacyExtendedPropertyRequestBuilderGetOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
    requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
    requestInfo.UrlTemplate = m.urlTemplate
    requestInfo.PathParameters = m.pathParameters
    requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.GET
    // expand the $select/$expand query parameters into the URL template
    if options != nil && options.Q != nil {
        requestInfo.AddQueryParameters(*(options.Q))
    }
    if options != nil && options.H != nil {
        requestInfo.Headers = options.H
    }
    if options != nil && len(options.O) != 0 {
        err := requestInfo.AddRequestOptions(options.O...)
        if err != nil {
            return nil, err
        }
    }
    return requestInfo, nil
}
// CreatePatchRequestInformation the collection of multi-value extended properties defined for the event. Read-only. Nullable.
// Builds a PATCH RequestInformation with the JSON-serialized options.Body.
// Fix: the original dereferenced options.Body unconditionally and would panic
// when options == nil, even though every other access below is nil-guarded;
// the serialization is now guarded the same way.
func (m *MultiValueLegacyExtendedPropertyRequestBuilder) CreatePatchRequestInformation(options *MultiValueLegacyExtendedPropertyRequestBuilderPatchOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
    requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
    requestInfo.UrlTemplate = m.urlTemplate
    requestInfo.PathParameters = m.pathParameters
    requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.PATCH
    if options != nil && options.Body != nil {
        requestInfo.SetContentFromParsable(m.requestAdapter, "application/json", options.Body)
    }
    if options != nil && options.H != nil {
        requestInfo.Headers = options.H
    }
    if options != nil && len(options.O) != 0 {
        err := requestInfo.AddRequestOptions(options.O...)
        if err != nil {
            return nil, err
        }
    }
    return requestInfo, nil
}
// Delete the collection of multi-value extended properties defined for the event. Read-only. Nullable.
// Executes the DELETE request; no response body is expected.
func (m *MultiValueLegacyExtendedPropertyRequestBuilder) Delete(options *MultiValueLegacyExtendedPropertyRequestBuilderDeleteOptions)(error) {
    requestInfo, err := m.CreateDeleteRequestInformation(options)
    if err != nil {
        return err
    }
    // SendNoContentAsync already returns nil on success, so forward its result
    return m.requestAdapter.SendNoContentAsync(*requestInfo, nil)
}
// Get the collection of multi-value extended properties defined for the event. Read-only. Nullable.
// Executes the GET request and deserializes the body into a MultiValueLegacyExtendedProperty.
func (m *MultiValueLegacyExtendedPropertyRequestBuilder) Get(options *MultiValueLegacyExtendedPropertyRequestBuilderGetOptions)(*i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.MultiValueLegacyExtendedProperty, error) {
    requestInfo, err := m.CreateGetRequestInformation(options)
    if err != nil {
        return nil, err
    }
    // factory used by the adapter to instantiate the response model
    factory := func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable {
        return i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.NewMultiValueLegacyExtendedProperty()
    }
    res, err := m.requestAdapter.SendAsync(*requestInfo, factory, nil)
    if err != nil {
        return nil, err
    }
    return res.(*i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.MultiValueLegacyExtendedProperty), nil
}
// Patch the collection of multi-value extended properties defined for the event. Read-only. Nullable.
// Executes the PATCH request; no response body is expected.
func (m *MultiValueLegacyExtendedPropertyRequestBuilder) Patch(options *MultiValueLegacyExtendedPropertyRequestBuilderPatchOptions)(error) {
    requestInfo, err := m.CreatePatchRequestInformation(options)
    if err != nil {
        return err
    }
    // forward the adapter's error(nil on success) directly
    return m.requestAdapter.SendNoContentAsync(*requestInfo, nil)
}
| NewMultiValueLegacyExtendedPropertyRequestBuilderInternal |
bitz.py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import OnMaintenance
class bitz(Exchange):
    def describe(self):
        """Static exchange metadata: id, capabilities, timeframes, endpoints,
        fees, precision defaults, currency remaps and the API error-code to
        exception-class mapping, deep-merged over the base Exchange description."""
        return self.deep_extend(super(bitz, self).describe(), {
            'id': 'bitz',
            'name': 'Bit-Z',
            'countries': ['HK'],
            'rateLimit': 2000,
            'version': 'v2',
            'userAgent': self.userAgents['chrome'],
            'has': {
                'cancelOrder': True,
                'cancelOrders': True,
                'createOrder': True,
                'createMarketOrder': False,
                'fetchBalance': True,
                'fetchDeposits': True,
                'fetchClosedOrders': True,
                'fetchMarkets': True,
                'fetchOHLCV': True,
                'fetchOpenOrders': True,
                'fetchOrder': True,
                'fetchOrderBook': True,
                'fetchOrders': True,
                'fetchTicker': True,
                'fetchTickers': True,
                'fetchTime': True,
                'fetchTrades': True,
                'fetchTransactions': False,
                'fetchWithdrawals': True,
                'withdraw': True,
            },
            'timeframes': {
                '1m': '1min',
                '5m': '5min',
                '15m': '15min',
                '30m': '30min',
                '1h': '60min',
                '4h': '4hour',
                '1d': '1day',
                '5d': '5day',
                '1w': '1week',
                '1M': '1mon',
            },
            'hostname': 'apiv2.bitz.com',
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/51840849/87443304-fec5e000-c5fd-11ea-98f8-ba8e67f7eaff.jpg',
                # all three API groups share the same host, substituted via {hostname}
                'api': {
                    'market': 'https://{hostname}',
                    'trade': 'https://{hostname}',
                    'assets': 'https://{hostname}',
                },
                'www': 'https://www.bitz.com',
                'doc': 'https://apidoc.bitz.com/en/',
                'fees': 'https://www.bitz.com/fee?type=1',
                'referral': 'https://u.bitz.com/register?invite_code=1429193',
            },
            # implicit API: each entry becomes a method like self.marketGetTicker
            'api': {
                'market': {
                    'get': [
                        'ticker',
                        'depth',
                        'order', # trades
                        'tickerall',
                        'kline',
                        'symbolList',
                        'getServerTime',
                        'currencyRate',
                        'currencyCoinRate',
                        'coinRate',
                    ],
                },
                'trade': {
                    'post': [
                        'addEntrustSheet',
                        'cancelEntrustSheet',
                        'cancelAllEntrustSheet',
                        'coinOut', # withdraw
                        'getUserHistoryEntrustSheet', # closed orders
                        'getUserNowEntrustSheet', # open orders
                        'getEntrustSheetInfo', # order
                        'depositOrWithdraw', # transactions
                    ],
                },
                'assets': {
                    'post': [
                        'getUserAssets',
                    ],
                },
            },
            'fees': {
                'trading': {
                    'maker': 0.002,
                    'taker': 0.002,
                },
                # withdraw fees: strings are percentages, numbers are flat amounts
                'funding': {
                    'withdraw': {
                        'BTC': '0.5%',
                        'DKKT': '0.5%',
                        'ETH': 0.01,
                        'USDT': '0.5%',
                        'LTC': '0.5%',
                        'FCT': '0.5%',
                        'LSK': '0.5%',
                        'HXI': '0.8%',
                        'ZEC': '0.5%',
                        'DOGE': '0.5%',
                        'MZC': '0.5%',
                        'ETC': '0.5%',
                        'GXS': '0.5%',
                        'XPM': '0.5%',
                        'PPC': '0.5%',
                        'BLK': '0.5%',
                        'XAS': '0.5%',
                        'HSR': '0.5%',
                        'NULS': 5.0,
                        'VOISE': 350.0,
                        'PAY': 1.5,
                        'EOS': 0.6,
                        'YBCT': 35.0,
                        'OMG': 0.3,
                        'OTN': 0.4,
                        'BTX': '0.5%',
                        'QTUM': '0.5%',
                        'DASH': '0.5%',
                        'GAME': '0.5%',
                        'BCH': '0.5%',
                        'GNT': 9.0,
                        'SSS': 1500.0,
                        'ARK': '0.5%',
                        'PART': '0.5%',
                        'LEO': '0.5%',
                        'DGB': '0.5%',
                        'ZSC': 130.0,
                        'VIU': 350.0,
                        'BTG': '0.5%',
                        'ARN': 10.0,
                        'VTC': '0.5%',
                        'BCD': '0.5%',
                        'TRX': 200.0,
                        'HWC': '0.5%',
                        'UNIT': '0.5%',
                        'OXY': '0.5%',
                        'MCO': 0.3500,
                        'SBTC': '0.5%',
                        'BCX': '0.5%',
                        'ETF': '0.5%',
                        'PYLNT': 0.4000,
                        'XRB': '0.5%',
                        'ETP': '0.5%',
                    },
                },
            },
            'precision': {
                'amount': 8,
                'price': 8,
            },
            # lastNonceTimestamp backs the per-second nonce counter in self.nonce()
            'options': {
                'fetchOHLCVVolume': True,
                'fetchOHLCVWarning': True,
                'lastNonceTimestamp': 0,
            },
            'commonCurrencies': {
                # https://github.com/ccxt/ccxt/issues/3881
                # https://support.bit-z.pro/hc/en-us/articles/360007500654-BOX-BOX-Token-
                'BOX': 'BOX Token',
                'LEO': 'LeoCoin',
                'XRB': 'NANO',
                'PXC': 'Pixiecoin',
                'VTC': 'VoteCoin',
                'TTC': 'TimesChain',
            },
            'exceptions': {
                # '200': Success
                '-102': ExchangeError, # Invalid parameter
                '-103': AuthenticationError, # Verification failed
                '-104': ExchangeNotAvailable, # Network Error-1
                '-105': AuthenticationError, # Invalid api signature
                '-106': ExchangeNotAvailable, # Network Error-2
                '-109': AuthenticationError, # Invalid scretKey
                '-110': DDoSProtection, # The number of access requests exceeded
                '-111': PermissionDenied, # Current IP is not in the range of trusted IP
                '-112': OnMaintenance, # Service is under maintenance
                '-114': RateLimitExceeded, # The number of daily requests has reached the limit
                '-117': AuthenticationError, # The apikey expires
                '-100015': AuthenticationError, # Trade password error
                '-100044': ExchangeError, # Fail to request data
                '-100101': ExchangeError, # Invalid symbol
                '-100201': ExchangeError, # Invalid symbol
                '-100301': ExchangeError, # Invalid symbol
                '-100401': ExchangeError, # Invalid symbol
                '-100302': ExchangeError, # Type of K-line error
                '-100303': ExchangeError, # Size of K-line error
                '-200003': AuthenticationError, # Please set trade password
                '-200005': PermissionDenied, # This account can not trade
                '-200025': ExchangeNotAvailable, # Temporary trading halt
                '-200027': InvalidOrder, # Price Error
                '-200028': InvalidOrder, # Amount must be greater than 0
                '-200029': InvalidOrder, # Number must be between %s and %d
                '-200030': InvalidOrder, # Over price range
                '-200031': InsufficientFunds, # Insufficient assets
                '-200032': ExchangeError, # System error. Please contact customer service
                '-200033': ExchangeError, # Fail to trade
                '-200034': OrderNotFound, # The order does not exist
                '-200035': OrderNotFound, # Cancellation error, order filled
                '-200037': InvalidOrder, # Trade direction error
                '-200038': ExchangeError, # Trading Market Error
                '-200055': OrderNotFound, # Order record does not exist
                '-300069': AuthenticationError, # api_key is illegal
                '-300101': ExchangeError, # Transaction type error
                '-300102': InvalidOrder, # Price or number cannot be less than 0
                '-300103': AuthenticationError, # Trade password error
                '-301001': ExchangeNotAvailable, # Network Error-3
            },
        })
def fetch_markets(self, params={}):
response = self.marketGetSymbolList(params)
#
# { status: 200,
# msg: "",
# data: { ltc_btc: { id: "1",
# name: "ltc_btc",
# coinFrom: "ltc",
# coinTo: "btc",
# numberFloat: "4",
# priceFloat: "8",
# status: "1",
# minTrade: "0.010",
# maxTrade: "500000000.000"},
# qtum_usdt: { id: "196",
# name: "qtum_usdt",
# coinFrom: "qtum",
# coinTo: "usdt",
# numberFloat: "4",
# priceFloat: "2",
# status: "1",
# minTrade: "0.100",
# maxTrade: "500000000.000"}, },
# time: 1535969146,
# microtime: "0.66955600 1535969146",
# source: "api" }
#
markets = self.safe_value(response, 'data')
ids = list(markets.keys())
result = []
for i in range(0, len(ids)):
id = ids[i]
market = markets[id]
numericId = self.safe_string(market, 'id')
baseId = self.safe_string(market, 'coinFrom')
quoteId = self.safe_string(market, 'coinTo')
base = baseId.upper()
quote = quoteId.upper()
base = self.safe_currency_code(base)
quote = self.safe_currency_code(quote)
symbol = base + '/' + quote
precision = {
'amount': self.safe_integer(market, 'numberFloat'),
'price': self.safe_integer(market, 'priceFloat'),
}
result.append({
'info': market,
'id': id,
'numericId': numericId,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': True,
'precision': precision,
'limits': {
'amount': {
'min': self.safe_float(market, 'minTrade'),
'max': self.safe_float(market, 'maxTrade'),
},
'price': {
'min': math.pow(10, -precision['price']),
'max': None,
},
'cost': {
'min': None,
'max': None,
},
},
})
return result
def fetch_balance(self, params={}):
self.load_markets()
response = self.assetsPostGetUserAssets(params)
#
# {
# status: 200,
# msg: "",
# data: {
# cny: 0,
# usd: 0,
# btc_total: 0,
# info: [{
# "name": "zpr",
# "num": "37.49067275",
# "over": "37.49067275",
# "lock": "0.00000000",
# "btc": "0.00000000",
# "usd": "0.00000000",
# "cny": "0.00000000",
# }],
# },
# time: 1535983966,
# microtime: "0.70400500 1535983966",
# source: "api",
# }
#
balances = self.safe_value(response['data'], 'info')
result = {'info': response}
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_string(balance, 'name')
code = self.safe_currency_code(currencyId)
account = self.account()
account['used'] = self.safe_float(balance, 'lock')
account['total'] = self.safe_float(balance, 'num')
account['free'] = self.safe_float(balance, 'over')
result[code] = account
return self.parse_balance(result)
def parse_ticker(self, ticker, market=None):
#
# { symbol: "eth_btc",
# quoteVolume: "3905.72",
# volume: "97058.21",
# priceChange: "-1.72",
# priceChange24h: "-1.65",
# askPrice: "0.03971272",
# askQty: "0.0663",
# bidPrice: "0.03961469",
# bidQty: "19.5451",
# open: "0.04036769",
# high: "0.04062988",
# low: "0.03956123",
# now: "0.03970100",
# firstId: 115567767,
# lastId: 115795316,
# dealCount: 14078,
# numberPrecision: 4,
# pricePrecision: 8,
# cny: "1959.05",
# usd: "287.10",
# krw: "318655.82" }
#
timestamp = None
marketId = self.safe_string(ticker, 'symbol')
symbol = self.safe_symbol(marketId, market, '_')
last = self.safe_float(ticker, 'now')
open = self.safe_float(ticker, 'open')
change = None
average = None
if last is not None and open is not None:
change = last - open
average = self.sum(last, open) / 2
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': self.safe_float(ticker, 'bidPrice'),
'bidVolume': self.safe_float(ticker, 'bidQty'),
'ask': self.safe_float(ticker, 'askPrice'),
'askVolume': self.safe_float(ticker, 'askQty'),
'vwap': None,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': change,
'percentage': self.safe_float(ticker, 'priceChange24h'),
'average': average,
'baseVolume': self.safe_float(ticker, 'volume'),
'quoteVolume': self.safe_float(ticker, 'quoteVolume'),
'info': ticker,
}
def parse_microtime(self, microtime):
if microtime is None:
return microtime
parts = microtime.split(' ')
milliseconds = float(parts[0])
seconds = int(parts[1])
total = self.sum(seconds, milliseconds)
return int(total * 1000)
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = self.marketGetTicker(self.extend(request, params))
#
# { status: 200,
# msg: "",
# data: { symbol: "eth_btc",
# quoteVolume: "3905.72",
# volume: "97058.21",
# priceChange: "-1.72",
# priceChange24h: "-1.65",
# askPrice: "0.03971272",
# askQty: "0.0663",
# bidPrice: "0.03961469",
# bidQty: "19.5451",
# open: "0.04036769",
# high: "0.04062988",
# low: "0.03956123",
# now: "0.03970100",
# firstId: 115567767,
# lastId: 115795316,
# dealCount: 14078,
# numberPrecision: 4,
# pricePrecision: 8,
# cny: "1959.05",
# usd: "287.10",
# krw: "318655.82" },
# time: 1535970397,
# microtime: "0.76341900 1535970397",
# source: "api" }
#
ticker = self.parse_ticker(response['data'], market)
timestamp = self.parse_microtime(self.safe_string(response, 'microtime'))
return self.extend(ticker, {
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
})
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
request = {}
if symbols is not None:
ids = self.market_ids(symbols)
request['symbols'] = ','.join(ids)
response = self.marketGetTickerall(self.extend(request, params))
#
# { status: 200,
# msg: "",
# data: { ela_btc: { symbol: "ela_btc",
# quoteVolume: "0.00",
# volume: "3.28",
# priceChange: "0.00",
# priceChange24h: "0.00",
# askPrice: "0.00147984",
# askQty: "5.4580",
# bidPrice: "0.00120230",
# bidQty: "12.5384",
# open: "0.00149078",
# high: "0.00149078",
# low: "0.00149078",
# now: "0.00149078",
# firstId: 115581219,
# lastId: 115581219,
# dealCount: 1,
# numberPrecision: 4,
# pricePrecision: 8,
# cny: "73.66",
# usd: "10.79",
# krw: "11995.03" } },
# time: 1535971578,
# microtime: "0.39854200 1535971578",
# source: "api" }
#
tickers = self.safe_value(response, 'data')
timestamp = self.parse_microtime(self.safe_string(response, 'microtime'))
result = {}
ids = list(tickers.keys())
for i in range(0, len(ids)):
id = ids[i]
ticker = tickers[id]
market = None
if id in self.markets_by_id:
market = self.markets_by_id[id]
ticker = self.parse_ticker(tickers[id], market)
symbol = ticker['symbol']
if symbol is None:
if market is not None:
symbol = market['symbol']
else:
baseId, quoteId = id.split('_')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
if symbol is not None:
result[symbol] = self.extend(ticker, {
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
})
return self.filter_by_array(result, 'symbol', symbols)
def fetch_time(self, params={}):
response = self.marketGetGetServerTime(params)
#
# {
# "status":200,
# "msg":"",
# "data":[],
# "time":1555490875,
# "microtime":"0.35994200 1555490875",
# "source":"api"
# }
#
return self.safe_timestamp(response, 'time')
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
request = {
'symbol': self.market_id(symbol),
}
response = self.marketGetDepth(self.extend(request, params))
#
# { status: 200,
# msg: "",
# data: { asks: [["10.00000000", "0.4426", "4.4260"],
# ["1.00000000", "0.8339", "0.8339"],
# ["0.91700000", "0.0500", "0.0458"],
# ["0.20000000", "0.1000", "0.0200"],
# ["0.03987120", "16.1262", "0.6429"],
# ["0.03986120", "9.7523", "0.3887"] ],
# bids: [["0.03976145", "0.0359", "0.0014"],
# ["0.03973401", "20.9493", "0.8323"],
# ["0.03967970", "0.0328", "0.0013"],
# ["0.00000002", "10000.0000", "0.0002"],
# ["0.00000001", "231840.7500", "0.0023"]],
# coinPair: "eth_btc" },
# time: 1535974778,
# microtime: "0.04017400 1535974778",
# source: "api" }
#
orderbook = self.safe_value(response, 'data')
timestamp = self.parse_microtime(self.safe_string(response, 'microtime'))
return self.parse_order_book(orderbook, timestamp)
    def parse_trade(self, trade, market=None):
        """Parse one public trade: {id, t(clock time), T(unix seconds),
        p(price), n(amount), s(side)} into the unified trade structure."""
        #
        # fetchTrades(public)
        #
        # {id: 115807453,
        #  t: "19:36:24",
        #  T: 1535974584,
        #  p: "0.03983296",
        #  n: "0.1000",
        #  s: "buy" },
        #
        id = self.safe_string(trade, 'id')
        timestamp = self.safe_timestamp(trade, 'T')
        symbol = None
        if market is not None:
            symbol = market['symbol']
        price = self.safe_float(trade, 'p')
        amount = self.safe_float(trade, 'n')
        cost = None
        if price is not None:
            if amount is not None:
                # NOTE(review): price_to_precision returns a string and needs a
                # non-None symbol, so cost is a string here and this raises when
                # no market was passed in - confirm this is intended
                cost = self.price_to_precision(symbol, amount * price)
        side = self.safe_string(trade, 's')
        return {
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'id': id,
            'order': None,
            'type': 'limit',
            'side': side,
            'takerOrMaker': None,
            'price': price,
            'amount': amount,
            'cost': cost,
            'fee': None,
            'info': trade,
        }
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = self.marketGetOrder(self.extend(request, params))
#
# { status: 200,
# msg: "",
# data: [{id: 115807453,
# t: "19:36:24",
# T: 1535974584,
# p: "0.03983296",
# n: "0.1000",
# s: "buy" },
# {id: 115806811,
# t: "19:33:19",
# T: 1535974399,
# p: "0.03981135",
# n: "9.4612",
# s: "sell" } ],
# time: 1535974583,
# microtime: "0.57118100 1535974583",
# source: "api" }
#
return self.parse_trades(response['data'], market, since, limit)
def parse_ohlcv(self, ohlcv, market=None):
#
# {
# time: "1535973420000",
# open: "0.03975084",
# high: "0.03975084",
# low: "0.03967700",
# close: "0.03967700",
# volume: "12.4733",
# datetime: "2018-09-03 19:17:00"
# }
#
return [
self.safe_integer(ohlcv, 'time'),
self.safe_float(ohlcv, 'open'),
self.safe_float(ohlcv, 'high'),
self.safe_float(ohlcv, 'low'),
self.safe_float(ohlcv, 'close'),
self.safe_float(ohlcv, 'volume'),
]
    def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """Fetch OHLCV candles via marketGetKline.  'size' is capped at 300 by
        the API; when 'since' is given a 'limit' is mandatory so the end of the
        window('to') can be computed.  Candles live under data.bars."""
        self.load_markets()
        duration = self.parse_timeframe(timeframe) * 1000
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
            'resolution': self.timeframes[timeframe],
        }
        if limit is not None:
            request['size'] = min(limit, 300) # 1-300
            if since is not None:
                # NOTE(review): duration is already in milliseconds, so the extra
                # * 1000 makes 'to' 1000x larger than since + limit * duration -
                # confirm against the kline endpoint's expected units
                request['to'] = self.sum(since, limit * duration * 1000)
        else:
            if since is not None:
                raise ArgumentsRequired(self.id + ' fetchOHLCV requires a limit argument if the since argument is specified')
        response = self.marketGetKline(self.extend(request, params))
        #
        # {
        #     status: 200,
        #     msg: "",
        #     data: {
        #         bars: [
        #             {time: "1535973420000", open: "0.03975084", high: "0.03975084", low: "0.03967700", close: "0.03967700", volume: "12.4733", datetime: "2018-09-03 19:17:00"},
        #             {time: "1535955480000", open: "0.04009900", high: "0.04016745", low: "0.04009900", close: "0.04012074", volume: "74.4803", datetime: "2018-09-03 14:18:00"},
        #         ],
        #         resolution: "1min",
        #         symbol: "eth_btc",
        #         from: "1535973420000",
        #         to: "1535955480000",
        #         size: 300
        #     },
        #     time: 1535973435,
        #     microtime: "0.56462100 1535973435",
        #     source: "api"
        # }
        #
        data = self.safe_value(response, 'data', {})
        bars = self.safe_value(data, 'bars', [])
        return self.parse_ohlcvs(bars, market, timeframe, since, limit)
def parse_order_status(self, status):
statuses = {
'0': 'open',
'1': 'open', # partially filled
'2': 'closed', # filled
'3': 'canceled',
}
return self.safe_string(statuses, status, status)
    def parse_order(self, order, market=None):
        """Parse an entrust sheet(from createOrder/fetchOrder/fetchOrders) into
        the unified order structure.  Raw fields: id, price, number(amount),
        numberOver(remaining), numberDeal(filled), flag('sale'/'buy'), status,
        coinFrom/coinTo, orderTotalPrice, created(s) or injected timestamp(ms)."""
        id = self.safe_string(order, 'id')
        symbol = None
        if market is None:
            # resolve the market(or at least the symbol) from coinFrom/coinTo
            baseId = self.safe_string(order, 'coinFrom')
            quoteId = self.safe_string(order, 'coinTo')
            if (baseId is not None) and (quoteId is not None):
                marketId = baseId + '_' + quoteId
                if marketId in self.markets_by_id:
                    market = self.safe_value(self.markets_by_id, marketId)
                else:
                    base = self.safe_currency_code(baseId)
                    quote = self.safe_currency_code(quoteId)
                    symbol = base + '/' + quote
        if market is not None:
            symbol = market['symbol']
        side = self.safe_string(order, 'flag')
        if side is not None:
            # the API says 'sale' for sells; anything else is a buy
            side = 'sell' if (side == 'sale') else 'buy'
        price = self.safe_float(order, 'price')
        amount = self.safe_float(order, 'number')
        remaining = self.safe_float(order, 'numberOver')
        filled = self.safe_float(order, 'numberDeal')
        timestamp = self.safe_integer(order, 'timestamp')
        if timestamp is None:
            # createOrder injects 'timestamp'(ms); fetched orders carry 'created'(s)
            timestamp = self.safe_timestamp(order, 'created')
        cost = self.safe_float(order, 'orderTotalPrice')
        # NOTE(review): whenever price and filled are present, cost is recomputed
        # as filled * price, clobbering orderTotalPrice - confirm intended
        if price is not None:
            if filled is not None:
                cost = filled * price
        status = self.parse_order_status(self.safe_string(order, 'status'))
        return {
            'id': id,
            'clientOrderId': None,
            'datetime': self.iso8601(timestamp),
            'timestamp': timestamp,
            'lastTradeTimestamp': None,
            'status': status,
            'symbol': symbol,
            'type': 'limit',
            'side': side,
            'price': price,
            'cost': cost,
            'amount': amount,
            'filled': filled,
            'remaining': remaining,
            'trades': None,
            'fee': None,
            'info': order,
            'average': None,
        }
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
if type != 'limit':
raise ExchangeError(self.id + ' createOrder allows limit orders only')
market = self.market(symbol)
orderType = '1' if (side == 'buy') else '2'
if not self.password:
raise ExchangeError(self.id + ' createOrder() requires you to set exchange.password = "YOUR_TRADING_PASSWORD"(a trade password is NOT THE SAME as your login password)')
request = {
'symbol': market['id'],
'type': orderType,
'price': self.price_to_precision(symbol, price),
'number': self.amount_to_precision(symbol, amount),
'tradePwd': self.password,
}
response = self.tradePostAddEntrustSheet(self.extend(request, params))
#
# {
# "status": 200,
# "msg": "",
# "data": {
# "id": "693248739", # order id
# "uId": "2074056", # uid
# "price": "100", # price
# "number": "10", # number
# "numberOver": "10", # undealed
# "flag": "sale", # flag
# "status": "0", # unfilled
# "coinFrom": "vtc",
# "coinTo": "dkkt",
# "numberDeal": "0" # dealed
# },
# "time": "1533035297",
# "microtime": "0.41892000 1533035297",
# "source": "api",
# }
#
timestamp = self.parse_microtime(self.safe_string(response, 'microtime'))
order = self.extend({
'timestamp': timestamp,
}, response['data'])
return self.parse_order(order, market)
def cancel_order(self, id, symbol=None, params={}):
self.load_markets()
request = {
'entrustSheetId': id,
}
response = self.tradePostCancelEntrustSheet(self.extend(request, params))
#
# {
# "status":200,
# "msg":"",
# "data":{
# "updateAssetsData":{
# "coin":"bz",
# "over":"1000.00000000",
# "lock":"-1000.00000000"
# },
# "assetsInfo":{
# "coin":"bz",
# "over":"9999.99999999",
# "lock":"9999.99999999"
# }
# },
# "time":"1535464383",
# "microtime":"0.91558000 1535464383",
# "source":"api"
# }
#
return response
def cancel_orders(self, ids, symbol=None, params={}):
self.load_markets()
request = {
'ids': ','.join(ids),
}
response = self.tradePostCancelEntrustSheet(self.extend(request, params))
#
# {
# "status":200,
# "msg":"",
# "data":{
# "744173808":{
# "updateAssetsData":{
# "coin":"bz",
# "over":"100.00000000",
# "lock":"-100.00000000"
# },
# "assetsInfo":{
# "coin":"bz",
# "over":"899.99999999",
# "lock":"19099.99999999"
# }
# },
# "744173809":{
# "updateAssetsData":{
# "coin":"bz",
# "over":"100.00000000",
# "lock":"-100.00000000"
# },
# "assetsInfo":{
# "coin":"bz",
# "over":"999.99999999",
# "lock":"18999.99999999"
# }
# }
# },
# "time":"1535525649",
# "microtime":"0.05009400 1535525649",
# "source":"api"
# }
#
return response
def fetch_order(self, id, symbol=None, params={}):
self.load_markets()
request = {
'entrustSheetId': id,
}
response = self.tradePostGetEntrustSheetInfo(self.extend(request, params))
#
# {
# "status":200,
# "msg":"",
# "data":{
# "id":"708279852",
# "uId":"2074056",
# "price":"100.00000000",
# "number":"10.0000",
# "total":"0.00000000",
# "numberOver":"10.0000",
# "numberDeal":"0.0000",
# "flag":"sale",
# "status":"0", #0:unfilled, 1:partial deal, 2:all transactions, 3:already cancelled
# "coinFrom":"bz",
# "coinTo":"usdt",
# "orderTotalPrice":"0",
# "created":"1533279876"
# },
# "time":"1533280294",
# "microtime":"0.36859200 1533280294",
# "source":"api"
# }
#
return self.parse_order(response['data'])
def fetch_orders_with_method(self, method, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOpenOrders requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'coinFrom': market['baseId'],
'coinTo': market['quoteId'],
# 'type': 1, # optional integer, 1 = buy, 2 = sell
# 'page': 1, # optional integer
# 'pageSize': 100, # optional integer, max 100
# 'startTime': 1510235730, # optional integer timestamp in seconds
# 'endTime': 1510235730, # optional integer timestamp in seconds
}
if limit is not None:
request['page'] = 1
request['pageSize'] = limit
if since is not None:
request['startTime'] = int(since / 1000)
# request['endTime'] = int(since / 1000)
response = getattr(self, method)(self.extend(request, params))
#
# {
# "status": 200,
# "msg": "",
# "data": {
# "data": [
# {
# "id": "693248739",
# "uid": "2074056",
# "price": "100.00000000",
# "number": "10.0000",
# "total": "0.00000000",
# "numberOver": "0.0000",
# "numberDeal": "0.0000",
# "flag": "sale",
# "status": "3", # 0:unfilled, 1:partial deal, 2:all transactions, 3:already cancelled
# "isNew": "N",
# "coinFrom": "vtc",
# "coinTo": "dkkt",
# "created": "1533035300",
# },
# {
# "id": "723086996",
# "uid": "2074056",
# "price": "100.00000000",
# "number": "10.0000",
# "total": "0.00000000",
# "numberOver": "0.0000",
# "numberDeal": "0.0000",
# "flag": "sale",
# "status": "3",
# "isNew": "N",
# "coinFrom": "bz",
# "coinTo": "usdt",
# "created": "1533523568",
# },
# ],
# "pageInfo": {
# "limit": "10",
# "offest": "0",
# "current_page": "1",
# "page_size": "10",
# "total_count": "17",
# "page_count": "2",
# }
# },
# "time": "1533279329",
# "microtime": "0.15305300 1533279329",
# "source": "api"
# }
#
orders = self.safe_value(response['data'], 'data', [])
return self.parse_orders(orders, None, since, limit)
    def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
        # uses the history entrust sheet endpoint - same source as fetch_closed_orders
        return self.fetch_orders_with_method('tradePostGetUserHistoryEntrustSheet', symbol, since, limit, params)
    def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
        # currently-open entrust sheets only
        return self.fetch_orders_with_method('tradePostGetUserNowEntrustSheet', symbol, since, limit, params)
    def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
        # historical(filled/cancelled) entrust sheets
        return self.fetch_orders_with_method('tradePostGetUserHistoryEntrustSheet', symbol, since, limit, params)
def parse_transaction_status(self, status):
statuses = {
'1': 'pending',
'2': 'pending',
'3': 'pending',
'4': 'ok',
'5': 'canceled',
}
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# {
# "id": '96275',
# "uid": '2109073',
# "wallet": '0xf4c4141c0127bc37b1d0c409a091920eba13ada7',
# "txid": '0xb7adfa52aa566f9ac112e3c01f77bd91179b19eab12092a9a5a8b33d5086e31d',
# "confirm": '12',
# "number": '0.50000000',
# "status": 4,
# "updated": '1534944168605',
# "addressUrl": 'https://etherscan.io/address/',
# "txidUrl": 'https://etherscan.io/tx/',
# "description": 'Ethereum',
# "coin": 'eth',
# "memo": ''
# }
#
# {
# "id":"397574",
# "uid":"2033056",
# "wallet":"1AG1gZvQAYu3WBvgg7p4BMMghQD2gE693k",
# "txid":"",
# "confirm":"0",
# "number":"1000.00000000",
# "status":1,
# "updated":"0",
# "addressUrl":"http://omniexplorer.info/lookupadd.aspx?address=",
# "txidUrl":"http://omniexplorer.info/lookuptx.aspx?txid=",
# "description":"Tether",
# "coin":"usdt",
# "memo":""
# }
#
# {
# "id":"153606",
# "uid":"2033056",
# "wallet":"1AG1gZvQAYu3WBvgg7p4BMMghQD2gE693k",
# "txid":"aa2b179f84cd6dedafd41845e0fbf7f01e14c0d71ea3140d03d6f5a9ccd93199",
# "confirm":"0",
# "number":"761.11110000",
# "status":4,
# "updated":"1536726133579",
# "addressUrl":"http://omniexplorer.info/lookupadd.aspx?address=",
# "txidUrl":"http://omniexplorer.info/lookuptx.aspx?txid=",
# "description":"Tether",
# "coin":"usdt",
# "memo":""
# }
#
# withdraw
#
# {
# "id":397574,
# "email":"***@email.com",
# "coin":"usdt",
# "network_fee":"",
# "eid":23112
# }
#
timestamp = self.safe_integer(transaction, 'updated')
if timestamp == 0:
timestamp = None
currencyId = self.safe_string(transaction, 'coin')
code = self.safe_currency_code(currencyId, currency)
type = self.safe_string_lower(transaction, 'type')
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
fee = None
feeCost = self.safe_float(transaction, 'network_fee')
if feeCost is not None:
fee = {
'cost': feeCost,
'code': code,
}
return {
'id': self.safe_string(transaction, 'id'),
'txid': self.safe_string(transaction, 'txid'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': self.safe_string(transaction, 'wallet'),
'tag': self.safe_string(transaction, 'memo'),
'type': type,
'amount': self.safe_float(transaction, 'number'),
'currency': code,
'status': status,
'updated': timestamp,
'fee': fee,
'info': transaction,
}
def parse_transactions_by_type(self, type, transactions, code=None, since=None, limit=None):
result = []
for i in range(0, len(transactions)):
transaction = self.parse_transaction(self.extend({
'type': type,
}, transactions[i]))
result.append(transaction)
return self.filter_by_currency_since_limit(result, code, since, limit)
def parse_transaction_type(self, type):
types = {
'deposit': 1,
'withdrawal': 2,
}
return self.safe_integer(types, type, type)
    def fetch_deposits(self, code=None, since=None, limit=None, params={}):
        # Thin wrapper: deposit history is fetched through the shared
        # fetch_transactions_for_type() helper with type='deposit'.
        return self.fetch_transactions_for_type('deposit', code, since, limit, params)
    def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
        # Thin wrapper: withdrawal history is fetched through the shared
        # fetch_transactions_for_type() helper with type='withdrawal'.
        return self.fetch_transactions_for_type('withdrawal', code, since, limit, params)
def fetch_transactions_for_type(self, type, code=None, since=None, limit=None, params={}):
if code is None:
|
self.load_markets()
currency = self.currency(code)
request = {
'coin': currency['id'],
'type': self.parse_transaction_type(type),
}
if since is not None:
request['startTime'] = int(since / str(1000))
if limit is not None:
request['page'] = 1
request['pageSize'] = limit
response = self.tradePostDepositOrWithdraw(self.extend(request, params))
transactions = self.safe_value(response['data'], 'data', [])
return self.parse_transactions_by_type(type, transactions, code, since, limit)
    def withdraw(self, code, amount, address, tag=None, params={}):
        """Create a withdrawal of `amount` of currency `code` to `address`.

        :param str code: unified currency code
        :param float amount: amount to withdraw
        :param str address: destination address (validated first)
        :param str tag: optional memo for currencies that require one
        :param dict params: extra exchange-specific parameters, e.g. 'type'
            to select the chain ('erc20', 'omni', 'trc20')
        :returns: a parsed transaction structure
        """
        self.check_address(address)
        self.load_markets()
        currency = self.currency(code)
        request = {
            'coin': currency['id'],
            'number': self.currency_to_precision(code, amount),
            'address': address,
            # 'type': 'erc20',  # omni, trc20, optional
        }
        if tag is not None:
            request['memo'] = tag
        response = self.tradePostCoinOut(self.extend(request, params))
        #
        # sample response:
        #
        #     {
        #         "status":200,
        #         "msg":"",
        #         "data":{
        #             "id":397574,
        #             "email":"***@email.com",
        #             "coin":"usdt",
        #             "network_fee":"",
        #             "eid":23112
        #         },
        #         "time":1552641646,
        #         "microtime":"0.70304500 1552641646",
        #         "source":"api"
        #     }
        #
        data = self.safe_value(response, 'data', {})
        return self.parse_transaction(data, currency)
    def nonce(self):
        """Return a per-request nonce.

        Scheme: within one second the nonce counts up from 100001; when the
        clock ticks over to a new second the counter resets.  The pair
        (timeStamp, nonce) sent by sign() is therefore unique for
        consecutive requests.
        """
        currentTimestamp = self.seconds()
        if currentTimestamp > self.options['lastNonceTimestamp']:
            # new second: remember it and reset the counter
            self.options['lastNonceTimestamp'] = currentTimestamp
            self.options['lastNonce'] = 100000
        self.options['lastNonce'] = self.sum(self.options['lastNonce'], 1)
        return self.options['lastNonce']
    def sign(self, path, api='market', method='GET', params={}, headers=None, body=None):
        """Build the URL, body and headers for an API request.

        Public 'market' calls send params in the query string; private calls
        send an urlencoded, key-sorted body carrying the apiKey, a
        second-resolution timestamp, a nonce, and a hash signature computed
        over body + secret.
        """
        baseUrl = self.implode_params(self.urls['api'][api], {'hostname': self.hostname})
        url = baseUrl + '/' + self.capitalize(api) + '/' + path
        query = None
        if api == 'market':
            query = self.urlencode(params)
            if len(query):
                url += '?' + query
        else:
            self.check_required_credentials()
            # private endpoints: key-sorted urlencoded params ...
            body = self.rawencode(self.keysort(self.extend({
                'apiKey': self.apiKey,
                'timeStamp': self.seconds(),
                'nonce': self.nonce(),
            }, params)))
            # ... plus a signature suffix over body + secret
            body += '&sign=' + self.hash(self.encode(body + self.secret))
            headers = {'Content-type': 'application/x-www-form-urlencoded'}
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
    def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody):
        """Inspect a parsed API response and raise on exchange-level errors.

        The exchange signals failure two ways: a non-200 top-level 'status',
        or status 200 with a negative error code carried in 'data'.
        """
        if response is None:
            return  # fallback to default error handler
        status = self.safe_string(response, 'status')
        if status is not None:
            feedback = self.id + ' ' + body
            #
            # {"status":-107,"msg":"","data":"","time":1535968848,"microtime":"0.89092200 1535968848","source":"api"}
            #
            if status == '200':
                #
                # a 200 status can still carry an error code in 'data':
                # {"status":200,"msg":"","data":-200031,"time":1535999806,"microtime":"0.85476800 1535999806","source":"api"}
                #
                code = self.safe_integer(response, 'data')
                if code is not None:
                    self.throw_exactly_matched_exception(self.exceptions, code, feedback)
                    raise ExchangeError(feedback)
                else:
                    return  # no error
            self.throw_exactly_matched_exception(self.exceptions, status, feedback)
            raise ExchangeError(feedback)
| raise ArgumentsRequired(self.id + ' fetchTransactions() requires a currency `code` argument') |
retry_test.go | package output
import (
"testing"
"time"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/message"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/response"
"github.com/Jeffail/benthos/v3/lib/types"
)
// TestRetryConfigErrs verifies that constructing a retry output fails fast on
// invalid configuration: first with no child output set, then with a backoff
// initial interval that cannot be parsed as a duration.
func TestRetryConfigErrs(t *testing.T) {
	conf := NewConfig()
	conf.Type = "retry"
	// No conf.Retry.Output is configured yet, so construction must fail.
	if _, err := New(conf, nil, log.Noop(), metrics.Noop()); err == nil {
		t.Error("Expected error from bad retry output")
	}
	oConf := NewConfig()
	conf.Retry.Output = &oConf
	conf.Retry.Backoff.InitialInterval = "not a time period"
	// A child output is now present, but the interval string is invalid.
	if _, err := New(conf, nil, log.Noop(), metrics.Noop()); err == nil {
		t.Error("Expected error from bad initial period")
	}
}
// TestRetryBasic exercises the happy path: a transaction flows through the
// retry output to the wrapped output, is acked on first delivery, and the
// ack propagates back to the original sender.
func TestRetryBasic(t *testing.T) {
	conf := NewConfig()
	childConf := NewConfig()
	conf.Retry.Output = &childConf
	output, err := NewRetry(conf, nil, log.Noop(), metrics.Noop())
	if err != nil {
		t.Fatal(err)
	}
	ret, ok := output.(*Retry)
	if !ok {
		t.Fatal("Failed to cast")
	}
	mOut := &mockOutput{
		ts: make(chan types.Transaction),
	}
	ret.wrapped = mOut
	tChan := make(chan types.Transaction)
	resChan := make(chan types.Response)
	if err = ret.Consume(tChan); err != nil {
		t.Fatal(err)
	}
	testMsg := message.New(nil)
	go func() {
		select {
		case tChan <- types.NewTransaction(testMsg, resChan):
		case <-time.After(time.Second):
			// t.Fatal must only be called from the test goroutine
			// (see testing.T docs); report the failure and exit instead.
			t.Error("timed out")
			return
		}
	}()
	var tran types.Transaction
	select {
	case tran = <-mOut.ts:
	case <-time.After(time.Second):
		t.Fatal("timed out")
	}
	if tran.Payload != testMsg {
		t.Error("Wrong payload returned")
	}
	// Ack the delivery at the wrapped output ...
	select {
	case tran.ResponseChan <- response.NewAck():
	case <-time.After(time.Second):
		t.Fatal("timed out")
	}
	// ... and expect the ack to arrive back at the sender.
	select {
	case res := <-resChan:
		if err = res.Error(); err != nil {
			t.Error(err)
		}
	case <-time.After(time.Second):
		t.Fatal("timed out")
	}
	output.CloseAsync()
	if err = output.WaitForClose(time.Second); err != nil {
		t.Error(err)
	}
}
// TestRetrySadPath verifies the retry loop: a transaction that is repeatedly
// rejected (noack) by the wrapped output keeps being redelivered until it is
// finally acked, at which point the ack propagates back to the caller.
func TestRetrySadPath(t *testing.T) {
	conf := NewConfig()
	childConf := NewConfig()
	conf.Retry.Output = &childConf
	conf.Retry.Backoff.InitialInterval = "10us"
	conf.Retry.Backoff.MaxInterval = "10us"
	output, err := NewRetry(conf, nil, log.Noop(), metrics.Noop())
	if err != nil {
		t.Fatal(err)
	}
	ret, ok := output.(*Retry)
	if !ok {
		t.Fatal("Failed to cast")
	}
	mOut := &mockOutput{
		ts: make(chan types.Transaction),
	}
	ret.wrapped = mOut
	tChan := make(chan types.Transaction)
	resChan := make(chan types.Response)
	if err = ret.Consume(tChan); err != nil {
		t.Fatal(err)
	}
	testMsg := message.New(nil)
	tran := types.NewTransaction(testMsg, resChan)
	go func() {
		select {
		case tChan <- tran:
		case <-time.After(time.Second):
			// t.Fatal must only be called from the test goroutine
			// (see testing.T docs); report the failure and exit instead.
			t.Error("timed out")
			return
		}
	}()
	// Reject the delivery 100 times; each rejection must trigger a retry,
	// never a response to the original sender.
	for i := 0; i < 100; i++ {
		select {
		case tran = <-mOut.ts:
		case <-resChan:
			t.Fatal("Received response not retry")
		case <-time.After(time.Second):
			t.Fatal("timed out")
		}
		if tran.Payload != testMsg {
			t.Error("Wrong payload returned")
		}
		select {
		case tran.ResponseChan <- response.NewNoack():
		case <-time.After(time.Second):
			t.Fatal("timed out")
		}
	}
	select {
	case tran = <-mOut.ts:
	case <-resChan:
		t.Fatal("Received response not retry")
	case <-time.After(time.Second):
		t.Fatal("timed out")
	}
	if tran.Payload != testMsg {
		t.Error("Wrong payload returned")
	}
	// Finally accept the delivery; the ack must reach the sender.
	select {
	case tran.ResponseChan <- response.NewAck():
	case <-time.After(time.Second):
		t.Fatal("timed out")
	}
	select {
	case res := <-resChan:
		if err = res.Error(); err != nil {
			t.Error(err)
		}
	case <-time.After(time.Second):
		t.Fatal("timed out")
	}
	output.CloseAsync()
	if err = output.WaitForClose(time.Second); err != nil {
		t.Error(err)
	}
}
// expectFromRetry consumes transactions from tChan until every payload listed
// in responsesSlice has been seen (order independent), then sends resReturn on
// each received transaction's response channel.
func expectFromRetry(
	resReturn types.Response,
	tChan <-chan types.Transaction,
	t *testing.T,
	responsesSlice ...string) {
	t.Helper()
	// Track the payloads still expected as a set.
	responses := map[string]struct{}{}
	for _, k := range responsesSlice {
		responses[k] = struct{}{}
	}
	resChans := []chan<- types.Response{}
	for len(responses) > 0 {
		select {
		case tran := <-tChan:
			act := string(tran.Payload.Get(0).Get())
			if _, exists := responses[act]; exists {
				delete(responses, act)
			} else {
				t.Errorf("Wrong result: %v", act)
			}
			resChans = append(resChans, tran.ResponseChan)
		case <-time.After(time.Second):
			t.Fatal("timed out")
		}
	}
	// Respond only after all expected transactions have arrived, so every
	// in-flight transaction is held concurrently before any is released.
	for _, resChan := range resChans {
		select {
		case resChan <- resReturn:
		case <-time.After(time.Second):
			t.Fatal("timed out")
		}
	}
}
// sendForRetry pushes a single-part message containing value into tChan,
// failing the test if the send does not complete within a second.
func sendForRetry(
	value string,
	tChan chan types.Transaction,
	resChan chan types.Response,
	t *testing.T,
) {
	t.Helper()
	msg := message.New([][]byte{[]byte(value)})
	tran := types.NewTransaction(msg, resChan)
	select {
	case tChan <- tran:
	case <-time.After(time.Second):
		t.Fatal("timed out")
	}
}
// ackForRetry waits up to a second for a response on resChan and checks that
// its error matches the expected response's error.
func ackForRetry(
	exp types.Response,
	resChan <-chan types.Response,
	t *testing.T,
) {
	t.Helper()
	var res types.Response
	select {
	case res = <-resChan:
	case <-time.After(time.Second):
		t.Fatal("timed out")
	}
	if actErr, expErr := res.Error(), exp.Error(); actErr != expErr {
		t.Errorf("Unexpected response error: %v != %v", actErr, expErr)
	}
}
func TestRetryParallel(t *testing.T) {
conf := NewConfig()
childConf := NewConfig()
conf.Retry.Output = &childConf
conf.Retry.Backoff.InitialInterval = "10us"
conf.Retry.Backoff.MaxInterval = "10us"
output, err := NewRetry(conf, nil, log.Noop(), metrics.Noop())
if err != nil {
t.Fatal(err)
}
ret, ok := output.(*Retry)
if !ok {
t.Fatal("Failed to cast")
}
mOut := &mockOutput{
ts: make(chan types.Transaction),
}
ret.wrapped = mOut
tChan := make(chan types.Transaction)
if err = ret.Consume(tChan); err != nil {
t.Fatal(err)
}
resChan1, resChan2 := make(chan types.Response), make(chan types.Response)
sendForRetry("first", tChan, resChan1, t)
expectFromRetry(response.NewNoack(), mOut.ts, t, "first")
sendForRetry("second", tChan, resChan2, t)
expectFromRetry(response.NewNoack(), mOut.ts, t, "first", "second")
select {
case tChan <- types.NewTransaction(nil, nil):
t.Fatal("Accepted transaction during retry loop")
default:
}
expectFromRetry(response.NewAck(), mOut.ts, t, "first", "second")
ackForRetry(response.NewAck(), resChan1, t)
ackForRetry(response.NewAck(), resChan2, t)
sendForRetry("third", tChan, resChan1, t)
expectFromRetry(response.NewAck(), mOut.ts, t, "third")
ackForRetry(response.NewAck(), resChan1, t) |
sendForRetry("fourth", tChan, resChan2, t)
expectFromRetry(response.NewNoack(), mOut.ts, t, "fourth")
expectFromRetry(response.NewAck(), mOut.ts, t, "fourth")
ackForRetry(response.NewAck(), resChan2, t)
output.CloseAsync()
if err = output.WaitForClose(time.Second); err != nil {
t.Error(err)
}
} | |
fileParser.py | import ActividadVolcanica.JcampReader.fileHandler as handler
import numpy as np
def | (filename):
ToParse = handler.read(filename)
ArgList = ToParse.split("##")
ArgList.remove("")
Parameters =dict()
for x in ArgList:
try:
y = x.split("=")
if y[0] != "XYDATA":
Parameters[y[0].replace("\n","")] = y[1].replace("\n","")
else:
Parameters[y[0].replace("\n","")] = y[1]
except:
pass
return Parameters
def normalize (data):
    """Convert parsed transmittance data into a 2xN array with x in nanometers.

    np.rot90 turns the Nx2 [[x, y], ...] pair list from toarray() into a 2xN
    array: row 0 holds the y values, row 1 the x values.  The x axis is then
    rescaled to nanometers based on XUNITS (micrometers -> x1000; 1/cm ->
    1e7/x wavenumber inversion; nanometers left as-is).

    Returns the rotated numpy array, or None (after printing a notice) when
    YUNITS is not a transmittance axis.
    """
    if "TRANSMITTANCE" in data["YUNITS"].upper():
        data_array_ = np.rot90(np.array(toarray(data) ))
        if data["XUNITS"].upper() == "MICROMETERS":
            data_array_[1] = data_array_[1]*1000
        elif data["XUNITS"].upper() == "NANOMETERS":
            # already in nanometers, nothing to rescale
            pass
        elif data["XUNITS"].upper() == "1/CM":
            # wavenumber (cm^-1) to wavelength (nm): 1e7 nm per cm
            data_array_[1] = 10000000/data_array_[1]
        return data_array_
    else:
        # NOTE(review): falls through returning None implicitly — callers
        # should check for that before using the result.
        print ("No data to normalize")
def toarray(data):
    """Expand a JCAMP-DX '(X++(Y..Y))' table into [[x, y], ...] pairs.

    The x values are reconstructed from FIRSTX/LASTX/NPOINTS; each data line
    carries a starting x followed by consecutive y readings spaced by the
    computed increment.

    :param dict data: parsed JCAMP-DX parameters (as produced by parser())
    :returns: list of [x, y] float pairs; empty when the table header does
        not announce the X++(Y..Y) format
    """
    data_array = []
    increment = (float(data["LASTX"]) - float(data["FIRSTX"])) / (float(data["NPOINTS"]) - 1)
    data_set = data["XYDATA"].split("\n")
    if "X++(Y..Y)" in data_set[0]:
        for line in data_set[1:]:
            # split() with no argument tolerates runs of spaces/tabs;
            # line.split(" ") produced empty tokens that made float('') raise
            values = line.split()
            for i in range(len(values) - 1):
                data_array.append([float(values[0]) + i * increment, float(values[i + 1])])
    return data_array
if __name__ == '__main__':
pass
| parser |
ProfilesPanel.js | /*
* Copyright (C) 2008 Apple Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Title prefix marking profiles started explicitly by the user (as opposed
// to profiles triggered programmatically by the inspected page).
const UserInitiatedProfileName = "org.webkit.profiles.user-initiated";
/**
 * Base class describing one kind of profile (CPU, heap snapshot, ...).
 * Concrete subclasses are registered with the ProfilesPanel.
 * @constructor
 * @extends {WebInspector.Object}
 * @param {string} id unique identifier of this profile type
 * @param {string} name user-visible name of this profile type
 */
WebInspector.ProfileType = function(id, name)
{
    this._id = id;
    this._name = name;
    /**
     * Sidebar section listing profiles of this type; assigned by the panel
     * when the type is registered.
     * @type {WebInspector.SidebarSectionTreeElement}
     */
    this.treeElement = null;
}
/** Event types fired by profile types. */
WebInspector.ProfileType.Events = {
    ViewUpdated: "view-updated",
}
WebInspector.ProfileType.prototype = {
    // Status bar controls contributed by this profile type (none by default).
    get statusBarItems()
    {
        return [];
    },
    get buttonTooltip()
    {
        return "";
    },
    get id()
    {
        return this._id;
    },
    get treeItemTitle()
    {
        return this._name;
    },
    get name()
    {
        return this._name;
    },
    /**
     * Starts or stops profiling; returns whether profiling is now running.
     * @param {WebInspector.ProfilesPanel} profilesPanel
     * @return {boolean}
     */
    buttonClicked: function(profilesPanel)
    {
        return false;
    },
    reset: function()
    {
    },
    get description()
    {
        return "";
    },
    /**
     * @return {Element}
     */
    decorationElement: function()
    {
        return null;
    },
    /**
     * @param {string} title
     * @return {WebInspector.ProfileHeader}
     */
    createTemporaryProfile: function(title)
    {
        throw new Error("Needs implemented.");
    },
    /**
     * @param {ProfilerAgent.ProfileHeader} profile
     * @return {WebInspector.ProfileHeader}
     */
    createProfile: function(profile)
    {
        throw new Error("Not supported for " + this._name + " profiles.");
    },
    __proto__: WebInspector.Object.prototype
}
/**
 * Descriptor for a single recorded (or in-progress) profile.
 * @constructor
 * @param {!WebInspector.ProfileType} profileType
 * @param {string} title
 * @param {number=} uid omitted for a temporary (in-progress) profile
 */
WebInspector.ProfileHeader = function(profileType, title, uid)
{
    this._profileType = profileType;
    this.title = title;
    // A header created without a uid represents a temporary profile.
    var isTemporary = uid === undefined;
    this.uid = isTemporary ? -1 : uid;
    this.isTemporary = isTemporary;
    this._fromFile = false;
}
WebInspector.ProfileHeader.prototype = {
    /**
     * @return {!WebInspector.ProfileType}
     */
    profileType: function()
    {
        return this._profileType;
    },
    /**
     * Must be implemented by subclasses.
     * @return {WebInspector.ProfileSidebarTreeElement}
     */
    createSidebarTreeElement: function()
    {
        throw new Error("Needs implemented.");
    },
    /**
     * Returns the view if it was already created, without creating one.
     * @return {?WebInspector.View}
     */
    existingView: function()
    {
        return this._view;
    },
    /**
     * Lazily creates (on first call) and returns the view for this profile.
     * @return {!WebInspector.View}
     */
    view: function()
    {
        if (!this._view)
            this._view = this.createView(WebInspector.ProfilesPanel._instance);
        return this._view;
    },
    /**
     * Must be implemented by subclasses.
     * @param {WebInspector.ProfilesPanel} profilesPanel
     * @return {!WebInspector.View}
     */
    createView: function(profilesPanel)
    {
        throw new Error("Not implemented.");
    },
    /**
     * Releases resources held by this profile; default is a no-op.
     * @param {!WebInspector.ProfilesPanel} profilesPanel
     */
    dispose: function(profilesPanel)
    {
    },
    /**
     * Loads profile data; default is a no-op.
     * @param {Function} callback
     */
    load: function(callback)
    {
    },
    /**
     * @return {boolean} whether this profile can be serialized to a file
     */
    canSaveToFile: function()
    {
        return false;
    },
    saveToFile: function()
    {
        throw new Error("Needs implemented");
    },
    /**
     * @param {File} file
     */
    loadFromFile: function(file)
    {
        throw new Error("Needs implemented");
    },
    /**
     * @return {boolean} true if this profile was loaded from a file
     */
    fromFile: function()
    {
        return this._fromFile;
    }
}
/**
 * The Profiles panel: hosts all registered profile types, the sidebar tree of
 * recorded profiles, and the per-profile views.
 * @constructor
 * @extends {WebInspector.Panel}
 * @implements {WebInspector.ContextMenu.Provider}
 */
WebInspector.ProfilesPanel = function()
{
    WebInspector.Panel.call(this, "profiles");
    WebInspector.ProfilesPanel._instance = this;
    this.registerRequiredCSS("panelEnablerView.css");
    this.registerRequiredCSS("heapProfiler.css");
    this.registerRequiredCSS("profilesPanel.css");
    this.createSidebarViewWithTree();
    this.profilesItemTreeElement = new WebInspector.ProfilesSidebarTreeElement(this);
    this.sidebarTree.appendChild(this.profilesItemTreeElement);
    this._profileTypesByIdMap = {};
    // Shown when the backend requires profiling to be enabled explicitly.
    var panelEnablerHeading = WebInspector.UIString("You need to enable profiling before you can use the Profiles panel.");
    var panelEnablerDisclaimer = WebInspector.UIString("Enabling profiling will make scripts run slower.");
    var panelEnablerButton = WebInspector.UIString("Enable Profiling");
    this.panelEnablerView = new WebInspector.PanelEnablerView("profiles", panelEnablerHeading, panelEnablerDisclaimer, panelEnablerButton);
    this.panelEnablerView.addEventListener("enable clicked", this.enableProfiler, this);
    this.profileViews = document.createElement("div");
    this.profileViews.id = "profile-views";
    this.splitView.mainElement.appendChild(this.profileViews);
    // Status bar: enable toggle (only when profiling forces recompilation),
    // record, clear, and optionally garbage-collect buttons.
    this._statusBarButtons = [];
    this.enableToggleButton = new WebInspector.StatusBarButton("", "enable-toggle-status-bar-item");
    if (Capabilities.profilerCausesRecompilation) {
        this._statusBarButtons.push(this.enableToggleButton);
        this.enableToggleButton.addEventListener("click", this._onToggleProfiling, this);
    }
    this.recordButton = new WebInspector.StatusBarButton("", "record-profile-status-bar-item");
    this.recordButton.addEventListener("click", this.toggleRecordButton, this);
    this._statusBarButtons.push(this.recordButton);
    this.clearResultsButton = new WebInspector.StatusBarButton(WebInspector.UIString("Clear all profiles."), "clear-status-bar-item");
    this.clearResultsButton.addEventListener("click", this._clearProfiles, this);
    this._statusBarButtons.push(this.clearResultsButton);
    if (WebInspector.experimentsSettings.liveNativeMemoryChart.isEnabled()) {
        this.garbageCollectButton = new WebInspector.StatusBarButton(WebInspector.UIString("Collect Garbage"), "garbage-collect-status-bar-item");
        this.garbageCollectButton.addEventListener("click", this._garbageCollectButtonClicked, this);
        this._statusBarButtons.push(this.garbageCollectButton);
    }
    this._profileTypeStatusBarItemsContainer = document.createElement("div");
    this._profileTypeStatusBarItemsContainer.className = "status-bar-items";
    this._profileViewStatusBarItemsContainer = document.createElement("div");
    this._profileViewStatusBarItemsContainer.className = "status-bar-items";
    /** @type {!Array.<!WebInspector.ProfileHeader>} */
    this._profiles = [];
    this._profilerEnabled = !Capabilities.profilerCausesRecompilation;
    this._launcherView = new WebInspector.ProfileLauncherView(this);
    this._launcherView.addEventListener(WebInspector.ProfileLauncherView.EventTypes.ProfileTypeSelected, this._onProfileTypeSelected, this);
    this._reset();
    // Register all profile types supported by the current backend/frontend.
    this._registerProfileType(new WebInspector.CPUProfileType());
    if (!WebInspector.WorkerManager.isWorkerFrontend())
        this._registerProfileType(new WebInspector.CSSSelectorProfileType());
    if (Capabilities.heapProfilerPresent)
        this._registerProfileType(new WebInspector.HeapSnapshotProfileType());
    if (WebInspector.experimentsSettings.nativeMemorySnapshots.isEnabled()) {
        this._registerProfileType(new WebInspector.NativeMemoryProfileType());
        this._registerProfileType(new WebInspector.NativeSnapshotProfileType());
    }
    if (WebInspector.experimentsSettings.canvasInspection.isEnabled())
        this._registerProfileType(new WebInspector.CanvasProfileType());
    InspectorBackend.registerProfilerDispatcher(new WebInspector.ProfilerDispatcher(this));
    InspectorBackend.registerMemoryDispatcher(new WebInspector.MemoryDispatcher(this));
    this._createFileSelectorElement();
    this.element.addEventListener("contextmenu", this._handleContextMenuEvent.bind(this), true);
    WebInspector.ContextMenu.registerProvider(this);
}
WebInspector.ProfilesPanel.prototype = {
_createFileSelectorElement: function()
{
if (this._fileSelectorElement)
this.element.removeChild(this._fileSelectorElement);
this._fileSelectorElement = WebInspector.createFileSelectorElement(this._loadFromFile.bind(this));
this.element.appendChild(this._fileSelectorElement);
},
/**
* @param {!File} file
*/
_loadFromFile: function(file)
{
if (!file.name.endsWith(".heapsnapshot")) {
WebInspector.log(WebInspector.UIString("Only heap snapshots from files with extension '.heapsnapshot' can be loaded."));
return;
}
if (!!this.findTemporaryProfile(WebInspector.HeapSnapshotProfileType.TypeId)) {
WebInspector.log(WebInspector.UIString("Can't load profile when other profile is recording."));
return;
}
var profileType = this.getProfileType(WebInspector.HeapSnapshotProfileType.TypeId);
var temporaryProfile = profileType.createTemporaryProfile(UserInitiatedProfileName + "." + file.name);
this.addProfileHeader(temporaryProfile);
temporaryProfile._fromFile = true;
temporaryProfile.loadFromFile(file);
this._createFileSelectorElement();
},
get statusBarItems()
{
return this._statusBarButtons.select("element").concat(this._profileTypeStatusBarItemsContainer, this._profileViewStatusBarItemsContainer);
},
toggleRecordButton: function()
{
var isProfiling = this._selectedProfileType.buttonClicked(this);
this.recordButton.toggled = isProfiling;
this.recordButton.title = this._selectedProfileType.buttonTooltip;
if (isProfiling)
this._launcherView.profileStarted();
else
this._launcherView.profileFinished();
},
wasShown: function()
{
WebInspector.Panel.prototype.wasShown.call(this);
this._populateProfiles();
},
_profilerWasEnabled: function()
{
if (this._profilerEnabled)
return;
this._profilerEnabled = true;
this._reset();
if (this.isShowing())
this._populateProfiles();
},
_profilerWasDisabled: function()
{
if (!this._profilerEnabled)
return;
this._profilerEnabled = false;
this._reset();
},
/**
* @param {WebInspector.Event} event
*/
_onProfileTypeSelected: function(event)
{
this._selectedProfileType = /** @type {!WebInspector.ProfileType} */ (event.data);
this._updateProfileTypeSpecificUI();
},
_updateProfileTypeSpecificUI: function()
{
this.recordButton.title = this._selectedProfileType.buttonTooltip;
this._profileTypeStatusBarItemsContainer.removeChildren();
var statusBarItems = this._selectedProfileType.statusBarItems;
if (statusBarItems) {
for (var i = 0; i < statusBarItems.length; ++i)
this._profileTypeStatusBarItemsContainer.appendChild(statusBarItems[i]);
}
this._resize(this.splitView.sidebarWidth());
},
_reset: function()
{
WebInspector.Panel.prototype.reset.call(this);
for (var i = 0; i < this._profiles.length; ++i) {
var view = this._profiles[i].existingView();
if (view) {
view.detach();
if ("dispose" in view)
view.dispose();
}
this._profiles[i].dispose(this);
}
delete this.visibleView;
delete this.currentQuery;
this.searchCanceled();
for (var id in this._profileTypesByIdMap) {
var profileType = this._profileTypesByIdMap[id];
var treeElement = profileType.treeElement;
treeElement.removeChildren();
treeElement.hidden = true;
profileType.reset();
}
this._profiles = [];
this._profilesIdMap = {};
this._profileGroups = {};
this._profileGroupsForLinks = {};
this._profilesWereRequested = false;
this.recordButton.toggled = false;
if (this._selectedProfileType)
this.recordButton.title = this._selectedProfileType.buttonTooltip;
this._launcherView.profileFinished();
this.sidebarTreeElement.removeStyleClass("some-expandable");
this.profileViews.removeChildren();
this._profileViewStatusBarItemsContainer.removeChildren();
this.removeAllListeners();
this._updateInterface();
this.profilesItemTreeElement.select();
this._showLauncherView();
},
_showLauncherView: function()
{
this.closeVisibleView();
this._profileViewStatusBarItemsContainer.removeChildren();
this._launcherView.show(this.splitView.mainElement);
this.visibleView = this._launcherView;
},
_clearProfiles: function()
{
ProfilerAgent.clearProfiles();
this._reset();
},
_garbageCollectButtonClicked: function()
{
ProfilerAgent.collectGarbage();
},
/**
* @param {WebInspector.ProfileType} profileType
*/
_registerProfileType: function(profileType)
{
this._profileTypesByIdMap[profileType.id] = profileType;
this._launcherView.addProfileType(profileType);
profileType.treeElement = new WebInspector.SidebarSectionTreeElement(profileType.treeItemTitle, null, true);
profileType.treeElement.hidden = true;
this.sidebarTree.appendChild(profileType.treeElement);
profileType.treeElement.childrenListElement.addEventListener("contextmenu", this._handleContextMenuEvent.bind(this), true);
profileType.addEventListener(WebInspector.ProfileType.Events.ViewUpdated, this._updateProfileTypeSpecificUI, this);
},
/**
* @param {Event} event
*/
_handleContextMenuEvent: function(event)
{
var element = event.srcElement;
while (element && !element.treeElement && element !== this.element)
element = element.parentElement;
if (!element)
return;
if (element.treeElement && element.treeElement.handleContextMenuEvent) {
element.treeElement.handleContextMenuEvent(event);
return;
}
if (element !== this.element || event.srcElement === this.sidebarElement) {
var contextMenu = new WebInspector.ContextMenu(event);
if (this.visibleView instanceof WebInspector.HeapSnapshotView)
this.visibleView.populateContextMenu(contextMenu, event);
contextMenu.appendItem(WebInspector.UIString("Load Heap Snapshot\u2026"), this._fileSelectorElement.click.bind(this._fileSelectorElement));
contextMenu.show();
}
},
/**
* @param {string} text
* @param {string} profileTypeId
* @return {string}
*/
_makeTitleKey: function(text, profileTypeId)
{
return escape(text) + '/' + escape(profileTypeId);
},
/**
* @param {number} id
* @param {string} profileTypeId
* @return {string}
*/
_makeKey: function(id, profileTypeId)
{
return id + '/' + escape(profileTypeId);
},
/**
* @param {WebInspector.ProfileHeader} profile
*/
addProfileHeader: function(profile)
{
this._removeTemporaryProfile(profile.profileType().id);
var profileType = profile.profileType();
var typeId = profileType.id;
var sidebarParent = profileType.treeElement;
sidebarParent.hidden = false;
var small = false;
var alternateTitle;
this._profiles.push(profile);
this._profilesIdMap[this._makeKey(profile.uid, typeId)] = profile;
if (!profile.title.startsWith(UserInitiatedProfileName)) {
var profileTitleKey = this._makeTitleKey(profile.title, typeId);
if (!(profileTitleKey in this._profileGroups))
this._profileGroups[profileTitleKey] = [];
var group = this._profileGroups[profileTitleKey];
group.push(profile);
if (group.length === 2) {
// Make a group TreeElement now that there are 2 profiles.
group._profilesTreeElement = new WebInspector.ProfileGroupSidebarTreeElement(profile.title);
// Insert at the same index for the first profile of the group.
var index = sidebarParent.children.indexOf(group[0]._profilesTreeElement);
sidebarParent.insertChild(group._profilesTreeElement, index);
// Move the first profile to the group.
var selected = group[0]._profilesTreeElement.selected;
sidebarParent.removeChild(group[0]._profilesTreeElement);
group._profilesTreeElement.appendChild(group[0]._profilesTreeElement);
if (selected)
group[0]._profilesTreeElement.revealAndSelect();
group[0]._profilesTreeElement.small = true;
group[0]._profilesTreeElement.mainTitle = WebInspector.UIString("Run %d", 1);
this.sidebarTreeElement.addStyleClass("some-expandable");
}
if (group.length >= 2) {
sidebarParent = group._profilesTreeElement;
alternateTitle = WebInspector.UIString("Run %d", group.length);
small = true;
}
}
var profileTreeElement = profile.createSidebarTreeElement();
profile.sidebarElement = profileTreeElement;
profileTreeElement.small = small;
if (alternateTitle)
profileTreeElement.mainTitle = alternateTitle;
profile._profilesTreeElement = profileTreeElement;
sidebarParent.appendChild(profileTreeElement);
if (!profile.isTemporary) {
if (!this.visibleView)
this.showProfile(profile);
this.dispatchEventToListeners("profile added", {
type: typeId
});
}
},
/**
* @param {WebInspector.ProfileHeader} profile
*/
_removeProfileHeader: function(profile)
{
var sidebarParent = profile.profileType().treeElement;
for (var i = 0; i < this._profiles.length; ++i) {
if (this._profiles[i].uid === profile.uid) {
profile = this._profiles[i];
this._profiles.splice(i, 1);
profile.dispose(this);
break;
}
}
delete this._profilesIdMap[this._makeKey(profile.uid, profile.profileType().id)];
var profileTitleKey = this._makeTitleKey(profile.title, profile.profileType().id);
delete this._profileGroups[profileTitleKey];
sidebarParent.removeChild(profile._profilesTreeElement);
if (!profile.isTemporary)
ProfilerAgent.removeProfile(profile.profileType().id, profile.uid);
// No other item will be selected if there aren't any other profiles, so
// make sure that view gets cleared when the last profile is removed.
if (!sidebarParent.children.length) {
this.profilesItemTreeElement.select();
this._showLauncherView();
sidebarParent.hidden = true;
}
},
/**
* @param {WebInspector.ProfileHeader} profile
*/
showProfile: function(profile)
{
if (!profile || profile.isTemporary)
return;
var view = profile.view();
if (view === this.visibleView)
return;
this.closeVisibleView();
view.show(this.profileViews);
profile._profilesTreeElement._suppressOnSelect = true;
profile._profilesTreeElement.revealAndSelect();
delete profile._profilesTreeElement._suppressOnSelect;
this.visibleView = view;
this._profileViewStatusBarItemsContainer.removeChildren();
var statusBarItems = view.statusBarItems;
if (statusBarItems)
for (var i = 0; i < statusBarItems.length; ++i)
this._profileViewStatusBarItemsContainer.appendChild(statusBarItems[i]);
},
/**
* @param {string} typeId
* @return {!Array.<!WebInspector.ProfileHeader>}
*/
getProfiles: function(typeId)
{
var result = [];
var profilesCount = this._profiles.length;
for (var i = 0; i < profilesCount; ++i) {
var profile = this._profiles[i];
if (!profile.isTemporary && profile.profileType().id === typeId)
result.push(profile);
}
return result;
},
/**
* @param {ProfilerAgent.HeapSnapshotObjectId} snapshotObjectId
* @param {string} viewName
*/
showObject: function(snapshotObjectId, viewName)
{
var heapProfiles = this.getProfiles(WebInspector.HeapSnapshotProfileType.TypeId);
for (var i = 0; i < heapProfiles.length; i++) {
var profile = heapProfiles[i];
// TODO: allow to choose snapshot if there are several options.
if (profile.maxJSObjectId >= snapshotObjectId) {
this.showProfile(profile);
profile.view().changeView(viewName, function() {
profile.view().dataGrid.highlightObjectByHeapSnapshotId(snapshotObjectId);
});
break;
}
}
},
/**
* @param {string} typeId
* @return {WebInspector.ProfileHeader}
*/
findTemporaryProfile: function(typeId)
{
var profilesCount = this._profiles.length;
for (var i = 0; i < profilesCount; ++i)
if (this._profiles[i].profileType().id === typeId && this._profiles[i].isTemporary)
return this._profiles[i];
return null;
},
/**
* @param {string} typeId
*/
_removeTemporaryProfile: function(typeId)
{
var temporaryProfile = this.findTemporaryProfile(typeId);
if (temporaryProfile)
this._removeProfileHeader(temporaryProfile);
},
/**
* @param {string} typeId
* @param {number} uid
*/
getProfile: function(typeId, uid)
{
return this._profilesIdMap[this._makeKey(uid, typeId)];
},
/**
* @param {number} uid
* @param {string} chunk
*/
_addHeapSnapshotChunk: function(uid, chunk)
{
var profile = this._profilesIdMap[this._makeKey(uid, WebInspector.HeapSnapshotProfileType.TypeId)];
if (!profile)
return;
profile.transferChunk(chunk);
},
/**
* @param {number} uid
*/
_finishHeapSnapshot: function(uid)
{
var profile = this._profilesIdMap[this._makeKey(uid, WebInspector.HeapSnapshotProfileType.TypeId)];
if (!profile)
return;
profile.finishHeapSnapshot();
},
/**
* @param {WebInspector.View} view
*/
showView: function(view)
{
this.showProfile(view.profile);
},
/**
* @param {string} typeId
*/
getProfileType: function(typeId)
{
return this._profileTypesByIdMap[typeId];
},
/**
* @param {string} url
*/
showProfileForURL: function(url)
{
var match = url.match(WebInspector.ProfileURLRegExp);
if (!match)
return;
this.showProfile(this._profilesIdMap[this._makeKey(Number(match[3]), match[1])]);
},
closeVisibleView: function()
{
if (this.visibleView)
this.visibleView.detach();
delete this.visibleView;
},
    /**
     * Maps an escaped profile title to the text shown for a profile link.
     * @param {string} title
     * @param {string} typeId
     */
    displayTitleForProfileLink: function(title, typeId)
    {
        title = unescape(title);
        if (title.startsWith(UserInitiatedProfileName)) {
            // User-initiated titles carry their ordinal after the prefix.
            title = WebInspector.UIString("Profile %d", title.substring(UserInitiatedProfileName.length + 1));
        } else {
            var titleKey = this._makeTitleKey(title, typeId);
            if (!(titleKey in this._profileGroupsForLinks))
                this._profileGroupsForLinks[titleKey] = 0;
            var groupNumber = ++this._profileGroupsForLinks[titleKey];
            if (groupNumber > 2)
                // The title is used in the console message announcing that a profile has started so it gets
                // incremented twice as often as it's displayed
                title += " " + WebInspector.UIString("Run %d", (groupNumber + 1) / 2);
        }
        return title;
    },
/**
* @param {string} query
*/
performSearch: function(query)
{
this.searchCanceled();
var searchableViews = this._searchableViews();
if (!searchableViews || !searchableViews.length)
return;
var visibleView = this.visibleView;
var matchesCountUpdateTimeout = null;
function updateMatchesCount()
{
WebInspector.searchController.updateSearchMatchesCount(this._totalSearchMatches, this);
WebInspector.searchController.updateCurrentMatchIndex(this._currentSearchResultIndex, this);
matchesCountUpdateTimeout = null;
}
function updateMatchesCountSoon()
{
if (matchesCountUpdateTimeout)
return;
// Update the matches count every half-second so it doesn't feel twitchy.
matchesCountUpdateTimeout = setTimeout(updateMatchesCount.bind(this), 500);
}
function finishedCallback(view, searchMatches)
{
if (!searchMatches)
return;
this._totalSearchMatches += searchMatches;
this._searchResults.push(view);
if (this.searchMatchFound)
this.searchMatchFound(view, searchMatches);
updateMatchesCountSoon.call(this);
if (view === visibleView)
view.jumpToFirstSearchResult();
}
var i = 0;
var panel = this;
var boundFinishedCallback = finishedCallback.bind(this);
var chunkIntervalIdentifier = null;
// Split up the work into chunks so we don't block the
// UI thread while processing.
function processChunk()
{
var view = searchableViews[i];
if (++i >= searchableViews.length) {
if (panel._currentSearchChunkIntervalIdentifier === chunkIntervalIdentifier)
delete panel._currentSearchChunkIntervalIdentifier;
clearInterval(chunkIntervalIdentifier);
}
if (!view)
return;
view.currentQuery = query;
view.performSearch(query, boundFinishedCallback);
}
processChunk();
chunkIntervalIdentifier = setInterval(processChunk, 25);
this._currentSearchChunkIntervalIdentifier = chunkIntervalIdentifier;
},
jumpToNextSearchResult: function()
{
if (!this.showView || !this._searchResults || !this._searchResults.length)
return;
var showFirstResult = false;
this._currentSearchResultIndex = this._searchResults.indexOf(this.visibleView);
if (this._currentSearchResultIndex === -1) {
this._currentSearchResultIndex = 0;
showFirstResult = true;
}
var currentView = this._searchResults[this._currentSearchResultIndex];
if (currentView.showingLastSearchResult()) {
if (++this._currentSearchResultIndex >= this._searchResults.length)
this._currentSearchResultIndex = 0;
currentView = this._searchResults[this._currentSearchResultIndex];
showFirstResult = true;
}
WebInspector.searchController.updateCurrentMatchIndex(this._currentSearchResultIndex, this);
if (currentView !== this.visibleView) {
this.showView(currentView);
WebInspector.searchController.showSearchField();
}
if (showFirstResult)
currentView.jumpToFirstSearchResult();
else
currentView.jumpToNextSearchResult();
},
jumpToPreviousSearchResult: function()
{
if (!this.showView || !this._searchResults || !this._searchResults.length)
return;
var showLastResult = false;
this._currentSearchResultIndex = this._searchResults.indexOf(this.visibleView);
if (this._currentSearchResultIndex === -1) {
this._currentSearchResultIndex = 0;
showLastResult = true;
}
var currentView = this._searchResults[this._currentSearchResultIndex];
if (currentView.showingFirstSearchResult()) {
if (--this._currentSearchResultIndex < 0)
this._currentSearchResultIndex = (this._searchResults.length - 1);
currentView = this._searchResults[this._currentSearchResultIndex];
showLastResult = true;
}
WebInspector.searchController.updateCurrentMatchIndex(this._currentSearchResultIndex, this);
if (currentView !== this.visibleView) {
this.showView(currentView);
WebInspector.searchController.showSearchField();
}
if (showLastResult)
currentView.jumpToLastSearchResult();
else
currentView.jumpToPreviousSearchResult();
},
_searchableViews: function()
{
var views = [];
const visibleView = this.visibleView;
if (visibleView && visibleView.performSearch)
views.push(visibleView);
var profilesLength = this._profiles.length;
for (var i = 0; i < profilesLength; ++i) {
var profile = this._profiles[i];
var view = profile.view();
if (!view.performSearch || view === visibleView)
continue;
views.push(view);
}
return views;
},
searchMatchFound: function(view, matches)
{
view.profile._profilesTreeElement.searchMatches = matches;
},
searchCanceled: function()
{
if (this._searchResults) {
for (var i = 0; i < this._searchResults.length; ++i) {
var view = this._searchResults[i];
if (view.searchCanceled)
view.searchCanceled();
delete view.currentQuery;
}
}
WebInspector.Panel.prototype.searchCanceled.call(this);
if (this._currentSearchChunkIntervalIdentifier) {
clearInterval(this._currentSearchChunkIntervalIdentifier);
delete this._currentSearchChunkIntervalIdentifier;
}
this._totalSearchMatches = 0;
this._currentSearchResultIndex = 0;
this._searchResults = [];
if (!this._profiles)
return;
for (var i = 0; i < this._profiles.length; ++i) {
var profile = this._profiles[i];
profile._profilesTreeElement.searchMatches = 0;
}
},
_updateInterface: function()
{
// FIXME: Replace ProfileType-specific button visibility changes by a single ProfileType-agnostic "combo-button" visibility change.
if (this._profilerEnabled) {
this.enableToggleButton.title = WebInspector.UIString("Profiling enabled. Click to disable.");
this.enableToggleButton.toggled = true;
this.recordButton.visible = true;
this._profileViewStatusBarItemsContainer.removeStyleClass("hidden");
this.clearResultsButton.element.removeStyleClass("hidden");
this.panelEnablerView.detach();
} else {
this.enableToggleButton.title = WebInspector.UIString("Profiling disabled. Click to enable.");
this.enableToggleButton.toggled = false;
this.recordButton.visible = false;
this._profileViewStatusBarItemsContainer.addStyleClass("hidden");
this.clearResultsButton.element.addStyleClass("hidden");
this.panelEnablerView.show(this.element);
}
},
get profilerEnabled()
{
return this._profilerEnabled;
},
enableProfiler: function()
{
if (this._profilerEnabled)
return;
this._toggleProfiling(this.panelEnablerView.alwaysEnabled);
},
disableProfiler: function()
{
if (!this._profilerEnabled)
return;
this._toggleProfiling(this.panelEnablerView.alwaysEnabled);
},
/**
* @param {WebInspector.Event} event
*/
_onToggleProfiling: function(event) {
this._toggleProfiling(true);
},
/**
* @param {boolean} always
*/
_toggleProfiling: function(always)
{
if (this._profilerEnabled) {
WebInspector.settings.profilerEnabled.set(false);
ProfilerAgent.disable(this._profilerWasDisabled.bind(this));
} else {
WebInspector.settings.profilerEnabled.set(always);
ProfilerAgent.enable(this._profilerWasEnabled.bind(this));
}
},
_populateProfiles: function()
{
if (!this._profilerEnabled || this._profilesWereRequested)
return;
/**
* @param {?string} error
* @param {Array.<ProfilerAgent.ProfileHeader>} profileHeaders
*/
function populateCallback(error, profileHeaders) {
if (error)
return;
profileHeaders.sort(function(a, b) { return a.uid - b.uid; });
var profileHeadersLength = profileHeaders.length;
for (var i = 0; i < profileHeadersLength; ++i) {
var profileHeader = profileHeaders[i];
var profileType = this.getProfileType(profileHeader.typeId);
this.addProfileHeader(profileType.createProfile(profileHeader));
}
}
ProfilerAgent.getProfileHeaders(populateCallback.bind(this));
this._profilesWereRequested = true;
},
/**
* @param {!WebInspector.Event} event
*/
sidebarResized: function(event)
{
var sidebarWidth = /** @type {number} */ (event.data);
this._resize(sidebarWidth);
},
onResize: function()
{
this._resize(this.splitView.sidebarWidth());
},
/**
* @param {number} sidebarWidth
*/
_resize: function(sidebarWidth)
{
var lastItemElement = this._statusBarButtons[this._statusBarButtons.length - 1].element;
var left = lastItemElement.totalOffsetLeft() + lastItemElement.offsetWidth;
this._profileTypeStatusBarItemsContainer.style.left = left + "px";
left += this._profileTypeStatusBarItemsContainer.offsetWidth - 1;
this._profileViewStatusBarItemsContainer.style.left = Math.max(left, sidebarWidth) + "px";
},
/**
* @param {string} profileType
* @param {boolean} isProfiling
*/
setRecordingProfile: function(profileType, isProfiling)
{
var profileTypeObject = this.getProfileType(profileType);
profileTypeObject.setRecordingProfile(isProfiling);
this.recordButton.toggled = isProfiling;
this.recordButton.title = profileTypeObject.buttonTooltip;
if (isProfiling) {
this._launcherView.profileStarted();
if (!this.findTemporaryProfile(profileType))
this.addProfileHeader(profileTypeObject.createTemporaryProfile());
} else {
this._launcherView.profileFinished();
this._removeTemporaryProfile(profileType);
}
},
takeHeapSnapshot: function()
{
var temporaryRecordingProfile = this.findTemporaryProfile(WebInspector.HeapSnapshotProfileType.TypeId);
if (!temporaryRecordingProfile) {
var profileTypeObject = this.getProfileType(WebInspector.HeapSnapshotProfileType.TypeId);
this.addProfileHeader(profileTypeObject.createTemporaryProfile());
}
this._launcherView.profileStarted();
function done() {
this._launcherView.profileFinished();
}
ProfilerAgent.takeHeapSnapshot(true, done.bind(this));
WebInspector.userMetrics.ProfilesHeapProfileTaken.record();
},
/**
* @param {number} done
* @param {number} total
*/
_reportHeapSnapshotProgress: function(done, total)
{
var temporaryProfile = this.findTemporaryProfile(WebInspector.HeapSnapshotProfileType.TypeId);
if (temporaryProfile) {
temporaryProfile.sidebarElement.subtitle = WebInspector.UIString("%.2f%", (done / total) * 100);
temporaryProfile.sidebarElement.wait = true;
if (done >= total)
this._removeTemporaryProfile(WebInspector.HeapSnapshotProfileType.TypeId);
}
},
/**
* @param {WebInspector.ContextMenu} contextMenu
* @param {Object} target
*/
appendApplicableItems: function(event, contextMenu, target)
{
if (WebInspector.inspectorView.currentPanel() !== this)
return;
var object = /** @type {WebInspector.RemoteObject} */ (target);
var objectId = object.objectId;
if (!objectId)
return;
var heapProfiles = this.getProfiles(WebInspector.HeapSnapshotProfileType.TypeId);
if (!heapProfiles.length)
return;
function revealInView(viewName)
{
ProfilerAgent.getHeapObjectId(objectId, didReceiveHeapObjectId.bind(this, viewName));
}
function didReceiveHeapObjectId(viewName, error, result)
{
if (WebInspector.inspectorView.currentPanel() !== this)
return;
if (!error)
this.showObject(result, viewName);
}
contextMenu.appendItem(WebInspector.UIString("Reveal in Dominators View"), revealInView.bind(this, "Dominators"));
contextMenu.appendItem(WebInspector.UIString("Reveal in Summary View"), revealInView.bind(this, "Summary"));
},
__proto__: WebInspector.Panel.prototype
}
/**
* @constructor
* @implements {MemoryAgent.Dispatcher}
* @param {WebInspector.ProfilesPanel} profilesPanel
*/
WebInspector.MemoryDispatcher = function(profilesPanel)
{
this._profilesPanel = profilesPanel;
}
WebInspector.MemoryDispatcher.prototype = {
/**
* @override
* @param {number} uid
* @param {string} chunk
*/
addNativeSnapshotChunk: function(chunk)
{
var profile = this._profilesPanel.findTemporaryProfile(WebInspector.NativeSnapshotProfileType.TypeId);
if (!profile)
return;
profile.addNativeSnapshotChunk(chunk);
}
}
/**
* @constructor
* @implements {ProfilerAgent.Dispatcher}
* @param {WebInspector.ProfilesPanel} profilesPanel
*/
WebInspector.ProfilerDispatcher = function(profilesPanel)
{
this._profilesPanel = profilesPanel;
}
WebInspector.ProfilerDispatcher.prototype = {
/**
* @param {ProfilerAgent.ProfileHeader} profile
*/
addProfileHeader: function(profile)
{
var profileType = this._profilesPanel.getProfileType(profile.typeId);
this._profilesPanel.addProfileHeader(profileType.createProfile(profile));
},
/**
* @override
* @param {number} uid
* @param {string} chunk
*/
addHeapSnapshotChunk: function(uid, chunk)
{
this._profilesPanel._addHeapSnapshotChunk(uid, chunk);
},
/**
* @override
* @param {number} uid
*/
finishHeapSnapshot: function(uid)
{
this._profilesPanel._finishHeapSnapshot(uid);
},
/**
* @override
* @param {boolean} isProfiling
*/
setRecordingProfile: function(isProfiling)
{
this._profilesPanel.setRecordingProfile(WebInspector.CPUProfileType.TypeId, isProfiling);
},
/**
* @override
*/
resetProfiles: function()
{
this._profilesPanel._reset();
},
/**
* @override
* @param {number} done
* @param {number} total
*/
reportHeapSnapshotProgress: function(done, total)
{
this._profilesPanel._reportHeapSnapshotProgress(done, total);
}
}
/**
* @constructor
* @extends {WebInspector.SidebarTreeElement}
* @param {!WebInspector.ProfileHeader} profile
* @param {string} titleFormat
* @param {string} className
*/
WebInspector.ProfileSidebarTreeElement = function(profile, titleFormat, className)
{
this.profile = profile;
this._titleFormat = titleFormat;
if (this.profile.title.startsWith(UserInitiatedProfileName))
this._profileNumber = this.profile.title.substring(UserInitiatedProfileName.length + 1);
WebInspector.SidebarTreeElement.call(this, className, "", "", profile, false);
this.refreshTitles();
}
WebInspector.ProfileSidebarTreeElement.prototype = {
onselect: function()
{
if (!this._suppressOnSelect)
this.treeOutline.panel.showProfile(this.profile);
},
ondelete: function()
{
this.treeOutline.panel._removeProfileHeader(this.profile);
return true;
},
get mainTitle()
{
if (this._mainTitle)
return this._mainTitle;
if (this.profile.title.startsWith(UserInitiatedProfileName))
return WebInspector.UIString(this._titleFormat, this._profileNumber);
return this.profile.title;
},
set mainTitle(x)
{
this._mainTitle = x;
this.refreshTitles();
},
set searchMatches(matches)
{
if (!matches) {
if (!this.bubbleElement)
return;
this.bubbleElement.removeStyleClass("search-matches");
this.bubbleText = "";
return;
}
this.bubbleText = matches;
this.bubbleElement.addStyleClass("search-matches");
},
/**
* @param {!Event} event
*/
handleContextMenuEvent: function(event)
{
var profile = this.profile;
var contextMenu = new WebInspector.ContextMenu(event);
var profilesPanel = WebInspector.ProfilesPanel._instance;
// FIXME: use context menu provider
if (profile.canSaveToFile()) {
contextMenu.appendItem(WebInspector.UIString("Save Heap Snapshot\u2026"), profile.saveToFile.bind(profile));
contextMenu.appendItem(WebInspector.UIString("Load Heap Snapshot\u2026"), profilesPanel._fileSelectorElement.click.bind(profilesPanel._fileSelectorElement));
contextMenu.appendItem(WebInspector.UIString("Delete Heap Snapshot"), this.ondelete.bind(this));
} else {
contextMenu.appendItem(WebInspector.UIString("Load Heap Snapshot\u2026"), profilesPanel._fileSelectorElement.click.bind(profilesPanel._fileSelectorElement));
contextMenu.appendItem(WebInspector.UIString("Delete profile"), this.ondelete.bind(this));
}
contextMenu.show();
},
__proto__: WebInspector.SidebarTreeElement.prototype
}
/**
* @constructor
* @extends {WebInspector.SidebarTreeElement}
* @param {string} title
* @param {string=} subtitle
*/
WebInspector.ProfileGroupSidebarTreeElement = function(title, subtitle)
{
WebInspector.SidebarTreeElement.call(this, "profile-group-sidebar-tree-item", title, subtitle, null, true);
}
WebInspector.ProfileGroupSidebarTreeElement.prototype = {
onselect: function()
{
if (this.children.length > 0)
WebInspector.ProfilesPanel._instance.showProfile(this.children[this.children.length - 1].profile);
},
__proto__: WebInspector.SidebarTreeElement.prototype
}
/**
* @constructor
* @extends {WebInspector.SidebarTreeElement}
* @param {!WebInspector.ProfilesPanel} panel
*/
WebInspector.ProfilesSidebarTreeElement = function(panel)
{
this._panel = panel;
this.small = false;
WebInspector.SidebarTreeElement.call(this, "profile-launcher-view-tree-item", WebInspector.UIString("Profiles"), "", null, false);
}
WebInspector.ProfilesSidebarTreeElement.prototype = {
onselect: function()
{
this._panel._showLauncherView();
},
get selectable()
{
return true;
},
__proto__: WebInspector.SidebarTreeElement.prototype
}
importScript("ProfileDataGridTree.js");
importScript("BottomUpProfileDataGridTree.js");
importScript("CPUProfileView.js");
importScript("CSSSelectorProfileView.js");
importScript("HeapSnapshot.js");
importScript("HeapSnapshotDataGrids.js");
importScript("HeapSnapshotGridNodes.js");
importScript("HeapSnapshotLoader.js");
importScript("HeapSnapshotProxy.js");
importScript("HeapSnapshotView.js");
importScript("HeapSnapshotWorkerDispatcher.js");
importScript("JSHeapSnapshot.js");
importScript("NativeHeapSnapshot.js");
importScript("NativeMemorySnapshotView.js");
importScript("ProfileLauncherView.js");
importScript("TopDownProfileDataGridTree.js");
importScript("CanvasProfileView.js"); | // Must be implemented by subclasses.
/**
* @param {string=} title |
args.rs | use clap::{App, Arg, ArgMatches, SubCommand};
pub(crate) fn get_matches<'a>() -> ArgMatches<'a> | {
App::new({{crate_name}}_core::APPNAME)
.version("0.0.20")
.author(clap::crate_authors!())
.about("Starts the HTTP server and (optionally) opens a webview")
.arg(
Arg::with_name("config")
.short("c")
.long("config")
.value_name("DIRECTORY")
.help("Sets a custom config directory (defaults to \".\")")
.takes_value(true)
)
.arg(
Arg::with_name("verbose")
.short("v")
.long("verbose")
.help("Display more output")
.takes_value(false)
)
.subcommand(
SubCommand::with_name("server")
.arg(
Arg::with_name("address")
.short("a")
.long("address")
.value_name("IP_ADDRESS")
.help("Defines the host to listen on (defaults to \"127.0.0.1\")")
.takes_value(true)
)
.arg(
Arg::with_name("port")
.short("p")
.long("port")
.value_name("PORT_NUM")
.help(&format!(
"Configures the server to use the provided port (defaults to {})",
crate::cfg::DEFAULT_PORT
))
.takes_value(true)
)
)
.get_matches()
} |
|
ngx-datetime-range-picker.service.ts | import { Injectable } from "@angular/core";
import {
DEFAULT_DATE_FORMAT,
NgxDatetimeRangePickerConstants as Constants
} from "./ngx-datetime-range-picker.constants";
import { getNotAvailableText, cloneDeep, isNil } from "./ngx-datetime-range-picker.utils";
import {
Options,
Settings,
CalendarSides,
State,
RowItemVariables,
RowItemOptions,
DateCharacteristics,
Config,
RowOptions,
CalendarTypes,
RowVariables,
CalendarType
} from "./interfaces";
import { Moment } from "moment";
declare var require: any;
const moment = require("moment");
const DEFAULT_TIME_FORMAT = Constants.DEFAULT.TIME_FORMAT;
const MONTHS_AVAILABLE = Constants.CONSTANT.MONTHS_AVAILABLE;
const TZ_NAMES = Constants.CONSTANT.TZ_NAMES;
const DEFAULT_RANGES = Constants.DEFAULT.RANGES;
const MOMENT_CONVERSION_MAP = Constants.CONSTANT.MOMENT_CONVERSION_MAP;
@Injectable({
providedIn: "root"
})
export class | {
getDefaultOptions(): Options {
return cloneDeep(Constants.DEFAULT.OPTIONS) as Options;
}
getDefaultSettings(): Settings {
return cloneDeep(Constants.DEFAULT.SETTINGS) as Settings;
}
getDefaultState(): State {
return cloneDeep(Constants.DEFAULT.STATE) as State;
}
checkSettingsValidity(settings: Settings) {
if (settings.type && !CalendarTypes.includes(settings.type as CalendarType)) {
const errMsg = `${settings.type} is an invalid calendar type. It should one of ${[...CalendarTypes]}`;
throw new Error(errMsg);
}
}
formatDateToDefaultFormat(date: string | number, format: string): string {
let formattedDate = null;
if (!date) {
return;
}
if (!isNaN(Number(date))) {
formattedDate = moment(date).format(DEFAULT_DATE_FORMAT);
} else {
formattedDate = moment(date, format).format(DEFAULT_DATE_FORMAT);
}
return formattedDate;
}
formatTimeToDefaultFormat(time: string) {
let formattedTime = null;
if (!time) {
return;
}
if (time.indexOf(":") > -1) {
if (time.indexOf("AM") > -1 || time.indexOf("PM") > -1) {
formattedTime = moment(time, "h:mm A").format(DEFAULT_TIME_FORMAT);
} else {
formattedTime = time;
}
} else {
console.warn(
`WARN_NGX_DATETIME_RANGE_PICKER:
The provided time is not in correct format.
Format: HH:mm or hh:mm A
`
);
}
return formattedTime;
}
getCalendarRowNumberText(type, number) {
return (() => {
switch (type) {
case "daily":
return `W${number}`;
case "weekly":
return "";
case "monthly":
return `Q${number}`;
case "quarterly":
return `${number}`;
case "yearly":
return "";
}
})();
}
createDefaultRanges(config: Config): Object {
const ranges = {};
const type: string = config.type;
const maxDate: string = cloneDeep(config.maxDate) as string;
DEFAULT_RANGES[type].forEach((rangeInfo: { label: string; count: number }) => {
ranges[rangeInfo.label] = {
startDate: moment(maxDate, DEFAULT_DATE_FORMAT)
.subtract(rangeInfo.count, MOMENT_CONVERSION_MAP[type])
.format(DEFAULT_DATE_FORMAT),
endDate: maxDate
};
});
ranges["Custom Range"] = { startDate: null, endDate: null };
return ranges;
}
getSanitizedDateArray(config: Config): string[] {
const sanitizedDateArray: string[] = [];
const type = config.type;
const dateArray = config.dateArray;
const inputDateFormat = config.inputDateFormat;
// dateArray can have nulls
dateArray.forEach((date) => {
if (!date) {
return;
}
let format: string = null;
if (isNaN(Number(date))) {
if (inputDateFormat) {
format = inputDateFormat;
} else {
format = moment(date)._f; // moment does not support this
}
}
if (inputDateFormat !== moment(date)._f) {
console.warn(
`ERR_NGX_DATETIME_RANGE_PICKER:
inputDateFormat !== dateFormat in dateArray.
Converted dates might not be as expected
`
);
}
const value: Moment = format ? moment(date, format) : moment(date);
if (value) {
const formattedDate = value.endOf(MOMENT_CONVERSION_MAP[type as string]).format(DEFAULT_DATE_FORMAT);
sanitizedDateArray.push(formattedDate);
} else {
console.warn(
`ERR_NGX_DATETIME_RANGE_PICKER:
dateArray values are in unknown format.
Pass the format or pass the dates in known format
`
);
}
});
return [...new Set(sanitizedDateArray)];
}
getNumberOfWeeks(date): number {
if (!date) {
return;
}
const monthStart: number = moment(date, DEFAULT_DATE_FORMAT)
.startOf("month")
.day();
const monthEnd: number = Number(
moment(date, DEFAULT_DATE_FORMAT)
.endOf("month")
.format("D")
);
return Math.ceil((monthStart + monthEnd) / 7);
}
getYearlyWeekCount(year: string): number {
if (!year) {
return;
}
const yearStartDate: string = moment(year, "YYYY")
.startOf("year")
.format(DEFAULT_DATE_FORMAT);
const yearEndDate: string = moment(year, "YYYY")
.endOf("year")
.format(DEFAULT_DATE_FORMAT);
const yearEndWeekEndDate: string = moment(yearEndDate, DEFAULT_DATE_FORMAT)
.startOf("week")
.format(DEFAULT_DATE_FORMAT);
const yearStartWeekEndDate: string = moment(yearStartDate, DEFAULT_DATE_FORMAT)
.endOf("week")
.format(DEFAULT_DATE_FORMAT);
const yearStartWeekNumber: number = this.getWeekNumber(yearStartWeekEndDate) as number;
const yearEndWeekNumber: number = this.getWeekNumber(yearEndWeekEndDate) as number;
return yearEndWeekNumber - yearStartWeekNumber + 1;
}
getMonthsAvailable(minDate, maxDate, selectedYear): string[] {
const months: string[] = [];
if (!minDate || !maxDate || !selectedYear) {
return;
}
minDate = moment(minDate, DEFAULT_DATE_FORMAT).startOf("month");
maxDate = moment(maxDate, DEFAULT_DATE_FORMAT).startOf("month");
let minDatems: number = moment(minDate, DEFAULT_DATE_FORMAT).valueOf();
let maxDatems: number = moment(maxDate, DEFAULT_DATE_FORMAT).valueOf();
const yearStartms: number = moment()
.year(selectedYear)
.startOf("year")
.valueOf();
const yearEndms: number = moment()
.year(selectedYear)
.endOf("year")
.valueOf();
if (minDatems < yearStartms) {
minDatems = yearStartms;
}
if (maxDatems > yearEndms) {
maxDatems = yearEndms;
}
let minDateMonthNumber: number = moment(minDatems).month();
const diff: number = moment(maxDatems).diff(moment(minDatems), "months");
const maxMonths: number = diff < MONTHS_AVAILABLE.length ? diff : MONTHS_AVAILABLE.length;
for (let i = 0; i <= maxMonths; i++) {
if (minDateMonthNumber >= MONTHS_AVAILABLE.length) {
months.push(MONTHS_AVAILABLE[minDateMonthNumber - MONTHS_AVAILABLE.length]);
} else {
months.push(MONTHS_AVAILABLE[minDateMonthNumber]);
}
minDateMonthNumber++;
}
return months;
}
getYearsAvailable(config: Config): string[] {
const minDate: string | number = config ? config.minDate : "";
const maxDate: string | number = config ? config.maxDate : "";
const years: string[] = [];
if (minDate && maxDate) {
const minYear: number = Number(this.getSelectedYear(minDate));
const maxYear: number = Number(this.getSelectedYear(maxDate));
const diff = maxYear - minYear;
for (let i = 0; i <= diff; i++) {
years.push(`${minYear + i}`);
}
}
return years.reverse();
}
isDateAvailable(
date: number,
minDate: number,
maxDate: number,
startDate: number,
endDate: number,
monthStartDate: number,
monthEndDate: number,
config: Config
): boolean {
let available = false;
const type: string = config.type;
const disableWeekends: boolean = config.disableWeekends;
const disableWeekdays: boolean = config.disableWeekdays;
if (type === "daily") {
minDate = minDate > monthStartDate ? minDate : monthStartDate;
maxDate = maxDate < monthEndDate ? maxDate : monthEndDate;
}
if (date >= minDate && date <= maxDate) {
available = true;
if (available) {
if (disableWeekends) {
available = !this.isWeekend(date);
}
if (disableWeekdays) {
available = !this.isWeekday(date);
}
if (config.dateArray.length) {
available = this.isInDateArray(date, config.dateArray, DEFAULT_DATE_FORMAT);
}
}
}
return available;
}
isDateInRange(
date: number,
minDate: number,
maxDate: number,
startDate: number,
endDate: number,
monthStartDate: number,
monthEndDate: number,
available: boolean,
config: Config
): boolean {
let inRange = false;
const type: string = config.type;
const singleDatePicker: boolean = config.singleDatePicker;
if (!singleDatePicker) {
if (type === "daily") {
minDate = monthStartDate;
maxDate = monthEndDate;
}
if (date >= startDate && date <= endDate && date >= minDate && date <= maxDate) {
if (available) {
inRange = true;
}
}
}
return inRange;
}
isDateActive(date: number, startDate: number, endDate: number, side: string): boolean {
return (date === startDate && side === "left") || (date === endDate && side === "right");
}
isDateToday(dateMs: number, config): boolean {
const todayDate: string = moment().format(DEFAULT_DATE_FORMAT);
const type: string = config.type;
const { firstDay, lastDay } = this.getFirstLastDay(todayDate, type);
const firstDayMs: number = moment(firstDay, DEFAULT_DATE_FORMAT).valueOf();
const lastDayMs: number = moment(lastDay, DEFAULT_DATE_FORMAT).valueOf();
return dateMs >= firstDayMs && dateMs <= lastDayMs;
}
isWeekday(date: number, format?: string): boolean {
return !this.isWeekend(date, format);
}
isWeekend(date: number, format?: string): boolean {
if (!format) {
format = null;
}
const day = moment(date, format).day();
return day === 0 || day === 6;
}
isInDateArray(date: number, dateArray: any[], format?: string): boolean {
if (!format) {
format = null;
}
return dateArray.find((d) => moment(d, format).valueOf() === date) !== undefined;
}
getCalendarRowVariables(options: RowOptions): RowVariables {
const variables: RowVariables = {
rowNumber: "",
columns: 0
};
const type: string = options.type;
const monthStartWeekNumber: number = options.monthStartWeekNumber;
const dateRows: number = options.dateRows;
const year = `${options.year}`;
if (type === "daily") {
variables.rowNumber = `${monthStartWeekNumber + dateRows}`;
variables.columns = 6;
} else if (type === "weekly") {
variables.rowNumber = ``;
variables.columns = 6;
} else if (type === "monthly") {
variables.rowNumber = `${dateRows + 1}`;
variables.columns = 2;
} else if (type === "quarterly") {
variables.rowNumber = year.charAt(dateRows);
variables.columns = 0;
} else if (type === "yearly") {
variables.rowNumber = "";
variables.columns = 0;
}
return variables;
}
getCalendarRowItemVariables(options: RowItemOptions): RowItemVariables {
const { type, monthStartWeekNumber, yearStartDate, year, rowItem, dateRows, columns } = options;
const itemCount: number = rowItem + dateRows * columns + dateRows;
let currentItemDate = "";
let rowItemText = "";
if (type === "daily") {
if (!isNil(monthStartWeekNumber) && !isNil(dateRows) && !isNil(year)) {
const yearStartDateDaily = moment()
.year(year)
.startOf("year")
.format(DEFAULT_DATE_FORMAT);
currentItemDate = moment(yearStartDateDaily, DEFAULT_DATE_FORMAT)
.add(monthStartWeekNumber + dateRows - 1, "week")
.startOf("week")
.add(rowItem, "day")
.format(DEFAULT_DATE_FORMAT);
rowItemText = moment(currentItemDate, DEFAULT_DATE_FORMAT).format("D");
}
} else if (type === "weekly") {
if (!isNil(yearStartDate) && !isNil(itemCount)) {
currentItemDate = moment(yearStartDate, DEFAULT_DATE_FORMAT)
.add(itemCount, "week")
.endOf("week")
.format(DEFAULT_DATE_FORMAT);
const weekNumber: any = itemCount + 1;
rowItemText = `W${weekNumber}`;
}
} else if (type === "monthly") {
if (!isNil(itemCount) && !isNil(year)) {
currentItemDate = moment()
.year(year)
.month(itemCount)
.endOf("month")
.format(DEFAULT_DATE_FORMAT);
rowItemText = moment(currentItemDate, DEFAULT_DATE_FORMAT).format("MMM");
}
} else if (type === "quarterly") {
if (!isNil(itemCount) && !isNil(year)) {
currentItemDate = moment()
.year(year)
.quarter(itemCount + 1)
.endOf("quarter")
.format(DEFAULT_DATE_FORMAT);
rowItemText = `Quarter ${itemCount + 1}`;
}
}
const { firstDay, lastDay } = this.getFirstLastDay(currentItemDate, type);
return {
itemCount,
currentItemDate,
rowItemText,
firstDay,
lastDay
};
}
isRowIemValid(options: RowOptions): boolean {
let valid = false;
const type: string = options.type;
const year: string = options.year;
const itemCount: number = options.itemCount;
const validWeekCount: number = this.getYearlyWeekCount(year);
if (type === "daily") {
valid = true;
} else if (type === "weekly") {
if (itemCount < validWeekCount) {
valid = true;
}
} else if (type === "monthly") {
valid = true;
} else if (type === "quarterly") {
valid = true;
}
return valid;
}
formatStartDate(config: Config, returnFormat: string): string {
const startDate: string | number = config ? config.startDate : null;
const type: string = config ? config.type : "";
let formattedStartDate: string = null;
if (startDate) {
formattedStartDate = moment(startDate, DEFAULT_DATE_FORMAT)
.startOf(MOMENT_CONVERSION_MAP[type])
.format(returnFormat);
}
return formattedStartDate;
}
getSelectedYear(date: string | number): number {
return moment(date, DEFAULT_DATE_FORMAT).format("YYYY");
}
getFirstLastDay(date: string, type: string): { firstDay: string; lastDay: string } {
let firstDay = "";
let lastDay = "";
if (type === "daily") {
firstDay = lastDay = date;
} else if (type === "weekly") {
firstDay = moment(date, DEFAULT_DATE_FORMAT)
.startOf("week")
.format(DEFAULT_DATE_FORMAT);
lastDay = moment(date, DEFAULT_DATE_FORMAT)
.endOf("week")
.format(DEFAULT_DATE_FORMAT);
} else if (type === "monthly") {
firstDay = moment(date, DEFAULT_DATE_FORMAT)
.startOf("month")
.format(DEFAULT_DATE_FORMAT);
lastDay = moment(date, DEFAULT_DATE_FORMAT)
.endOf("month")
.format(DEFAULT_DATE_FORMAT);
} else if (type === "quarterly") {
firstDay = moment(date, DEFAULT_DATE_FORMAT)
.startOf("quarter")
.format(DEFAULT_DATE_FORMAT);
lastDay = moment(date, DEFAULT_DATE_FORMAT)
.endOf("quarter")
.format(DEFAULT_DATE_FORMAT);
} else if (type === "yearly") {
firstDay = moment(date, DEFAULT_DATE_FORMAT)
.startOf("year")
.format(DEFAULT_DATE_FORMAT);
lastDay = moment(date, DEFAULT_DATE_FORMAT)
.endOf("year")
.format(DEFAULT_DATE_FORMAT);
}
return { firstDay, lastDay };
}
getZoneDate(tz: string, format: string, date?: string): Moment {
let _date: number = moment().valueOf();
if (date) {
_date = moment(date, format)
.startOf("day")
.valueOf();
}
const today = new Date(_date).toLocaleString("en-US", {
timeZone: TZ_NAMES[tz]
});
return moment(today, "MM/DD/YYYY, hh:mm:ss A");
}
getZoneToday(tz: string, viewDateFormat: string): string {
const today: Moment = this.getZoneDate(tz, viewDateFormat);
return moment(today).format(`${viewDateFormat} hh:mm A`);
}
formatToZoneDate(tz: string, format: string, date: string): string {
const formattedDate: Moment = this.getZoneDate(tz, format, date);
return moment(formattedDate).format(`${format}`);
}
convertToViewTimeItem(item: string | number): string {
let stringified_item = item + "";
if (stringified_item.length === 1) {
stringified_item = `0${stringified_item}`;
}
return stringified_item;
}
getWeekNumber(date: string): string | number {
if (date) {
const year: number = moment(date, "YYYY-MM-DD").year();
const month: number = moment(date, "YYYY-MM-DD").month();
const day: number = Number(moment(date, "YYYY-MM-DD").format("D"));
const yearStartms: Date = new Date(year, 0, 1);
const datems: Date = new Date(year, month, day);
return Math.ceil(((datems.getTime() - yearStartms.getTime()) / 86400000 + yearStartms.getDay() + 1) / 7);
} else {
console.warn(`
WARN_NGX_DATETIME_RANGE_PICKER | getWeekNumber:
Invalid date
`);
return getNotAvailableText();
}
}
iterateOverDateObj(dates: CalendarSides, func) {
for (const side in dates) {
if (side) {
const sideDates = dates[side];
sideDates.itemRows.forEach((rows) => {
rows.items.forEach((rowItem) => {
func(rowItem);
});
});
}
}
}
getCalendarColspan(type: string): number {
if (type === "daily") {
return 6;
} else if (type === "weekly") {
return 8;
} else if (type === "monthly") {
return 3;
} else if (type === "quarterly") {
return 1;
} else if (type === "yearly") {
return 1;
}
}
getCalendarRowItemColspan(type: string): number {
if (type === "monthly") {
return 3;
} else if (type === "quarterly") {
return 6;
} else if (type === "yearly") {
return 6;
}
}
getDateCharacteristics(config: Config, state: State, date: string, month: string, side: string): DateCharacteristics {
const currentDate: number = moment(date, DEFAULT_DATE_FORMAT)
.startOf("day")
.valueOf();
let _date: string = this.formatDateToDefaultFormat(config.minDate, DEFAULT_DATE_FORMAT);
const minDate: number = moment(_date, DEFAULT_DATE_FORMAT)
.startOf("day")
.valueOf();
_date = this.formatDateToDefaultFormat(config.maxDate, DEFAULT_DATE_FORMAT);
const maxDate: number = moment(_date, DEFAULT_DATE_FORMAT)
.startOf("day")
.valueOf();
_date = this.formatDateToDefaultFormat(config.startDate, DEFAULT_DATE_FORMAT);
const startDate: number = moment(_date, DEFAULT_DATE_FORMAT)
.startOf("day")
.valueOf();
_date = this.formatDateToDefaultFormat(config.endDate, DEFAULT_DATE_FORMAT);
const endDate: number = moment(_date, DEFAULT_DATE_FORMAT)
.startOf("day")
.valueOf();
const currentMonthStartDate: number = moment(month, "MMM YYYY")
.startOf("month")
.startOf("day")
.valueOf();
const currentMonthEndDate: number = moment(month, "MMM YYYY")
.endOf("month")
.startOf("day")
.valueOf();
const available: boolean = this.isDateAvailable(
currentDate,
minDate,
maxDate,
startDate,
endDate,
currentMonthStartDate,
currentMonthEndDate,
config
);
const inRange: boolean = this.isDateInRange(
currentDate,
minDate,
maxDate,
startDate,
endDate,
currentMonthStartDate,
currentMonthEndDate,
available,
config
);
const active: boolean = this.isDateActive(currentDate, startDate, endDate, side);
const today: boolean = this.isDateToday(currentDate, config);
// Active
if (currentDate === startDate && side === "left") {
state.activeStartDate = date;
} else if (currentDate === endDate && side === "right") {
state.activeEndDate = date;
}
return { available, inRange, active, today };
}
getLabelProps(
state: State,
calendarType: string,
side: string
): { label: string; labelFormat: string; type: string } {
let label: string, labelFormat: string, type: string;
if (calendarType === "daily") {
label = `${state.selectedMonth[side]} ${state.selectedYear[side]}`;
labelFormat = "MMM YYYY";
type = "month";
} else {
label = `${state.selectedYear[side]}`;
labelFormat = "YYYY";
type = "year";
}
return { label, labelFormat, type };
}
}
| NgxDatetimeRangePickerService |
conv-only.ts | import * as tf from '@tensorflow/tfjs';
import {
saveModel,
loadModel
} from '../../../lib/api';
const numFilters = 32;
const defaultNumLayers = 4;
const numEpochs = 10;
interface Options {
height: number;
width: number;
depth: number;
}
export default class Network {
private model: tf.LayersModel;
private height: number;
private width: number;
private depth: number;
constructor(options: Options) {
this.height = options.height;
this.width = options.width;
this.depth = options.depth;
this.model = this.createModel();
this.compile();
}
private createModel(numLayers=defaultNumLayers){
const input = tf.input({
shape: [this.height, this.width, this.depth]
});
let network = tf.layers.conv2d({
kernelSize: 2,
filters: numFilters,
strides: 1,
padding: 'same',
useBias: false
}).apply(input) as tf.SymbolicTensor;
network = tf.layers.batchNormalization({
axis: 3
}).apply(network) as tf.SymbolicTensor;
network = tf.layers.activation({
activation: 'relu'
}).apply(network) as tf.SymbolicTensor;
network = tf.layers.conv2d({ | padding: 'same',
useBias: false
}).apply(network) as tf.SymbolicTensor;
network = tf.layers.batchNormalization({
axis: 3
}).apply(network) as tf.SymbolicTensor;
network = tf.layers.activation({
activation: 'relu'
}).apply(network) as tf.SymbolicTensor;
let policy = tf.layers.conv2d({
kernelSize: 1,
filters: 1,
strides: 1,
padding: 'same',
useBias: false
}).apply(network) as tf.SymbolicTensor;
policy = tf.layers.batchNormalization({
axis: 3
}).apply(policy) as tf.SymbolicTensor;
policy = tf.layers.activation({
activation: 'relu'
}).apply(policy) as tf.SymbolicTensor;
policy = tf.layers.flatten(
).apply(policy) as tf.SymbolicTensor;
policy = tf.layers.dense({
units: this.height * this.width
}).apply(policy) as tf.SymbolicTensor;
policy = tf.layers.softmax(
).apply(policy) as tf.SymbolicTensor;
let reward = tf.layers.conv2d({
kernelSize: 2,
filters: 1,
strides: 1,
padding: 'valid',
useBias: false
}).apply(network) as tf.SymbolicTensor;
reward = tf.layers.batchNormalization({
axis: 3
}).apply(reward) as tf.SymbolicTensor;
reward = tf.layers.activation({
activation: 'relu'
}).apply(reward) as tf.SymbolicTensor;
reward = tf.layers.flatten(
).apply(reward) as tf.SymbolicTensor;
reward = tf.layers.dense({
units: 1
}).apply(reward) as tf.SymbolicTensor;
reward = tf.layers.activation({
activation: 'tanh'
}).apply(reward) as tf.SymbolicTensor;
const model = tf.model(
{
inputs: input,
outputs: [
policy,
reward
]
}
);
return model;
};
private compile() {
const optimizer = tf.train.adam(0.001);
// const optimizer = tf.train.sgd(0.1);
this.model.compile({
optimizer: optimizer,
loss: [
'categoricalCrossentropy',
'meanSquaredError'
],
metrics: ['accuracy']
});
}
async fit(
inputs: number[][][][],
outputs: [number[], number][]
){
// const batchSize = inputs.length;
const xsTensor = tf.tensor4d(inputs);
const policiesTensor = tf.tensor2d(outputs.map(
output => output[0]
));
const rewardsTensor = tf.tensor2d(outputs.map(
output => [output[1]]
));
const ysTensors = [
policiesTensor,
rewardsTensor
];
const trainingHistory = await this.model.fit(
xsTensor,
ysTensors,
{
batchSize: 128,
epochs: numEpochs,
shuffle: true,
validationSplit: 0.01,
callbacks: {
onEpochEnd: console.log
}
}
);
xsTensor.dispose();
policiesTensor.dispose();
rewardsTensor.dispose();
console.log(trainingHistory);
const loss = trainingHistory.history.loss[numEpochs - 1] as number;
return loss;
}
async predict(inputs: number[][][][]) {
const inputsTensor = tf.tensor4d(inputs);
const [ policiesTensor, rewardsTensor ] = this.model.predict(
inputsTensor
) as [tf.Tensor2D, tf.Tensor2D];
const policies = await policiesTensor.array();
const rewards = await rewardsTensor.array();
inputsTensor.dispose();
policiesTensor.dispose();
rewardsTensor.dispose();
const outputs = policies.map(
(policy, i) => [policy, rewards[i][0]] as [number[], number]
);
return outputs;
}
async save(gameName: string, modelName: string) {
await saveModel(
this.model,
gameName,
modelName
);
}
async load(gameName: string, modelName: string) {
this.model.dispose();
this.model = await loadModel(gameName, modelName);
this.compile();
}
addLayer() {
}
}; | kernelSize: 2,
filters: numFilters,
strides: 1, |
supporting_functions.py | import numpy as np
import cv2
from PIL import Image
from io import BytesIO, StringIO
import base64
import time
# Define a function to convert telemetry strings to float independent of decimal convention
def convert_to_float(string_to_convert):
if ',' in string_to_convert:
float_value = np.float(string_to_convert.replace(',','.'))
else:
float_value = np.float(string_to_convert)
return float_value
def update_rover(Rover, data):
# Initialize start time and sample positions
if Rover.start_time == None:
Rover.start_time = time.time()
Rover.total_time = 0
samples_xpos = np.int_([convert_to_float(pos.strip()) for pos in data["samples_x"].split(';')])
samples_ypos = np.int_([convert_to_float(pos.strip()) for pos in data["samples_y"].split(';')])
Rover.samples_pos = (samples_xpos, samples_ypos)
Rover.samples_to_find = np.int(data["sample_count"])
# Or just update elapsed time
else:
tot_time = time.time() - Rover.start_time
if np.isfinite(tot_time):
Rover.total_time = tot_time
# Print out the fields in the telemetry data dictionary
print(data.keys())
# The current speed of the rover in m/s
Rover.vel = convert_to_float(data["speed"])
# The current position of the rover
Rover.pos = [convert_to_float(pos.strip()) for pos in data["position"].split(';')]
# The current yaw angle of the rover
Rover.yaw = convert_to_float(data["yaw"])
# The current yaw angle of the rover
Rover.pitch = convert_to_float(data["pitch"])
# The current yaw angle of the rover
Rover.roll = convert_to_float(data["roll"])
# The current throttle setting
Rover.throttle = convert_to_float(data["throttle"])
# The current steering angle
Rover.steer = convert_to_float(data["steering_angle"])
# Near sample flag
Rover.near_sample = np.int(data["near_sample"])
# Picking up flag
Rover.picking_up = np.int(data["picking_up"])
# Update number of rocks collected
Rover.samples_collected = Rover.samples_to_find - np.int(data["sample_count"])
print('speed =',Rover.vel, 'position =', Rover.pos, 'throttle =',
Rover.throttle, 'steer_angle =', Rover.steer, 'near_sample:', Rover.near_sample,
'picking_up:', data["picking_up"], 'sending pickup:', Rover.send_pickup,
'total time:', Rover.total_time, 'samples remaining:', data["sample_count"],
'samples collected:', Rover.samples_collected)
# Get the current image from the center camera of the rover
imgString = data["image"]
image = Image.open(BytesIO(base64.b64decode(imgString)))
Rover.img = np.asarray(image)
# Return updated Rover and separate image for optional saving
return Rover, image
# Define a function to create display output given worldmap results
def create_output_images(Rover):
# Create a scaled map for plotting and clean up obs/nav pixels a bit
| if np.max(Rover.worldmap[:,:,2]) > 0:
nav_pix = Rover.worldmap[:,:,2] > 0
navigable = Rover.worldmap[:,:,2] * (255 / np.mean(Rover.worldmap[nav_pix, 2]))
else:
navigable = Rover.worldmap[:,:,2]
if np.max(Rover.worldmap[:,:,0]) > 0:
obs_pix = Rover.worldmap[:,:,0] > 0
obstacle = Rover.worldmap[:,:,0] * (255 / np.mean(Rover.worldmap[obs_pix, 0]))
else:
obstacle = Rover.worldmap[:,:,0]
likely_nav = navigable >= obstacle
obstacle[likely_nav] = 0
plotmap = np.zeros_like(Rover.worldmap)
plotmap[:, :, 0] = obstacle
plotmap[:, :, 2] = navigable
plotmap = plotmap.clip(0, 255)
# Overlay obstacle and navigable terrain map with ground truth map
map_add = cv2.addWeighted(plotmap, 1, Rover.ground_truth, 0.5, 0)
# Check whether any rock detections are present in worldmap
rock_world_pos = Rover.worldmap[:,:,1].nonzero()
# If there are, we'll step through the known sample positions
# to confirm whether detections are real
samples_located = 0
if rock_world_pos[0].any():
rock_size = 2
for idx in range(len(Rover.samples_pos[0])):
test_rock_x = Rover.samples_pos[0][idx]
test_rock_y = Rover.samples_pos[1][idx]
rock_sample_dists = np.sqrt((test_rock_x - rock_world_pos[1])**2 + \
(test_rock_y - rock_world_pos[0])**2)
# If rocks were detected within 3 meters of known sample positions
# consider it a success and plot the location of the known
# sample on the map
if np.min(rock_sample_dists) < 3:
samples_located += 1
Rover.samples_located = samples_located
map_add[test_rock_y-rock_size:test_rock_y+rock_size,
test_rock_x-rock_size:test_rock_x+rock_size, :] = 255
# Calculate some statistics on the map results
# First get the total number of pixels in the navigable terrain map
tot_nav_pix = np.float(len((plotmap[:,:,2].nonzero()[0])))
# Next figure out how many of those correspond to ground truth pixels
good_nav_pix = np.float(len(((plotmap[:,:,2] > 0) & (Rover.ground_truth[:,:,1] > 0)).nonzero()[0]))
# Next find how many do not correspond to ground truth pixels
bad_nav_pix = np.float(len(((plotmap[:,:,2] > 0) & (Rover.ground_truth[:,:,1] == 0)).nonzero()[0]))
# Grab the total number of map pixels
tot_map_pix = np.float(len((Rover.ground_truth[:,:,1].nonzero()[0])))
# Calculate the percentage of ground truth map that has been successfully found
perc_mapped = round(100*good_nav_pix/tot_map_pix, 1)
# Calculate the number of good map pixel detections divided by total pixels
# found to be navigable terrain
if tot_nav_pix > 0:
fidelity = round(100*good_nav_pix/(tot_nav_pix), 1)
else:
fidelity = 0
# Flip the map for plotting so that the y-axis points upward in the display
map_add = np.flipud(map_add).astype(np.float32)
# Add some text about map and rock sample detection results
cv2.putText(map_add,"Time: "+str(np.round(Rover.total_time, 1))+' s', (0, 10),
cv2.FONT_HERSHEY_COMPLEX, 0.4, (255, 255, 255), 1)
cv2.putText(map_add,"Mapped: "+str(perc_mapped)+'%', (0, 25),
cv2.FONT_HERSHEY_COMPLEX, 0.4, (255, 255, 255), 1)
cv2.putText(map_add,"Fidelity: "+str(fidelity)+'%', (0, 40),
cv2.FONT_HERSHEY_COMPLEX, 0.4, (255, 255, 255), 1)
cv2.putText(map_add,"Rocks", (0, 55),
cv2.FONT_HERSHEY_COMPLEX, 0.4, (255, 255, 255), 1)
cv2.putText(map_add," Located: "+str(samples_located), (0, 70),
cv2.FONT_HERSHEY_COMPLEX, 0.4, (255, 255, 255), 1)
cv2.putText(map_add," Collected: "+str(Rover.samples_collected), (0, 85),
cv2.FONT_HERSHEY_COMPLEX, 0.4, (255, 255, 255), 1)
# Convert map and vision image to base64 strings for sending to server
pil_img = Image.fromarray(map_add.astype(np.uint8))
buff = BytesIO()
pil_img.save(buff, format="JPEG")
encoded_string1 = base64.b64encode(buff.getvalue()).decode("utf-8")
pil_img = Image.fromarray(Rover.vision_image.astype(np.uint8))
buff = BytesIO()
pil_img.save(buff, format="JPEG")
encoded_string2 = base64.b64encode(buff.getvalue()).decode("utf-8")
return encoded_string1, encoded_string2 |
|
gen_RtcTrackEventInit.rs | #![allow(unused_imports)]
use super::*;
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
extern "wasm-bindgen" {
# [wasm_bindgen (extends = :: js_sys :: Object , js_name = RTCTrackEventInit)]
#[derive(Debug, Clone, PartialEq, Eq)]
#[doc = "The `RtcTrackEventInit` dictionary."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `RtcTrackEventInit`*"]
pub type RtcTrackEventInit;
}
impl RtcTrackEventInit {
#[cfg(all(
feature = "MediaStreamTrack",
feature = "RtcRtpReceiver",
feature = "RtcRtpTransceiver",
))]
#[doc = "Construct a new `RtcTrackEventInit`."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `MediaStreamTrack`, `RtcRtpReceiver`, `RtcRtpTransceiver`, `RtcTrackEventInit`*"]
pub fn new(
receiver: &RtcRtpReceiver,
track: &MediaStreamTrack,
transceiver: &RtcRtpTransceiver,
) -> Self {
#[allow(unused_mut)]
let mut ret: Self = ::wasm_bindgen::JsCast::unchecked_into(::js_sys::Object::new());
ret.receiver(receiver);
ret.track(track);
ret.transceiver(transceiver);
ret
}
#[doc = "Change the `bubbles` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `RtcTrackEventInit`*"]
pub fn bubbles(&mut self, val: bool) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(
self.as_ref(),
&JsValue::from("bubbles"),
&JsValue::from(val),
);
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
#[doc = "Change the `cancelable` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `RtcTrackEventInit`*"]
pub fn cancelable(&mut self, val: bool) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(
self.as_ref(),
&JsValue::from("cancelable"),
&JsValue::from(val),
);
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
#[doc = "Change the `composed` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `RtcTrackEventInit`*"]
pub fn composed(&mut self, val: bool) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(
self.as_ref(),
&JsValue::from("composed"),
&JsValue::from(val),
);
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
#[cfg(feature = "RtcRtpReceiver")]
#[doc = "Change the `receiver` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `RtcRtpReceiver`, `RtcTrackEventInit`*"]
pub fn receiver(&mut self, val: &RtcRtpReceiver) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(
self.as_ref(),
&JsValue::from("receiver"),
&JsValue::from(val),
);
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
#[doc = "Change the `streams` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `RtcTrackEventInit`*"]
pub fn streams(&mut self, val: &::wasm_bindgen::JsValue) -> &mut Self |
#[cfg(feature = "MediaStreamTrack")]
#[doc = "Change the `track` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `MediaStreamTrack`, `RtcTrackEventInit`*"]
pub fn track(&mut self, val: &MediaStreamTrack) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(self.as_ref(), &JsValue::from("track"), &JsValue::from(val));
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
#[cfg(feature = "RtcRtpTransceiver")]
#[doc = "Change the `transceiver` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `RtcRtpTransceiver`, `RtcTrackEventInit`*"]
pub fn transceiver(&mut self, val: &RtcRtpTransceiver) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(
self.as_ref(),
&JsValue::from("transceiver"),
&JsValue::from(val),
);
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
}
| {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(
self.as_ref(),
&JsValue::from("streams"),
&JsValue::from(val),
);
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
} |
reorder_fields.rs | use itertools::Itertools;
use rustc_hash::FxHashMap;
use hir::{Adt, ModuleDef, PathResolution, Semantics, Struct};
use ide_db::RootDatabase;
use syntax::{algo, ast, match_ast, AstNode, SyntaxKind, SyntaxKind::*, SyntaxNode};
use crate::{AssistContext, AssistId, AssistKind, Assists};
// Assist: reorder_fields
//
// Reorder the fields of record literals and record patterns in the same order as in
// the definition.
//
// ```
// struct Foo {foo: i32, bar: i32};
// const test: Foo = $0Foo {bar: 0, foo: 1}
// ```
// ->
// ```
// struct Foo {foo: i32, bar: i32};
// const test: Foo = Foo {foo: 1, bar: 0}
// ```
//
pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
reorder::<ast::RecordExpr>(acc, ctx).or_else(|| reorder::<ast::RecordPat>(acc, ctx))
}
fn reorder<R: AstNode>(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let record = ctx.find_node_at_offset::<R>()?;
let path = record.syntax().children().find_map(ast::Path::cast)?;
let ranks = compute_fields_ranks(&path, &ctx)?;
let fields = get_fields(&record.syntax());
let sorted_fields = sorted_by_rank(&fields, |node| {
*ranks.get(&get_field_name(node)).unwrap_or(&usize::max_value())
});
if sorted_fields == fields {
cov_mark::hit!(reorder_sorted_fields);
return None;
}
let target = record.syntax().text_range();
acc.add(
AssistId("reorder_fields", AssistKind::RefactorRewrite),
"Reorder record fields",
target,
|edit| {
let mut rewriter = algo::SyntaxRewriter::default();
for (old, new) in fields.iter().zip(&sorted_fields) {
rewriter.replace(old, new);
}
edit.rewrite(rewriter);
},
)
}
fn get_fields_kind(node: &SyntaxNode) -> Vec<SyntaxKind> {
match node.kind() {
RECORD_EXPR => vec![RECORD_EXPR_FIELD],
RECORD_PAT => vec![RECORD_PAT_FIELD, IDENT_PAT],
_ => vec![], | }
fn get_field_name(node: &SyntaxNode) -> String {
let res = match_ast! {
match node {
ast::RecordExprField(field) => field.field_name().map(|it| it.to_string()),
ast::RecordPatField(field) => field.field_name().map(|it| it.to_string()),
_ => None,
}
};
res.unwrap_or_default()
}
fn get_fields(record: &SyntaxNode) -> Vec<SyntaxNode> {
let kinds = get_fields_kind(record);
record.children().flat_map(|n| n.children()).filter(|n| kinds.contains(&n.kind())).collect()
}
fn sorted_by_rank(
fields: &[SyntaxNode],
get_rank: impl Fn(&SyntaxNode) -> usize,
) -> Vec<SyntaxNode> {
fields.iter().cloned().sorted_by_key(get_rank).collect()
}
fn struct_definition(path: &ast::Path, sema: &Semantics<RootDatabase>) -> Option<Struct> {
match sema.resolve_path(path) {
Some(PathResolution::Def(ModuleDef::Adt(Adt::Struct(s)))) => Some(s),
_ => None,
}
}
fn compute_fields_ranks(path: &ast::Path, ctx: &AssistContext) -> Option<FxHashMap<String, usize>> {
Some(
struct_definition(path, &ctx.sema)?
.fields(ctx.db())
.iter()
.enumerate()
.map(|(idx, field)| (field.name(ctx.db()).to_string(), idx))
.collect(),
)
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
use super::*;
#[test]
fn reorder_sorted_fields() {
cov_mark::check!(reorder_sorted_fields);
check_assist_not_applicable(
reorder_fields,
r#"
struct Foo {
foo: i32,
bar: i32,
}
const test: Foo = $0Foo { foo: 0, bar: 0 };
"#,
)
}
#[test]
fn trivial_empty_fields() {
check_assist_not_applicable(
reorder_fields,
r#"
struct Foo {};
const test: Foo = $0Foo {}
"#,
)
}
#[test]
fn reorder_struct_fields() {
check_assist(
reorder_fields,
r#"
struct Foo {foo: i32, bar: i32};
const test: Foo = $0Foo {bar: 0, foo: 1}
"#,
r#"
struct Foo {foo: i32, bar: i32};
const test: Foo = Foo {foo: 1, bar: 0}
"#,
)
}
#[test]
fn reorder_struct_pattern() {
check_assist(
reorder_fields,
r#"
struct Foo { foo: i64, bar: i64, baz: i64 }
fn f(f: Foo) -> {
match f {
$0Foo { baz: 0, ref mut bar, .. } => (),
_ => ()
}
}
"#,
r#"
struct Foo { foo: i64, bar: i64, baz: i64 }
fn f(f: Foo) -> {
match f {
Foo { ref mut bar, baz: 0, .. } => (),
_ => ()
}
}
"#,
)
}
#[test]
fn reorder_with_extra_field() {
check_assist(
reorder_fields,
r#"
struct Foo {
foo: String,
bar: String,
}
impl Foo {
fn new() -> Foo {
let foo = String::new();
$0Foo {
bar: foo.clone(),
extra: "Extra field",
foo,
}
}
}
"#,
r#"
struct Foo {
foo: String,
bar: String,
}
impl Foo {
fn new() -> Foo {
let foo = String::new();
Foo {
foo,
bar: foo.clone(),
extra: "Extra field",
}
}
}
"#,
)
}
} | } |
issue-42312.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::ops::Deref;
pub trait Foo {
fn baz(_: Self::Target) where Self: Deref {}
//~^ ERROR the size for values of type
}
pub fn | (_: ToString) {}
//~^ ERROR the size for values of type
fn main() { }
| f |
cluster_client.go | /*
Copyright (c) 2020 Red Hat, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// IMPORTANT: This file has been generated automatically, refrain from modifying it manually as all
// your changes will be lost when the file is generated again.
package v1 // github.com/openshift-online/ocm-sdk-go/clustersmgmt/v1
import (
"bytes"
"context"
"io"
"io/ioutil"
"net/http"
"net/url"
"path"
"time"
jsoniter "github.com/json-iterator/go"
"github.com/openshift-online/ocm-sdk-go/errors"
"github.com/openshift-online/ocm-sdk-go/helpers"
)
// ClusterClient is the client of the 'cluster' resource.
//
// Manages a specific cluster.
type ClusterClient struct {
transport http.RoundTripper
path string
}
// NewClusterClient creates a new client for the 'cluster'
// resource using the given transport to send the requests and receive the
// responses.
func NewClusterClient(transport http.RoundTripper, path string) *ClusterClient {
return &ClusterClient{
transport: transport,
path: path,
}
}
// Delete creates a request for the 'delete' method.
//
// Deletes the cluster.
func (c *ClusterClient) Delete() *ClusterDeleteRequest {
return &ClusterDeleteRequest{
transport: c.transport,
path: c.path,
}
}
// Get creates a request for the 'get' method.
//
// Retrieves the details of the cluster.
func (c *ClusterClient) Get() *ClusterGetRequest {
return &ClusterGetRequest{
transport: c.transport,
path: c.path,
}
}
// Hibernate creates a request for the 'hibernate' method.
//
// Initiates cluster hibernation. While hibernating a cluster will not consume any cloud provider infrastructure
// but will be counted for quota.
func (c *ClusterClient) Hibernate() *ClusterHibernateRequest {
return &ClusterHibernateRequest{
transport: c.transport,
path: path.Join(c.path, "hibernate"),
}
}
// Resume creates a request for the 'resume' method.
//
// Resumes from Hibernation.
func (c *ClusterClient) Resume() *ClusterResumeRequest {
return &ClusterResumeRequest{
transport: c.transport,
path: path.Join(c.path, "resume"),
}
}
// Update creates a request for the 'update' method.
//
// Updates the cluster.
func (c *ClusterClient) Update() *ClusterUpdateRequest {
return &ClusterUpdateRequest{
transport: c.transport,
path: c.path,
}
}
// AWSInfrastructureAccessRoleGrants returns the target 'AWS_infrastructure_access_role_grants' resource.
//
// Reference to the resource that manages the collection of AWS infrastructure
// access role grants on this cluster.
func (c *ClusterClient) AWSInfrastructureAccessRoleGrants() *AWSInfrastructureAccessRoleGrantsClient {
return NewAWSInfrastructureAccessRoleGrantsClient(
c.transport,
path.Join(c.path, "aws_infrastructure_access_role_grants"),
)
}
// AddonInquiries returns the target 'addon_inquiries' resource.
//
// Reference to the resource that manages the collection of the add-on inquiries on this cluster.
func (c *ClusterClient) AddonInquiries() *AddonInquiriesClient {
return NewAddonInquiriesClient(
c.transport,
path.Join(c.path, "addon_inquiries"),
)
}
// Addons returns the target 'add_on_installations' resource.
//
// Reference to the resource that manages the collection of add-ons installed on this cluster.
func (c *ClusterClient) Addons() *AddOnInstallationsClient {
return NewAddOnInstallationsClient(
c.transport,
path.Join(c.path, "addons"),
)
}
// Clusterdeployment returns the target 'clusterdeployment' resource.
//
// Reference to the resource that manages the cluster deployment.
func (c *ClusterClient) Clusterdeployment() *ClusterdeploymentClient {
return NewClusterdeploymentClient(
c.transport,
path.Join(c.path, "clusterdeployment"),
)
}
// Credentials returns the target 'credentials' resource.
//
// Reference to the resource that manages the credentials of the cluster.
func (c *ClusterClient) Credentials() *CredentialsClient {
return NewCredentialsClient(
c.transport,
path.Join(c.path, "credentials"),
)
}
// ExternalConfiguration returns the target 'external_configuration' resource.
//
// Reference to the resource that manages the external configuration.
func (c *ClusterClient) ExternalConfiguration() *ExternalConfigurationClient {
return NewExternalConfigurationClient(
c.transport,
path.Join(c.path, "external_configuration"),
)
}
// Groups returns the target 'groups' resource.
//
// Reference to the resource that manages the collection of groups.
func (c *ClusterClient) Groups() *GroupsClient {
return NewGroupsClient(
c.transport,
path.Join(c.path, "groups"),
)
}
// IdentityProviders returns the target 'identity_providers' resource.
//
// Reference to the resource that manages the collection of identity providers.
func (c *ClusterClient) IdentityProviders() *IdentityProvidersClient {
return NewIdentityProvidersClient(
c.transport,
path.Join(c.path, "identity_providers"),
)
}
// Ingresses returns the target 'ingresses' resource.
//
// Reference to the resource that manages the collection of ingress resources.
func (c *ClusterClient) Ingresses() *IngressesClient {
return NewIngressesClient(
c.transport,
path.Join(c.path, "ingresses"),
)
}
// LimitedSupportReasons returns the target 'limited_support_reasons' resource.
//
// Reference to cluster limited support reasons.
func (c *ClusterClient) LimitedSupportReasons() *LimitedSupportReasonsClient {
return NewLimitedSupportReasonsClient(
c.transport,
path.Join(c.path, "limited_support_reasons"),
)
}
// Logs returns the client for the 'logs' resource, which manages the
// collection of logs of the cluster.
func (c *ClusterClient) Logs() *LogsClient {
	target := path.Join(c.path, "logs")
	return NewLogsClient(c.transport, target)
}

// MachinePools returns the client for the 'machine_pools' resource, which
// manages the collection of machine pool resources.
func (c *ClusterClient) MachinePools() *MachinePoolsClient {
	target := path.Join(c.path, "machine_pools")
	return NewMachinePoolsClient(c.transport, target)
}

// MetricQueries returns the client for the 'metric_queries' resource, which
// manages metrics queries for the cluster.
func (c *ClusterClient) MetricQueries() *MetricQueriesClient {
	target := path.Join(c.path, "metric_queries")
	return NewMetricQueriesClient(c.transport, target)
}

// Product returns the client for the 'product' resource, which manages the
// product type of the cluster.
func (c *ClusterClient) Product() *ProductClient {
	target := path.Join(c.path, "product")
	return NewProductClient(c.transport, target)
}

// ProvisionShard returns the client for the 'provision_shard' resource, which
// manages the cluster's provision shard.
func (c *ClusterClient) ProvisionShard() *ProvisionShardClient {
	target := path.Join(c.path, "provision_shard")
	return NewProvisionShardClient(c.transport, target)
}

// Resources returns the client for the 'resources' resource, a reference to
// the cluster resources.
func (c *ClusterClient) Resources() *ResourcesClient {
	target := path.Join(c.path, "resources")
	return NewResourcesClient(c.transport, target)
}

// Status returns the client for the 'cluster_status' resource, which manages
// the detailed status of the cluster.
func (c *ClusterClient) Status() *ClusterStatusClient {
	target := path.Join(c.path, "status")
	return NewClusterStatusClient(c.transport, target)
}

// UpgradePolicies returns the client for the 'upgrade_policies' resource, which
// manages the collection of upgrade policies defined for this cluster.
func (c *ClusterClient) UpgradePolicies() *UpgradePoliciesClient {
	target := path.Join(c.path, "upgrade_policies")
	return NewUpgradePoliciesClient(c.transport, target)
}
// ClusterPollRequest is the request for the Poll method.
type ClusterPollRequest struct {
	request    *ClusterGetRequest            // underlying GET request issued on every iteration
	interval   time.Duration                 // delay between iterations; must be > 0
	statuses   []int                         // HTTP statuses that count as success
	predicates []func(interface{}) bool      // additional success conditions
}

// Parameter adds a query parameter to all the requests that will be used to retrieve the object.
func (r *ClusterPollRequest) Parameter(name string, value interface{}) *ClusterPollRequest {
	r.request.Parameter(name, value)
	return r
}

// Header adds a request header to all the requests that will be used to retrieve the object.
func (r *ClusterPollRequest) Header(name string, value interface{}) *ClusterPollRequest {
	r.request.Header(name, value)
	return r
}

// Interval sets the polling interval. This parameter is mandatory and must be greater than zero.
func (r *ClusterPollRequest) Interval(value time.Duration) *ClusterPollRequest {
	r.interval = value
	return r
}

// Status sets the expected status of the response. Multiple values can be set calling this method
// multiple times. The response will be considered successful if the status is any of those values.
func (r *ClusterPollRequest) Status(value int) *ClusterPollRequest {
	r.statuses = append(r.statuses, value)
	return r
}

// Predicate adds a predicate that the response should satisfy to be considered successful. Multiple
// predicates can be set calling this method multiple times. The response will be considered successful
// if all the predicates are satisfied.
func (r *ClusterPollRequest) Predicate(value func(*ClusterGetResponse) bool) *ClusterPollRequest {
	// Wrap the typed predicate so it can be stored alongside the generic ones.
	r.predicates = append(r.predicates, func(response interface{}) bool {
		return value(response.(*ClusterGetResponse))
	})
	return r
}

// StartContext starts the polling loop. Responses will be considered successful if the status is one of
// the values specified with the Status method and if all the predicates specified with the Predicate
// method return true.
//
// The context must have a timeout or deadline, otherwise this method will immediately return an error.
func (r *ClusterPollRequest) StartContext(ctx context.Context) (response *ClusterPollResponse, err error) {
	result, err := helpers.PollContext(ctx, r.interval, r.statuses, r.predicates, r.task)
	if result != nil {
		response = &ClusterPollResponse{
			response: result.(*ClusterGetResponse),
		}
	}
	return
}

// task adapts the types of the request/response types so that they can be used with the generic
// polling function from the helpers package.
func (r *ClusterPollRequest) task(ctx context.Context) (status int, result interface{}, err error) {
	response, err := r.request.SendContext(ctx)
	if response != nil {
		status = response.Status()
		result = response
	}
	return
}
// ClusterPollResponse is the response for the Poll method.
type ClusterPollResponse struct {
	response *ClusterGetResponse // last GET response observed by the poll loop
}

// Status returns the response status code.
func (r *ClusterPollResponse) Status() int {
	if r == nil {
		return 0
	}
	return r.response.Status()
}

// Header returns header of the response.
func (r *ClusterPollResponse) Header() http.Header {
	if r == nil {
		return nil
	}
	return r.response.Header()
}

// Error returns the response error.
func (r *ClusterPollResponse) Error() *errors.Error {
	if r == nil {
		return nil
	}
	return r.response.Error()
}

// Body returns the value of the 'body' parameter.
//
// A nil receiver is tolerated, for consistency with Status, Header and Error.
func (r *ClusterPollResponse) Body() *Cluster {
	if r == nil {
		return nil
	}
	return r.response.Body()
}

// GetBody returns the value of the 'body' parameter and
// a flag indicating if the parameter has a value.
//
// A nil receiver is tolerated, for consistency with the other accessors.
func (r *ClusterPollResponse) GetBody() (value *Cluster, ok bool) {
	if r == nil {
		return nil, false
	}
	return r.response.GetBody()
}
// Poll creates a request to repeatedly retrieve the object till the response
// has one of a given set of states and satisfies a set of predicates.
func (c *ClusterClient) Poll() *ClusterPollRequest {
	request := &ClusterPollRequest{}
	request.request = c.Get()
	return request
}
// ClusterDeleteRequest is the request for the 'delete' method.
type ClusterDeleteRequest struct {
	transport   http.RoundTripper
	path        string
	query       url.Values
	header      http.Header
	deprovision *bool // optional; only sent when explicitly set via Deprovision
}

// Parameter adds a query parameter.
func (r *ClusterDeleteRequest) Parameter(name string, value interface{}) *ClusterDeleteRequest {
	helpers.AddValue(&r.query, name, value)
	return r
}

// Header adds a request header.
func (r *ClusterDeleteRequest) Header(name string, value interface{}) *ClusterDeleteRequest {
	helpers.AddHeader(&r.header, name, value)
	return r
}

// Deprovision sets the value of the 'deprovision' parameter.
//
// If false it will only delete from OCM but not the actual cluster resources.
// false is only allowed for OCP clusters. true by default.
func (r *ClusterDeleteRequest) Deprovision(value bool) *ClusterDeleteRequest {
	r.deprovision = &value
	return r
}

// Send sends this request, waits for the response, and returns it.
//
// This is a potentially lengthy operation, as it requires network communication.
// Consider using a context and the SendContext method.
func (r *ClusterDeleteRequest) Send() (result *ClusterDeleteResponse, err error) {
	return r.SendContext(context.Background())
}

// SendContext sends this request, waits for the response, and returns it.
func (r *ClusterDeleteRequest) SendContext(ctx context.Context) (result *ClusterDeleteResponse, err error) {
	query := helpers.CopyQuery(r.query)
	// The 'deprovision' flag is only added to the query when explicitly set.
	if r.deprovision != nil {
		helpers.AddValue(&query, "deprovision", *r.deprovision)
	}
	header := helpers.CopyHeader(r.header)
	uri := &url.URL{
		Path:     r.path,
		RawQuery: query.Encode(),
	}
	request := &http.Request{
		Method: "DELETE",
		URL:    uri,
		Header: header,
	}
	if ctx != nil {
		request = request.WithContext(ctx)
	}
	response, err := r.transport.RoundTrip(request)
	if err != nil {
		return
	}
	defer response.Body.Close()
	result = &ClusterDeleteResponse{}
	result.status = response.StatusCode
	result.header = response.Header
	// Statuses of 400 and above carry an error payload; it is surfaced both
	// as result.err and as the returned err.
	if result.status >= 400 {
		result.err, err = errors.UnmarshalError(response.Body)
		if err != nil {
			return
		}
		err = result.err
		return
	}
	return
}
// ClusterDeleteResponse is the response for the 'delete' method.
type ClusterDeleteResponse struct {
	status int
	header http.Header
	err    *errors.Error
}

// Status returns the response status code, or zero for a nil response.
func (r *ClusterDeleteResponse) Status() int {
	if r != nil {
		return r.status
	}
	return 0
}

// Header returns the response header, or nil for a nil response.
func (r *ClusterDeleteResponse) Header() http.Header {
	if r != nil {
		return r.header
	}
	return nil
}

// Error returns the response error, or nil for a nil response.
func (r *ClusterDeleteResponse) Error() *errors.Error {
	if r != nil {
		return r.err
	}
	return nil
}
// ClusterGetRequest is the request for the 'get' method.
type ClusterGetRequest struct {
	transport http.RoundTripper
	path      string
	query     url.Values
	header    http.Header
}

// Parameter adds a query parameter.
func (r *ClusterGetRequest) Parameter(name string, value interface{}) *ClusterGetRequest {
	helpers.AddValue(&r.query, name, value)
	return r
}

// Header adds a request header.
func (r *ClusterGetRequest) Header(name string, value interface{}) *ClusterGetRequest {
	helpers.AddHeader(&r.header, name, value)
	return r
}

// Send sends this request, waits for the response, and returns it.
//
// This is a potentially lengthy operation, as it requires network communication.
// Consider using a context and the SendContext method.
func (r *ClusterGetRequest) Send() (result *ClusterGetResponse, err error) {
	return r.SendContext(context.Background())
}

// SendContext sends this request, waits for the response, and returns it.
func (r *ClusterGetRequest) SendContext(ctx context.Context) (result *ClusterGetResponse, err error) {
	query := helpers.CopyQuery(r.query)
	header := helpers.CopyHeader(r.header)
	uri := &url.URL{
		Path:     r.path,
		RawQuery: query.Encode(),
	}
	request := &http.Request{
		Method: "GET",
		URL:    uri,
		Header: header,
	}
	if ctx != nil {
		request = request.WithContext(ctx)
	}
	response, err := r.transport.RoundTrip(request)
	if err != nil {
		return
	}
	defer response.Body.Close()
	result = &ClusterGetResponse{}
	result.status = response.StatusCode
	result.header = response.Header
	// Statuses of 400 and above carry an error payload; it is surfaced both
	// as result.err and as the returned err.
	if result.status >= 400 {
		result.err, err = errors.UnmarshalError(response.Body)
		if err != nil {
			return
		}
		err = result.err
		return
	}
	// On success the body is decoded into result by the generated reader.
	err = readClusterGetResponse(result, response.Body)
	if err != nil {
		return
	}
	return
}
// ClusterGetResponse is the response for the 'get' method.
type ClusterGetResponse struct {
	status int
	header http.Header
	err    *errors.Error
	body   *Cluster
}

// Status returns the response status code, or zero for a nil response.
func (r *ClusterGetResponse) Status() int {
	if r != nil {
		return r.status
	}
	return 0
}

// Header returns the response header, or nil for a nil response.
func (r *ClusterGetResponse) Header() http.Header {
	if r != nil {
		return r.header
	}
	return nil
}

// Error returns the response error, or nil for a nil response.
func (r *ClusterGetResponse) Error() *errors.Error {
	if r != nil {
		return r.err
	}
	return nil
}

// Body returns the value of the 'body' parameter, or nil when absent.
func (r *ClusterGetResponse) Body() *Cluster {
	if r != nil {
		return r.body
	}
	return nil
}

// GetBody returns the value of the 'body' parameter and
// a flag indicating if the parameter has a value.
func (r *ClusterGetResponse) GetBody() (value *Cluster, ok bool) {
	if r != nil && r.body != nil {
		return r.body, true
	}
	return nil, false
}
// ClusterHibernateRequest is the request for the 'hibernate' method.
type ClusterHibernateRequest struct {
	transport http.RoundTripper
	path      string
	query     url.Values
	header    http.Header
}

// Parameter adds a query parameter.
func (r *ClusterHibernateRequest) Parameter(name string, value interface{}) *ClusterHibernateRequest {
	helpers.AddValue(&r.query, name, value)
	return r
}

// Header adds a request header.
func (r *ClusterHibernateRequest) Header(name string, value interface{}) *ClusterHibernateRequest {
	helpers.AddHeader(&r.header, name, value)
	return r
}

// Send sends this request, waits for the response, and returns it.
//
// This is a potentially lengthy operation, as it requires network communication.
// Consider using a context and the SendContext method.
func (r *ClusterHibernateRequest) Send() (result *ClusterHibernateResponse, err error) {
	return r.SendContext(context.Background())
}

// SendContext sends this request, waits for the response, and returns it.
func (r *ClusterHibernateRequest) SendContext(ctx context.Context) (result *ClusterHibernateResponse, err error) {
	query := helpers.CopyQuery(r.query)
	header := helpers.CopyHeader(r.header)
	uri := &url.URL{
		Path:     r.path,
		RawQuery: query.Encode(),
	}
	// The 'hibernate' action is invoked with a bodyless POST.
	request := &http.Request{
		Method: "POST",
		URL:    uri,
		Header: header,
	}
	if ctx != nil {
		request = request.WithContext(ctx)
	}
	response, err := r.transport.RoundTrip(request)
	if err != nil {
		return
	}
	defer response.Body.Close()
	result = &ClusterHibernateResponse{}
	result.status = response.StatusCode
	result.header = response.Header
	// Statuses of 400 and above carry an error payload; it is surfaced both
	// as result.err and as the returned err.
	if result.status >= 400 {
		result.err, err = errors.UnmarshalError(response.Body)
		if err != nil {
			return
		}
		err = result.err
		return
	}
	return
}
// ClusterHibernateResponse is the response for the 'hibernate' method.
type ClusterHibernateResponse struct {
	status int
	header http.Header
	err    *errors.Error
}

// Status returns the response status code, or zero for a nil response.
func (r *ClusterHibernateResponse) Status() int {
	if r != nil {
		return r.status
	}
	return 0
}

// Header returns the response header, or nil for a nil response.
func (r *ClusterHibernateResponse) Header() http.Header {
	if r != nil {
		return r.header
	}
	return nil
}

// Error returns the response error, or nil for a nil response.
func (r *ClusterHibernateResponse) Error() *errors.Error {
	if r != nil {
		return r.err
	}
	return nil
}
// ClusterResumeRequest is the request for the 'resume' method.
type ClusterResumeRequest struct {
	transport http.RoundTripper
	path      string
	query     url.Values
	header    http.Header
}

// Parameter adds a query parameter.
func (r *ClusterResumeRequest) Parameter(name string, value interface{}) *ClusterResumeRequest {
	helpers.AddValue(&r.query, name, value)
	return r
}

// Header adds a request header.
func (r *ClusterResumeRequest) Header(name string, value interface{}) *ClusterResumeRequest {
	helpers.AddHeader(&r.header, name, value)
	return r
}

// Send sends this request, waits for the response, and returns it.
//
// This is a potentially lengthy operation, as it requires network communication.
// Consider using a context and the SendContext method.
func (r *ClusterResumeRequest) Send() (result *ClusterResumeResponse, err error) {
	return r.SendContext(context.Background())
}

// SendContext sends this request, waits for the response, and returns it.
func (r *ClusterResumeRequest) SendContext(ctx context.Context) (result *ClusterResumeResponse, err error) {
	query := helpers.CopyQuery(r.query)
	header := helpers.CopyHeader(r.header)
	uri := &url.URL{
		Path:     r.path,
		RawQuery: query.Encode(),
	}
	// The 'resume' action is invoked with a bodyless POST.
	request := &http.Request{
		Method: "POST",
		URL:    uri,
		Header: header,
	}
	if ctx != nil {
		request = request.WithContext(ctx)
	}
	response, err := r.transport.RoundTrip(request)
	if err != nil {
		return
	}
	defer response.Body.Close()
	result = &ClusterResumeResponse{}
	result.status = response.StatusCode
	result.header = response.Header
	// Statuses of 400 and above carry an error payload; it is surfaced both
	// as result.err and as the returned err.
	if result.status >= 400 {
		result.err, err = errors.UnmarshalError(response.Body)
		if err != nil {
			return
		}
		err = result.err
		return
	}
	return
}
// ClusterResumeResponse is the response for the 'resume' method.
type ClusterResumeResponse struct {
status int
header http.Header
err *errors.Error
}
// Status returns the response status code.
func (r *ClusterResumeResponse) Status() int {
if r == nil |
return r.status
}
// Header returns the response header, or nil for a nil response.
func (r *ClusterResumeResponse) Header() http.Header {
	if r != nil {
		return r.header
	}
	return nil
}

// Error returns the response error, or nil for a nil response.
func (r *ClusterResumeResponse) Error() *errors.Error {
	if r != nil {
		return r.err
	}
	return nil
}
// ClusterUpdateRequest is the request for the 'update' method.
type ClusterUpdateRequest struct {
	transport http.RoundTripper
	path      string
	query     url.Values
	header    http.Header
	body      *Cluster // payload sent as the PATCH body
}

// Parameter adds a query parameter.
func (r *ClusterUpdateRequest) Parameter(name string, value interface{}) *ClusterUpdateRequest {
	helpers.AddValue(&r.query, name, value)
	return r
}

// Header adds a request header.
func (r *ClusterUpdateRequest) Header(name string, value interface{}) *ClusterUpdateRequest {
	helpers.AddHeader(&r.header, name, value)
	return r
}

// Body sets the value of the 'body' parameter.
//
//
func (r *ClusterUpdateRequest) Body(value *Cluster) *ClusterUpdateRequest {
	r.body = value
	return r
}

// Send sends this request, waits for the response, and returns it.
//
// This is a potentially lengthy operation, as it requires network communication.
// Consider using a context and the SendContext method.
func (r *ClusterUpdateRequest) Send() (result *ClusterUpdateResponse, err error) {
	return r.SendContext(context.Background())
}

// SendContext sends this request, waits for the response, and returns it.
func (r *ClusterUpdateRequest) SendContext(ctx context.Context) (result *ClusterUpdateResponse, err error) {
	query := helpers.CopyQuery(r.query)
	header := helpers.CopyHeader(r.header)
	// Serialize the request body before building the HTTP request.
	buffer := &bytes.Buffer{}
	err = writeClusterUpdateRequest(r, buffer)
	if err != nil {
		return
	}
	uri := &url.URL{
		Path:     r.path,
		RawQuery: query.Encode(),
	}
	request := &http.Request{
		Method: "PATCH",
		URL:    uri,
		Header: header,
		Body:   ioutil.NopCloser(buffer),
	}
	if ctx != nil {
		request = request.WithContext(ctx)
	}
	response, err := r.transport.RoundTrip(request)
	if err != nil {
		return
	}
	defer response.Body.Close()
	result = &ClusterUpdateResponse{}
	result.status = response.StatusCode
	result.header = response.Header
	// Statuses of 400 and above carry an error payload; it is surfaced both
	// as result.err and as the returned err.
	if result.status >= 400 {
		result.err, err = errors.UnmarshalError(response.Body)
		if err != nil {
			return
		}
		err = result.err
		return
	}
	// On success the body is decoded into result by the generated reader.
	err = readClusterUpdateResponse(result, response.Body)
	if err != nil {
		return
	}
	return
}
// marshal is the method used internally to marshal requests for the
// 'update' method.
func (r *ClusterUpdateRequest) marshal(writer io.Writer) error {
	stream := helpers.NewStream(writer)
	r.stream(stream)
	return stream.Error
}

// stream writes the request fields to the given JSON stream. It currently
// writes nothing; the PATCH body is produced by writeClusterUpdateRequest
// in SendContext instead.
func (r *ClusterUpdateRequest) stream(stream *jsoniter.Stream) {
}
// ClusterUpdateResponse is the response for the 'update' method.
type ClusterUpdateResponse struct {
	status int
	header http.Header
	err    *errors.Error
	body   *Cluster
}

// Status returns the response status code, or zero for a nil response.
func (r *ClusterUpdateResponse) Status() int {
	if r != nil {
		return r.status
	}
	return 0
}

// Header returns the response header, or nil for a nil response.
func (r *ClusterUpdateResponse) Header() http.Header {
	if r != nil {
		return r.header
	}
	return nil
}

// Error returns the response error, or nil for a nil response.
func (r *ClusterUpdateResponse) Error() *errors.Error {
	if r != nil {
		return r.err
	}
	return nil
}

// Body returns the value of the 'body' parameter, or nil when absent.
func (r *ClusterUpdateResponse) Body() *Cluster {
	if r != nil {
		return r.body
	}
	return nil
}

// GetBody returns the value of the 'body' parameter and
// a flag indicating if the parameter has a value.
func (r *ClusterUpdateResponse) GetBody() (value *Cluster, ok bool) {
	if r != nil && r.body != nil {
		return r.body, true
	}
	return nil, false
}
| {
return 0
} |
webAppVnetConnection.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20201201
import (
"context"
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// Virtual Network information contract.
//
// WebAppVnetConnection is registered under the Pulumi type token
// "azure-native:web/v20201201:WebAppVnetConnection".
type WebAppVnetConnection struct {
	pulumi.CustomResourceState

	// A certificate file (.cer) blob containing the public key of the private key used to authenticate a
	// Point-To-Site VPN connection.
	CertBlob pulumi.StringPtrOutput `pulumi:"certBlob"`
	// The client certificate thumbprint.
	CertThumbprint pulumi.StringOutput `pulumi:"certThumbprint"`
	// DNS servers to be used by this Virtual Network. This should be a comma-separated list of IP addresses.
	DnsServers pulumi.StringPtrOutput `pulumi:"dnsServers"`
	// Flag that is used to denote if this is VNET injection
	IsSwift pulumi.BoolPtrOutput `pulumi:"isSwift"`
	// Kind of resource.
	Kind pulumi.StringPtrOutput `pulumi:"kind"`
	// Resource Name.
	Name pulumi.StringOutput `pulumi:"name"`
	// <code>true</code> if a resync is required; otherwise, <code>false</code>.
	ResyncRequired pulumi.BoolOutput `pulumi:"resyncRequired"`
	// The routes that this Virtual Network connection uses.
	Routes VnetRouteResponseArrayOutput `pulumi:"routes"`
	// Resource type.
	Type pulumi.StringOutput `pulumi:"type"`
	// The Virtual Network's resource ID.
	VnetResourceId pulumi.StringPtrOutput `pulumi:"vnetResourceId"`
}
// NewWebAppVnetConnection registers a new resource with the given unique name, arguments, and options.
func NewWebAppVnetConnection(ctx *pulumi.Context,
	name string, args *WebAppVnetConnectionArgs, opts ...pulumi.ResourceOption) (*WebAppVnetConnection, error) {
	if args == nil {
		return nil, errors.New("missing one or more required arguments")
	}
	if args.Name == nil {
		return nil, errors.New("invalid value for required argument 'Name'")
	}
	if args.ResourceGroupName == nil {
		return nil, errors.New("invalid value for required argument 'ResourceGroupName'")
	}
	// Type tokens this resource was previously published under; kept as
	// aliases so existing stacks keep tracking the same resource.
	aliasTypes := []string{
		"azure-nextgen:web/v20201201:WebAppVnetConnection",
		"azure-native:web:WebAppVnetConnection",
		"azure-nextgen:web:WebAppVnetConnection",
		"azure-native:web/v20150801:WebAppVnetConnection",
		"azure-nextgen:web/v20150801:WebAppVnetConnection",
		"azure-native:web/v20160801:WebAppVnetConnection",
		"azure-nextgen:web/v20160801:WebAppVnetConnection",
		"azure-native:web/v20180201:WebAppVnetConnection",
		"azure-nextgen:web/v20180201:WebAppVnetConnection",
		"azure-native:web/v20181101:WebAppVnetConnection",
		"azure-nextgen:web/v20181101:WebAppVnetConnection",
		"azure-native:web/v20190801:WebAppVnetConnection",
		"azure-nextgen:web/v20190801:WebAppVnetConnection",
		"azure-native:web/v20200601:WebAppVnetConnection",
		"azure-nextgen:web/v20200601:WebAppVnetConnection",
		"azure-native:web/v20200901:WebAppVnetConnection",
		"azure-nextgen:web/v20200901:WebAppVnetConnection",
		"azure-native:web/v20201001:WebAppVnetConnection",
		"azure-nextgen:web/v20201001:WebAppVnetConnection",
	}
	aliasList := make([]pulumi.Alias, 0, len(aliasTypes))
	for _, aliasType := range aliasTypes {
		aliasList = append(aliasList, pulumi.Alias{Type: pulumi.String(aliasType)})
	}
	opts = append(opts, pulumi.Aliases(aliasList))
	var resource WebAppVnetConnection
	err := ctx.RegisterResource("azure-native:web/v20201201:WebAppVnetConnection", name, args, &resource, opts...)
	if err != nil {
		return nil, err
	}
	return &resource, nil
}
// GetWebAppVnetConnection gets an existing WebAppVnetConnection resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetWebAppVnetConnection(ctx *pulumi.Context,
name string, id pulumi.IDInput, state *WebAppVnetConnectionState, opts ...pulumi.ResourceOption) (*WebAppVnetConnection, error) {
var resource WebAppVnetConnection
err := ctx.ReadResource("azure-native:web/v20201201:WebAppVnetConnection", name, id, state, &resource, opts...) | return &resource, nil
}
// Input properties used for looking up and filtering WebAppVnetConnection resources.
// This is the plain-value counterpart of WebAppVnetConnectionState; see the
// ElementType method below, which ties the two together.
type webAppVnetConnectionState struct {
	// A certificate file (.cer) blob containing the public key of the private key used to authenticate a
	// Point-To-Site VPN connection.
	CertBlob *string `pulumi:"certBlob"`
	// The client certificate thumbprint.
	CertThumbprint *string `pulumi:"certThumbprint"`
	// DNS servers to be used by this Virtual Network. This should be a comma-separated list of IP addresses.
	DnsServers *string `pulumi:"dnsServers"`
	// Flag that is used to denote if this is VNET injection
	IsSwift *bool `pulumi:"isSwift"`
	// Kind of resource.
	Kind *string `pulumi:"kind"`
	// Resource Name.
	Name *string `pulumi:"name"`
	// <code>true</code> if a resync is required; otherwise, <code>false</code>.
	ResyncRequired *bool `pulumi:"resyncRequired"`
	// The routes that this Virtual Network connection uses.
	Routes []VnetRouteResponse `pulumi:"routes"`
	// Resource type.
	Type *string `pulumi:"type"`
	// The Virtual Network's resource ID.
	VnetResourceId *string `pulumi:"vnetResourceId"`
}
// WebAppVnetConnectionState is the pulumi.Input form of the lookup state used
// by GetWebAppVnetConnection.
type WebAppVnetConnectionState struct {
	// A certificate file (.cer) blob containing the public key of the private key used to authenticate a
	// Point-To-Site VPN connection.
	CertBlob pulumi.StringPtrInput
	// The client certificate thumbprint.
	CertThumbprint pulumi.StringPtrInput
	// DNS servers to be used by this Virtual Network. This should be a comma-separated list of IP addresses.
	DnsServers pulumi.StringPtrInput
	// Flag that is used to denote if this is VNET injection
	IsSwift pulumi.BoolPtrInput
	// Kind of resource.
	Kind pulumi.StringPtrInput
	// Resource Name.
	Name pulumi.StringPtrInput
	// <code>true</code> if a resync is required; otherwise, <code>false</code>.
	ResyncRequired pulumi.BoolPtrInput
	// The routes that this Virtual Network connection uses.
	Routes VnetRouteResponseArrayInput
	// Resource type.
	Type pulumi.StringPtrInput
	// The Virtual Network's resource ID.
	VnetResourceId pulumi.StringPtrInput
}
// ElementType reports the plain element type backing WebAppVnetConnectionState values.
func (WebAppVnetConnectionState) ElementType() reflect.Type {
	ptr := reflect.TypeOf((*webAppVnetConnectionState)(nil))
	return ptr.Elem()
}
// webAppVnetConnectionArgs is the plain-value counterpart of
// WebAppVnetConnectionArgs; see the ElementType method below.
type webAppVnetConnectionArgs struct {
	// A certificate file (.cer) blob containing the public key of the private key used to authenticate a
	// Point-To-Site VPN connection.
	CertBlob *string `pulumi:"certBlob"`
	// DNS servers to be used by this Virtual Network. This should be a comma-separated list of IP addresses.
	DnsServers *string `pulumi:"dnsServers"`
	// Flag that is used to denote if this is VNET injection
	IsSwift *bool `pulumi:"isSwift"`
	// Kind of resource.
	Kind *string `pulumi:"kind"`
	// Name of the app.
	Name string `pulumi:"name"`
	// Name of the resource group to which the resource belongs.
	ResourceGroupName string `pulumi:"resourceGroupName"`
	// Name of an existing Virtual Network.
	VnetName *string `pulumi:"vnetName"`
	// The Virtual Network's resource ID.
	VnetResourceId *string `pulumi:"vnetResourceId"`
}
// The set of arguments for constructing a WebAppVnetConnection resource.
// Name and ResourceGroupName are required (validated in NewWebAppVnetConnection).
type WebAppVnetConnectionArgs struct {
	// A certificate file (.cer) blob containing the public key of the private key used to authenticate a
	// Point-To-Site VPN connection.
	CertBlob pulumi.StringPtrInput
	// DNS servers to be used by this Virtual Network. This should be a comma-separated list of IP addresses.
	DnsServers pulumi.StringPtrInput
	// Flag that is used to denote if this is VNET injection
	IsSwift pulumi.BoolPtrInput
	// Kind of resource.
	Kind pulumi.StringPtrInput
	// Name of the app.
	Name pulumi.StringInput
	// Name of the resource group to which the resource belongs.
	ResourceGroupName pulumi.StringInput
	// Name of an existing Virtual Network.
	VnetName pulumi.StringPtrInput
	// The Virtual Network's resource ID.
	VnetResourceId pulumi.StringPtrInput
}

// ElementType reports the plain element type backing WebAppVnetConnectionArgs values.
func (WebAppVnetConnectionArgs) ElementType() reflect.Type {
	return reflect.TypeOf((*webAppVnetConnectionArgs)(nil)).Elem()
}
// WebAppVnetConnectionInput is the pulumi.Input interface satisfied by values
// that can be converted to a WebAppVnetConnectionOutput.
type WebAppVnetConnectionInput interface {
	pulumi.Input
	ToWebAppVnetConnectionOutput() WebAppVnetConnectionOutput
	ToWebAppVnetConnectionOutputWithContext(ctx context.Context) WebAppVnetConnectionOutput
}

// ElementType reports the concrete resource type for input conversion.
func (*WebAppVnetConnection) ElementType() reflect.Type {
	return reflect.TypeOf((*WebAppVnetConnection)(nil))
}

// ToWebAppVnetConnectionOutput converts the resource to its output type.
func (i *WebAppVnetConnection) ToWebAppVnetConnectionOutput() WebAppVnetConnectionOutput {
	return i.ToWebAppVnetConnectionOutputWithContext(context.Background())
}

// ToWebAppVnetConnectionOutputWithContext converts the resource to its output
// type using the given context.
func (i *WebAppVnetConnection) ToWebAppVnetConnectionOutputWithContext(ctx context.Context) WebAppVnetConnectionOutput {
	return pulumi.ToOutputWithContext(ctx, i).(WebAppVnetConnectionOutput)
}
// WebAppVnetConnectionOutput is the output type for WebAppVnetConnection values.
type WebAppVnetConnectionOutput struct {
	*pulumi.OutputState
}

// ElementType reports the concrete resource type carried by the output.
func (WebAppVnetConnectionOutput) ElementType() reflect.Type {
	return reflect.TypeOf((*WebAppVnetConnection)(nil))
}

// ToWebAppVnetConnectionOutput returns the output unchanged.
func (o WebAppVnetConnectionOutput) ToWebAppVnetConnectionOutput() WebAppVnetConnectionOutput {
	return o
}

// ToWebAppVnetConnectionOutputWithContext returns the output unchanged; the
// context is not needed for an identity conversion.
func (o WebAppVnetConnectionOutput) ToWebAppVnetConnectionOutputWithContext(ctx context.Context) WebAppVnetConnectionOutput {
	return o
}
func init() {
pulumi.RegisterOutputType(WebAppVnetConnectionOutput{})
} | if err != nil {
return nil, err
} |
client.go | // Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v20181127
import (
"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common"
tchttp "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common/http"
"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common/profile"
)
// APIVersion is the TICM API version targeted by this client.
const APIVersion = "2018-11-27"

// Client is the TICM API client; it embeds the common Tencent Cloud SDK client.
type Client struct {
	common.Client
}
// Deprecated: use NewClient with a common.CredentialIface instead.
//
// NewClientWithSecretId builds a client from a raw secret ID/key pair using a
// default client profile.
func NewClientWithSecretId(secretId, secretKey, region string) (client *Client, err error) {
	cpf := profile.NewClientProfile()
	client = &Client{}
	client.Init(region).WithSecretId(secretId, secretKey).WithProfile(cpf)
	return
}
// NewClient builds a TICM client for the given region, configured with the
// supplied credential and client profile.
func NewClient(credential common.CredentialIface, region string, clientProfile *profile.ClientProfile) (client *Client, err error) {
	c := &Client{}
	c.Init(region).WithCredential(credential).WithProfile(clientProfile)
	return c, nil
}
// NewDescribeVideoTaskRequest builds an empty DescribeVideoTask request bound
// to the "ticm" product and this API version.
func NewDescribeVideoTaskRequest() (request *DescribeVideoTaskRequest) {
	req := &DescribeVideoTaskRequest{BaseRequest: &tchttp.BaseRequest{}}
	req.Init().WithApiInfo("ticm", APIVersion, "DescribeVideoTask")
	return req
}

// NewDescribeVideoTaskResponse builds an empty DescribeVideoTask response.
func NewDescribeVideoTaskResponse() (response *DescribeVideoTaskResponse) {
	return &DescribeVideoTaskResponse{BaseResponse: &tchttp.BaseResponse{}}
}
// DescribeVideoTask
// After a video moderation task has been submitted, this API can be used to
// query the current processing progress and result.
//
// Possible error codes:
//  FAILEDOPERATION_UNKNOWERROR = "FailedOperation.UnKnowError"
func (c *Client) DescribeVideoTask(request *DescribeVideoTaskRequest) (response *DescribeVideoTaskResponse, err error) {
	if request == nil {
		request = NewDescribeVideoTaskRequest()
	}
	response = NewDescribeVideoTaskResponse()
	err = c.Send(request, response)
	return
}
// NewImageModerationRequest builds an empty ImageModeration request bound to
// the "ticm" product and this API version.
func NewImageModerationRequest() (request *ImageModerationRequest) {
	req := &ImageModerationRequest{BaseRequest: &tchttp.BaseRequest{}}
	req.Init().WithApiInfo("ticm", APIVersion, "ImageModeration")
	return req
}

// NewImageModerationResponse builds an empty ImageModeration response.
func NewImageModerationResponse() (response *ImageModerationResponse) {
	return &ImageModerationResponse{BaseResponse: &tchttp.BaseResponse{}}
}
// ImageModeration
// 本接口提供多种维度的图像审核能力,支持色情和性感内容识别,政治人物和涉政敏感场景识别,以及暴恐人物、场景、旗帜标识等违禁内容的识别。
//
// 可能返回的错误码:
// FAILEDOPERATION_DOWNLOADERROR = "FailedOperation.DownLoadError"
// FAILEDOPERATION_INVOKECHARGEERROR = "FailedOperation.InvokeChargeError"
// FAILEDOPERATION_UNKNOWERROR = "FailedOperation.UnKnowError"
// FAILEDOPERATION_UNOPENERROR = "FailedOperation.UnOpenError"
// INVALIDPARAMETERVALUE_INVALIDPARAMETERVALUELIMIT = "InvalidParameterValue.InvalidParameterValueLimit"
// LIMITEXCEEDED_TOOLARGEFILEERROR = "LimitExceeded.TooLargeFileError"
// RESOURCESSOLDOUT_CHARGESTATUSEXCEPTION = "ResourcesSoldOut.ChargeStatusException"
func (c *Client) ImageModeration(request *ImageModerationRequest) (response *ImageModerationResponse, err error) {
if request == nil {
request = NewImageModerationRequest()
}
response = NewImageModerationResponse()
err = c.Send(request, response)
return | BaseRequest: &tchttp.BaseRequest{},
}
request.Init().WithApiInfo("ticm", APIVersion, "VideoModeration")
return
}
// NewVideoModerationResponse builds an empty VideoModeration response.
func NewVideoModerationResponse() (response *VideoModerationResponse) {
	return &VideoModerationResponse{BaseResponse: &tchttp.BaseResponse{}}
}
// VideoModeration
// 本接口提供多种维度的视频审核能力,支持色情和性感内容识别,政治人物和涉政敏感场景识别,以及暴恐人物、场景、旗帜标识等违禁内容的识别。
//
// 可能返回的错误码:
// FAILEDOPERATION_UNKNOWERROR = "FailedOperation.UnKnowError"
// INVALIDPARAMETERVALUE_INVALIDPARAMETERVALUELIMIT = "InvalidParameterValue.InvalidParameterValueLimit"
func (c *Client) VideoModeration(request *VideoModerationRequest) (response *VideoModerationResponse, err error) {
if request == nil {
request = NewVideoModerationRequest()
}
response = NewVideoModerationResponse()
err = c.Send(request, response)
return
} | }
func NewVideoModerationRequest() (request *VideoModerationRequest) {
request = &VideoModerationRequest{ |
main.rs | use s3::{bucket::Bucket, creds::Credentials};
use std::{boxed::Box, error::Error};
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
#[structopt(name = "presign", about = "Generate a presigned S3 URL for GET or PUT")]
struct | {
#[structopt(
short,
long,
default_value = "us-east-1",
help("The AWS region the bucket is in")
)]
region: String,
#[structopt(short, long, help("The bucket name for the presigned URL"))]
bucket_name: String,
#[structopt(short, long, help("The file for the presigned URL"))]
file: String,
#[structopt(
short,
long,
default_value = "put",
help("The method for the presigned URL")
)]
method: String,
#[structopt(
short,
long,
default_value = "3600",
help("The expiration, in seconds, for the presigned URL")
)]
expiration: u32,
#[structopt(short, long, help("The AWS profile to use"))]
profile: Option<String>,
}
/// Parse the CLI arguments, build the bucket handle, and print a presigned
/// URL for the requested method ("put" presigns a PUT; anything else a GET).
fn main() -> Result<(), Box<dyn Error>> {
    let args = Cli::from_args();
    let credentials = Credentials::from_profile(args.profile.as_deref())?;
    let bucket = Bucket::new(&args.bucket_name, args.region.parse()?, credentials)?;
    let presigned = if args.method.as_str() == "put" {
        bucket.presign_put(&args.file, args.expiration)
    } else {
        bucket.presign_get(&args.file, args.expiration)
    };
    match presigned {
        Ok(url) => {
            println!("{}", url);
            Ok(())
        }
        Err(err) => Err(Box::new(err)),
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Every long flag maps into the matching Cli field.
    #[test]
    fn test_all_options() -> Result<(), String> {
        let vec = vec![
            "presign",
            "--bucket-name",
            "mybucket",
            "--expiration",
            "60",
            "--file",
            "foo.txt",
            "--method",
            "get",
            "--profile",
            "awsprofile",
            "--region",
            "us-west-2",
        ];
        let args = Cli::from_iter(vec.iter());
        assert_eq!(args.bucket_name, "mybucket");
        assert_eq!(args.expiration, 60);
        assert_eq!(args.file, "foo.txt");
        assert_eq!(args.method, "get");
        assert_eq!(args.profile, Some("awsprofile".to_string()));
        assert_eq!(args.region, "us-west-2");
        Ok(())
    }

    // With only the required flags, defaults apply: us-east-1, put, 3600s, no profile.
    #[test]
    fn test_defaults() -> Result<(), String> {
        let vec = vec!["presign", "--bucket-name", "mybucket", "--file", "foo.txt"];
        let args = Cli::from_iter(vec.iter());
        assert_eq!(args.bucket_name, "mybucket");
        assert_eq!(args.expiration, 3600);
        assert_eq!(args.file, "foo.txt");
        assert_eq!(args.method, "put");
        assert_eq!(args.profile, None);
        assert_eq!(args.region, "us-east-1");
        Ok(())
    }
}
| Cli |
migratecomware.py | # This is a script to migrate infrastructure from Comware-based switches, such as the
# HPE A-series, to Meraki MS switches. The script reads an input file which defines which
# Comware switch will be migrated to which MS. Configuration is read from Comware through SSH,
# converted to Meraki form and uploaded to the Meraki cloud using the Dashboard API.
#
# Comware devices are referenced by IP address. Meraki devices are referenced by serial number.
#
# You need to have Python 3 and the Requests module installed. You
# can download the module here: https://github.com/kennethreitz/requests
# or install it using pip.
#
# The script also requires the Paramiko module for SSH functions. More info about installing Paramiko
# can be found here: http://www.paramiko.org/installing.html
#
# This script uses spaces for indentation. Do not use the Tab character when modifying it.
#
# To run the script, enter:
# python migratecomware.py -k <API key> -o <org name> -i <init file> [-u <default user>] [-p <default pass>] [-m <operating mode>]
#
# To make script chaining easier, all lines containing informational messages to the user
# start with the character @
#
#HOW TO CREATE AN INITIALIZATION FILE:
#An initialization file with device mappings is required for migratecomware.py
#
#For an example of a correct init config file, please see:
# https://github.com/meraki/automation-scripts/blob/master/migration_init_file.txt
#
#Initialization file syntax:
# * Blank lines and lines only containing whitespace will be ignored.
# * Use lines beginning with # as comments. These lines will be ignored.
# * Use "net=Network_name" to define a network. A network definition line must exist before any
# device definition lines.
# * Device definition lines. These lines define the IP address of the original Comware switch,
# the Meraki MS switch serial number the configuration will be transferred to and optionally
# a SSH username and password to log into the Comware device. If username and password are
# omitted, default credentials will be used. These lines can have three forms:
# <device_ip> <serial_number>
# <device_ip> <serial_number> <username> <password>
# file <filename> <serial_number>
#
#Examples of net definition and device definition lines, commented out:
#
#net=Migrated headquarters network
#10.1.1.20 AAAA-BBBB-CCCC admin admin
#10.1.1.21 AAAA-BBBB-DDDD admin@system admin123
#file myconfig.cfg BBBB-CCCC-DDDD
#
#net=Migrated branch network
#192.168.10.10 AAAA-BBBB-EEEE
import sys, getopt, requests, json, paramiko, re
class c_portconfig:
    # Holds the desired configuration for one Meraki switchport. All values
    # are kept as strings so they can be dropped straight into the Dashboard
    # API payload; an empty string means "leave this attribute unset".
    def __init__(self):
        self.name = '' #WORD
        self.type = 'null' #copper speed or sfp
        self.number = '0' #number of this type of interface type+number must be a unique combination
        self.mode = 'access' #access or trunk
        self.vlan = '1' #access VLAN or trunk native VLAN
        self.allowedvlans = '' #trunk allowed VLANs
        self.enabled = 'true' #values: true/false
        self.voicevlan = '' #voice VLAN
        self.poeenabled = '' #values: true/false
        self.rstp = '' #rapid spanning tree; '' = unset
        self.isolation = '' #port isolation; '' = unset
        self.stpguard = '' #STP guard mode; '' = unset
    #end class
class c_merakidevice:
    # One row of the migration plan: maps a source Comware device (reached by
    # SSH, or read from a saved config file) to the destination Meraki switch
    # identified by its serial number.
    def __init__(self):
        self.hostname = 'unnamed' #hostname for device
        self.serial = '' #serial number of destination device
        self.netname = '' #network this device belongs to
        self.srcip = '' #source device IP address to pull config from. leave blank if file
        self.srcfile = '' #source file to pull config from. leave blank if IP/SSH
        self.srcuser = '' #source SSH username. leave blank if file
        self.srcpass = '' #source SSH password. leave blank if file
        self.rawcfg = [] #raw configuration as extracted from source. fields are strings
        self.portcfg = [] #port configuration of this device. fields are instances of c_portconfig()
    #end class
def printusertext(p_message):
    """Print a line intended for a human reader.

    User-facing lines are prefixed with '@' so that scripts chained after
    this one can filter them out of the output stream.
    """
    print('@ {0}'.format(p_message))
def printhelp():
    """Print usage instructions for the script to stdout (via printusertext)."""
    printusertext('')
    printusertext('This is a script to migrate infrastructure from Comware-based switches, such as the')
    printusertext(' HPE A-series, to Meraki MS switches. The script reads an input file which defines which')
    printusertext(' Comware switch will be migrated to which MS. Configuration is read from Comware through SSH,')
    printusertext(' converted to Meraki form and uploaded to the Meraki cloud using the Dashboard API.')
    printusertext('')
    printusertext('To run the script, enter:')
    printusertext('python migratecomware.py -k <API key> -o <org> -i <init file> [-u <default user>] [-p <default pass>] [-m <mode>]')
    printusertext('')
    printusertext('The script needs a valid initialization configuration file to run (parameter -i).')
    printusertext(" For syntax help please see the comment lines in the beginning of this script's code.")
    printusertext('')
    printusertext('Parameter "-m" has 3 valid forms:')
    printusertext(' * -m simulation : This is the default mode. The script will print to output a simulation')
    printusertext('     of what changes will be made to what switch. If the target devices are not part of the')
    printusertext('     organization defined in "-o", the script will fail.')
    printusertext(' * -m simulation+claim : The script will print to output a simulation')
    printusertext('     of what changes will be made to what switch. If the target devices are not part of the')
    printusertext('     organization defined in "-o", the script will attempt to claim it and read needed info.')
    printusertext(' * -m commit : The script will migrate Comware configuration to the Meraki cloud.')
    printusertext('')
    printusertext(' Example:')
    printusertext(' python migratecomware.py -k 1234 -o MyOrg -i initconfig.txt -u foo -p bar -m commit')
    printusertext('')
    printusertext('Use double quotes ("") in Windows to pass arguments containing spaces. Names are case-sensitive.')
### SECTION: Functions for interacting with SSH and files
def loadinitcfg(p_filename, p_defaultuser, p_defaultpass):
    """Load the migration plan from an init file.

    Returns a list of c_merakidevice records; an empty list if the file
    cannot be opened. Invalid definitions abort the script with exit code 2.

    Rewritten to append fully-built device records instead of tracking a
    separate dcount index into configtable: the original bookkeeping carried
    a stray extra `dcount += 1`, which desynchronizes the index from the list
    and corrupts record assignment.
    """
    configtable = []        # c_merakidevice() instances, in file order
    networkdefined = False  # at least one 'net=' line has been seen
    currentnet = ''         # network name for subsequent device lines
    linenum = 0             # current line, for error messages
    try:
        f = open(p_filename, 'r')
    except:
        # Unreadable init file: caller treats an empty table as fatal.
        return(configtable)
    for line in f:
        linenum += 1
        stripped = line.strip()
        # Ignore blank lines and '#' comments.
        if len(stripped) == 0 or stripped[0] == '#':
            continue
        # Network definition line: "net=<name>".
        if stripped[:4] == 'net=':
            if len(stripped[4:]) > 0:
                currentnet = stripped[4:]
                networkdefined = True
            else:
                printusertext('ERROR: Init config (line %d): Network name cannot be blank' % linenum)
                sys.exit(2)
            continue
        # Everything else is a device record, which needs a current network.
        if not networkdefined:
            printusertext('ERROR: Init config (line %d): Device with no network defined' % linenum)
            sys.exit(2)
        splitline = stripped.split()
        if len(splitline) < 2:
            printusertext('ERROR: Init config (line %d): Invalid definition: %s' % (linenum, stripped))
            sys.exit(2)
        device = c_merakidevice()
        device.netname = currentnet
        if splitline[0] == 'file':
            # "file <filename> <serial>": config comes from disk, no SSH creds.
            if len(splitline) > 2:
                device.srcfile = splitline[1]
                device.serial = splitline[2]
            else:
                printusertext('ERROR: Init config (line %d): Invalid definition: %s' % (linenum, stripped))
                sys.exit(2)
        else:
            # "<ip> <serial> [<user> <pass>]": config comes from the device via SSH.
            device.srcip = splitline[0]
            device.serial = splitline[1]
            if len(splitline) > 3:
                # Device-specific username and password defined.
                device.srcuser = splitline[2]
                device.srcpass = splitline[3]
            elif len(splitline) > 2:
                # Got either username or password, but not both.
                printusertext('ERROR: Init config (line %d): Invalid definition: %s' % (linenum, stripped))
                sys.exit(2)
            else:
                # No device-specific credentials: fall back to the defaults,
                # aborting if no usable defaults were supplied ('\n' sentinel).
                if p_defaultuser == '\n' or p_defaultpass == '\n':
                    printusertext('ERROR: Default SSH credentials needed, but not defined')
                    sys.exit(2)
                device.srcuser = p_defaultuser
                device.srcpass = p_defaultpass
        configtable.append(device)
    f.close()
    return (configtable)
def loadcomwareconfig (p_hostip, p_user, p_pass):
    """SSH into a Comware device and return its running config as a list of lines.

    Returns a single-element list ['null'] if the SSH connection or command
    fails. Comment lines ('#') and pre-prompt banner output are dropped.
    """
    linetable = []
    ssh = paramiko.SSHClient()
    # Accept unknown host keys automatically (migration tool convenience).
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(p_hostip, username=p_user, password=p_pass)
        stdin, stdout, stderr = ssh.exec_command("display current")
        #THE LINE BELOW IS USED TO DISMISS "MORE" PROMPTS WHEN DISPLAYING CONFIG. ADJUST # OF SPACES IF NEEDED
        stdin.write(' \n')
        stdin.flush()
    except:
        printusertext('WARNING: Could not connect to source device: %s' % p_hostip)
        linetable.append('null')
        return (linetable)
    strippedline = []
    initiated = False
    for line in stdout.read().splitlines():
        if len(line) > 0:
            strippedline = line.strip().decode('ascii')
            # drop all lines before the first prompt (login banner, etc)
            # a login banner line starting with "<" and ending with ">" may cause the script to fail
            # check for sequence '<hostname>'
            if strippedline.startswith('<') and strippedline.endswith('>'):
                initiated = True
            if initiated and strippedline[0] != '<':
                # check all long lines to see if they start with " ---- More ----"
                if len(strippedline) > 15:
                    # look for sequence "---"
                    if strippedline[:3] == '---':
                        # remove garbage from beginning of line
                        # NOTE(review): if this cleanup ever empties the line,
                        # the '#' check below would raise IndexError — confirm
                        # the pager prompt format guarantees trailing content.
                        strippedline = strippedline[19:].lstrip()[5:].lstrip()
                # drop comments, check for character 35: "#"
                if strippedline[0] != '#':
                    # store ascii representations of received characters
                    linetable.append(strippedline)
    return (linetable)
def loadcomwarecfgfile(p_filename):
    """Load a source device configuration from a text file.

    Returns the file's non-blank, non-comment lines (stripped of surrounding
    whitespace). On read failure, prints a warning and returns ['null'].
    """
    lines = []
    try:
        cfgfile = open(p_filename, 'r')
    except:
        lines.append('null')
        printusertext('WARNING: Could not read source config file: %s' % p_filename)
        return lines
    for rawline in cfgfile:
        candidate = rawline.strip()
        # Skip blank lines and '#' comments.
        if len(candidate) > 0 and candidate[0] != '#':
            lines.append(candidate)
    cfgfile.close()
    return lines
def extracthostname(p_rawcfg):
    """Return the device hostname from a raw Comware config line table.

    Scans for the first 'sysname <name>' command and returns <name>.
    Returns '' when no complete sysname command is present.

    Fix: the original indexed tokens[0]/tokens[1] unconditionally, raising
    IndexError on a blank line or a bare 'sysname' with no argument.
    """
    for cfgline in p_rawcfg:
        tokens = cfgline.split()
        # Require both the keyword and its argument before indexing.
        if len(tokens) >= 2 and tokens[0] == 'sysname':
            return (tokens[1])
    return ('')
def extractportcfg(p_rawcfg):
#extracts port (interface) configuration from a comware configuration table
intcfg = []
intcount = 0
avlan = '' #string for building allowed VLAN value
supportedinterface = False
#command parser loop
for cfgline in p_rawcfg:
pieces = cfgline.split()
if pieces[0] == 'description' and supportedinterface:
#set int desc as port name. strip everything except alphanumerics and "_"
intcfg[intcount-1].name = re.sub(r'\W+','', cfgline[12:])[:20]
elif pieces[0] == 'interface':
#if interface is of a supported type, create new entry. otherwise ignore it
#and lock int command parsing functions until a supported one comes up
if pieces[1][:15] == 'GigabitEthernet':
intcfg.append(c_portconfig())
intcfg[intcount].type = 'GigabitEthernet'
#WARNING: THE LINE BELOW ONLY WORKS PROPERLY FOR 1RU SWITCHES
| supportedinterface = True
else:
supportedinterface = False
elif pieces[0] == 'port' and supportedinterface:
if pieces[1] == 'access':
if pieces[2] == 'vlan':
intcfg[intcount-1].vlan = pieces[3]
if pieces[1] == 'link-type':
intcfg[intcount-1].mode = pieces[2]
if pieces[1] == 'trunk':
if pieces[2] == 'permit':
#example Comware command: port link-type trunk permit vlan 10 50 to 60
if pieces[3] == 'vlan':
avlan = ''
for i in range(4, len(pieces)):
if pieces[i] == 'to':
avlan += '-'
else:
if len(avlan) == 0:
avlan += pieces[i]
elif avlan[len(avlan)-1] == '-':
avlan += pieces[i]
else:
avlan += ',%s' % pieces[i]
intcfg[intcount-1].allowedvlans = avlan
if pieces[2] == 'pvid':
if pieces[3] == 'vlan':
intcfg[intcount-1].vlan = pieces[4]
#elif pieces[0] == 'port-security':
#DEBUG: keep the line below commented, unless debugging this function
#printusertext ('DEBUG: Port security: %s' % pieces[1])
# if intcount == 0:
#still in global config
# if pieces[1] == 'enable':
#printusertext ('DEBUG: Enable port-security')
# continue
elif pieces[0] == 'shutdown' and supportedinterface:
intcfg[intcount-1].enabled = 'false'
#elif pieces[0] == 'undo' and supportedinterface:
#DEBUG: keep the line below commented, unless debugging this function
#printusertext ('DEBUG: Undo for int [%d]: %s' % (intcount, pieces[1]))
# if pieces[1] == 'dot1x':
#printusertext ('DEBUG: Dot1x: %s' % pieces[2])
# continue
#else:
#DEBUG: keep the line below commented, unless debugging this function
#print ('DEBUG: Invalid line')
return(intcfg)
### SECTION: Functions for interacting with Dashboard
def getorgid(p_apikey, p_orgname):
    """Return the organization ID matching p_orgname, or 'null' on failure."""
    headers = {'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'}
    r = requests.get('https://dashboard.meraki.com/api/v0/organizations', headers=headers)
    if r.status_code != requests.codes.ok:
        return ('null')
    # Org names are matched exactly (case-sensitive).
    for record in r.json():
        if record['name'] == p_orgname:
            return (record['id'])
    return ('null')
def getshardurl(p_apikey, p_orgid):
    #patch: per-org shard lookup is no longer performed — every org is
    #reachable through the shared API gateway FQDN, returned unconditionally.
    #Parameters are kept for call-site compatibility.
    return("api-mp.meraki.com")
def getnwid(p_apikey, p_shardurl, p_orgid, p_nwname):
    """Return the network ID for network name p_nwname, or 'null' on failure."""
    endpoint = 'https://%s/api/v0/organizations/%s/networks' % (p_shardurl, p_orgid)
    headers = {'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'}
    r = requests.get(endpoint, headers=headers)
    if r.status_code != requests.codes.ok:
        return ('null')
    # Network names are matched exactly (case-sensitive).
    for record in r.json():
        if record['name'] == p_nwname:
            return (record['id'])
    return ('null')
def createnw(p_apikey, p_shardurl, p_dstorg, p_nwdata):
    #creates network if one does not already exist with the same name
    #example for p_nwdata:
    #nwparams = {'name': 'hi', 'timeZone': 'Europe/Helsinki', 'tags': 'mytag', 'organizationId': '123', 'type': 'switch appliance'}
    #returns 'null' when the network is skipped, 'ok' otherwise
    #NOTE(review): the POST result is never checked, so 'ok' does not
    # guarantee the network was actually created — confirm whether stricter
    # handling is needed
    #check if network exists
    getnwresult = getnwid(p_apikey, p_shardurl, p_dstorg, p_nwdata['name'])
    if getnwresult != 'null':
        printusertext('WARNING: Skipping network "%s" (Already exists)' % p_nwdata['name'])
        return('null')
    if p_nwdata['type'] == 'combined':
        #find actual device types
        nwtype = 'wireless switch appliance'
    else:
        nwtype = p_nwdata['type']
    if nwtype != 'systems manager':
        r = requests.post('https://%s/api/v0/organizations/%s/networks' % (p_shardurl, p_dstorg), data=json.dumps({'timeZone': p_nwdata['timeZone'], 'tags': p_nwdata['tags'], 'name': p_nwdata['name'], 'organizationId': p_dstorg, 'type': nwtype}), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
    else:
        printusertext('WARNING: Skipping network "%s" (Cannot create SM networks)' % p_nwdata['name'])
        return('null')
    return('ok')
def claimdevice(p_apikey, p_shardurl, p_nwid, p_devserial):
    """Claim a device into a network. Always returns 0; HTTP errors are ignored."""
    endpoint = 'https://%s/api/v0/networks/%s/devices/claim' % (p_shardurl, p_nwid)
    payload = json.dumps({'serial': p_devserial})
    headers = {'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'}
    requests.post(endpoint, data=payload, headers=headers)
    return(0)
def claimdeviceorg(p_apikey, p_shardurl, p_orgid, p_devserial):
    """Claim a device into an org inventory (no network assignment).

    Always returns 0; HTTP errors are ignored.
    """
    endpoint = 'https://%s/api/v0/organizations/%s/claim' % (p_shardurl, p_orgid)
    payload = json.dumps({'serial': p_devserial})
    headers = {'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'}
    requests.post(endpoint, data=payload, headers=headers)
    return(0)
def getorgdeviceinfo (p_apikey, p_shardurl, p_orgid, p_devserial):
    """Look up basic device info in the org inventory by serial number.

    The device does not need to be assigned to a network. On HTTP failure or
    unknown serial, returns {'serial': 'null', 'model': 'null'}.
    """
    headers = {'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'}
    r = requests.get('https://%s/api/v0/organizations/%s/inventory' % (p_shardurl, p_orgid), headers=headers)
    if r.status_code != requests.codes.ok:
        return({'serial': 'null', 'model': 'null'})
    # Scan the whole inventory; the sentinel stands until a match is found.
    result = {'serial': 'null', 'model': 'null'}
    for record in r.json():
        if record['serial'] == p_devserial:
            result = {'mac': record['mac'], 'serial': record['serial'], 'networkId': record['networkId'], 'model': record['model'], 'claimedAt': record['claimedAt'], 'publicIp': record['publicIp']}
    return(result)
def setswportconfig(p_apikey, p_shardurl, p_devserial, p_portnum, p_portcfg):
    """Push one switchport configuration dict to the Dashboard API.

    Keys whose value is an empty string mean "unset" and are omitted from
    the PUT payload. Always returns 0; HTTP failures are not reported.
    """
    validconfig = {key: value for key, value in p_portcfg.items() if value != ''}
    endpoint = 'https://%s/api/v0/devices/%s/switchPorts/%s' % (p_shardurl, p_devserial, p_portnum)
    headers = {'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'}
    requests.put(endpoint, data=json.dumps(validconfig), headers=headers)
    return (0)
def setdevicedata(p_apikey, p_shardurl, p_nwid, p_devserial, p_field, p_value, p_movemarker):
    """Set a single field on a device record via the Dashboard API.

    p_movemarker is evaluated for truthiness and controls whether the map
    marker follows the device. Returns 'ok' on HTTP success, 'null' on failure.
    """
    movevalue = "true" if p_movemarker else "false"
    payload = json.dumps({p_field: p_value, 'moveMapMarker': movevalue})
    headers = {'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'}
    r = requests.put('https://%s/api/v0/networks/%s/devices/%s' % (p_shardurl, p_nwid, p_devserial), data=payload, headers=headers)
    if r.status_code != requests.codes.ok:
        return ('null')
    return('ok')
def migratedevices(p_apikey, p_shardurl, p_orgid, p_devt, p_mode):
    #migrates configuration according to device table p_devt. has three modes according to p_mode
    #p_mode = 'commit' : uploads configuration to Meraki cloud
    #p_mode = 'simulation': prints intended changes to stdout without touching cloud. will fail if device not in inventory
    #p_mode = 'simulation+claim': prints intended changes to stdout without touching cloud. will attempt to claim devices if they are not in inventory to get info
    mode_commit = False
    mode_claim = False
    nwid = ''
    portconfig = {}
    max_migrated_ports = 0
    if p_mode == 'commit':
        mode_commit = True
        mode_claim = True
    elif p_mode == 'simulation+claim':
        mode_claim = True
    for dev in p_devt:
        nwid = getnwid(p_apikey, p_shardurl, p_orgid, dev.netname)
        if nwid == 'null' and mode_commit:
            #if nw missing and commit mode, it needs to be created
            #nwid == 'null' is OK if running simulation
            #NOTE THAT TIMEZONE IS HARDCODED IN THE SCRIPT AT THIS POINT. THIS MAY CHANGE IN A LATER VERSION
            nwparams = {'name': dev.netname, 'timeZone': 'Europe/Helsinki', 'tags': 'migratecomwarepy', 'organizationId': p_orgid, 'type': 'switch'}
            createnw(p_apikey, p_shardurl, p_orgid, nwparams)
            nwid = getnwid(p_apikey, p_shardurl, p_orgid, dev.netname)
            #check if something went wrong
            if nwid == 'null':
                printusertext('ERROR: Unable to get ID for network %s' % dev.netname)
                sys.exit(2)
        #get model of device to check that it is a switch
        devinfo = getorgdeviceinfo (p_apikey, p_shardurl, p_orgid, dev.serial)
        if devinfo['model'] == 'null':
            #device not in inventory: claim it if the mode allows, else abort
            if mode_claim:
                claimdeviceorg(p_apikey, p_shardurl, p_orgid, dev.serial)
                devinfo = getorgdeviceinfo (p_apikey, p_shardurl, p_orgid, dev.serial)
                if devinfo['model'] == 'null':
                    printusertext('ERROR: Unable to claim device %s' % dev.serial)
                    sys.exit(2)
            else:
                printusertext('ERROR: Device %s not part of org %s' % (dev.serial, p_orgid))
                sys.exit(2)
        if devinfo['model'][:2] != 'MS':
            printusertext('ERROR: Device %s is type "%s": Not a switch' % (dev.serial, devinfo['model']))
            sys.exit(2)
        #at this stage we have nwid and device model
        #the switch may or may not be part of a network, so cannot read number of ports dynamically.
        #it will need to be done as part of a static configuration list
        #assumes model name convention of MXxxx-yyzz, where xxx: model series, yy:number of ports, zz:poe
        modelnumber = re.sub(r'[^0-9]','',devinfo['model'][:5])
        portnumber = re.sub(r'[^0-9]','',devinfo['model'][6:])
        #max_migrated_ports = front-panel ports + uplinks for the known models
        if modelnumber == '220':
            if portnumber == '8':
                max_migrated_ports = 10
            elif portnumber == '24':
                max_migrated_ports = 28
            elif portnumber == '48':
                max_migrated_ports = 52
        elif modelnumber == '225':
            if portnumber == '24':
                max_migrated_ports = 28
            elif portnumber == '48':
                max_migrated_ports = 52
        elif modelnumber == '250':
            if portnumber == '24':
                max_migrated_ports = 28
            elif portnumber == '48':
                max_migrated_ports = 52
        elif modelnumber == '350':
            if portnumber == '24':
                max_migrated_ports = 28
            elif portnumber == '48':
                max_migrated_ports = 52
        elif modelnumber == '410':
            if portnumber == '16':
                max_migrated_ports = 18
            elif portnumber == '32':
                max_migrated_ports = 34
        elif modelnumber == '425':
            if portnumber == '16':
                max_migrated_ports = 18
            elif portnumber == '32':
                max_migrated_ports = 34
        else:
            #if unknown device model, assume 0 uplinks as failsafe, until the script is updated to support it
            intportnumber = int(portnumber)
            #if Meraki switch nodel naming has changed from MSxxx-yy, the line below will fail
            if intportnumber <= 48:
                max_migrated_ports = intportnumber
        #deal with port number mismatches
        if len(dev.portcfg) < max_migrated_ports:
            max_migrated_ports = len(dev.portcfg)
        #now that we also know the MAC address of the device, we can also reset the hostname
        #for devices that did not get a value by running extracthostname() previously
        if dev.hostname == '':
            dev.hostname = devinfo['mac']
        #do preliminary stuff, like claiming device to nw or printing header
        if mode_commit:
            claimdevice(p_apikey, p_shardurl, nwid, dev.serial)
            devinfo = getorgdeviceinfo (p_apikey, p_shardurl, p_orgid, dev.serial)
            if devinfo['networkId'] != nwid:
                printusertext('ERROR: Unable set network for device %s' % dev.serial)
                sys.exit(2)
            #set hostname. Don't worry if it fails
            setdevicedata(p_apikey, p_shardurl, nwid, dev.serial, 'name', dev.hostname, 'false')
            printusertext('INFO: Migrating device %s (name: %s), source %s%s' % (dev.serial, dev.hostname, dev.srcip, dev.srcfile))
        else:
            #simulation modes: print a human-readable preview table instead
            print('')
            print('Migration target device %s (name: %s, %s) in network "%s"' % (dev.serial, dev.hostname, devinfo['model'],dev.netname))
            print('Source: %s%s' % (dev.srcip, dev.srcfile))
            print('Num Name Mode Enabled VLAN PoE VoiceVLAN TrnkAllowVLAN')
        for i in range (0, max_migrated_ports):
            portconfig = {'isolationEnabled': dev.portcfg[i].isolation, 'rstpEnabled': dev.portcfg[i].rstp, 'enabled': dev.portcfg[i].enabled, 'stpGuard': dev.portcfg[i].stpguard, 'accessPolicyNumber': '', 'type': dev.portcfg[i].mode, 'allowedVlans': dev.portcfg[i].allowedvlans, 'poeEnabled': dev.portcfg[i].poeenabled, 'name': dev.portcfg[i].name, 'tags': 'migratecomwarepy', 'number': dev.portcfg[i].number, 'vlan': dev.portcfg[i].vlan, 'voiceVlan': dev.portcfg[i].voicevlan}
            if mode_commit:
                setswportconfig(p_apikey, p_shardurl, dev.serial, dev.portcfg[i].number, portconfig)
            else:
                print('%s %s %s %s %s %s %s %s' % ("{:>3s}".format(portconfig['number']), "{:>20s}".format(portconfig['name']), "{:>7s}".format(portconfig['type']), "{:>6s}".format(portconfig['enabled']), "{:>5s}".format(portconfig['vlan']), "{:>7s}".format(portconfig['poeEnabled']), "{:>5s}".format(portconfig['voiceVlan']), portconfig['allowedVlans']))
    return() #migratedevices()
### SECTION: Main function
def main(argv):
    """Parse command-line options, load the init file, and run the migration."""
    #set default values for command line arguments
    arg_apikey = 'null'
    arg_orgname = 'null'
    arg_initfile = '????' #a default value that is not a valid filename
    arg_defuser = '\n' #a default value that is not a valid username
    arg_defpass = '\n' #a default value that is not a valid password
    arg_mode = 'simulation'
    #get command line arguments
    # python migratecomware.py -k <key> -o <org> -i <init file> [-u <user>] [-p <pass>] [-m <mode>]
    try:
        opts, args = getopt.getopt(argv, 'hk:o:i:u:p:m:')
    except getopt.GetoptError:
        printhelp()
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            printhelp()
            sys.exit()
        elif opt == '-k':
            arg_apikey = arg
        elif opt == '-o':
            arg_orgname = arg
        elif opt == '-i':
            arg_initfile = arg
        elif opt == '-u':
            arg_defuser = arg
        elif opt == '-p':
            arg_defpass = arg
        elif opt == '-m':
            arg_mode = arg
    #check if all required parameters have been given
    if arg_apikey == 'null' or arg_orgname == 'null' or arg_initfile == '????':
        printhelp()
        sys.exit(2)
    #get organization id corresponding to org name provided by user
    orgid = getorgid(arg_apikey, arg_orgname)
    if orgid == 'null':
        printusertext('ERROR: Fetching organization failed')
        sys.exit(2)
    #get shard URL where Org is stored
    shardurl = getshardurl(arg_apikey, orgid)
    if shardurl == 'null':
        printusertext('ERROR: Fetching Meraki cloud shard FQDN failed')
        sys.exit(2)
    #load configuration file
    devices = loadinitcfg(arg_initfile, arg_defuser, arg_defpass)
    if len(devices) == 0:
        printusertext('ERROR: No valid configuration in init file')
        sys.exit(2)
    #read configuration from source devices specified in init config
    for i in range(0, len(devices)):
        if devices[i].srcip != '':
            devices[i].rawcfg = loadcomwareconfig (devices[i].srcip, devices[i].srcuser, devices[i].srcpass)
        else:
            devices[i].rawcfg = loadcomwarecfgfile (devices[i].srcfile)
    #extract port configuration from source configuration
    for dev in devices:
        dev.hostname = extracthostname(dev.rawcfg)
        dev.portcfg = extractportcfg(dev.rawcfg)
    #run migration function in correct operating mode
    if arg_mode == 'simulation':
        migratedevices(arg_apikey, shardurl, orgid, devices, 'simulation')
    elif arg_mode == 'commit':
        migratedevices(arg_apikey, shardurl, orgid, devices, 'commit')
    elif arg_mode == 'simulation+claim':
        migratedevices(arg_apikey, shardurl, orgid, devices, 'simulation+claim')
    else:
        printusertext('ERROR: Parameter -m: Operating mode not valid')
        sys.exit(2)
    printusertext('End of script.')
if __name__ == '__main__':
main(sys.argv[1:]) | intcfg[intcount].number = pieces[1].split('/')[-1] #only take last number in string
intcount += 1
|
main.go | package main
import (
"time"
"github.com/joshi4/context"
)
func main() {
	// Exercise each context constructor once so that context.Trail can
	// report the recorded call sites at the end.
	ctx := context.Background()
	timeout(ctx)
	deadline(ctx)
	value(ctx)
	cancel(ctx)
	context.Trail(ctx)
}
// timeout derives a 10-second timeout context and immediately releases its
// resources; only the derivation itself matters for the trail demo.
func timeout(ctx context.Context) {
	_, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
}
func cancel(ctx context.Context) {
_, cancel := context.WithCancel(ctx) | func value(ctx context.Context) {
_ = context.WithValue(ctx, "key", "value")
}
// deadline derives a context that expires five seconds from now and
// immediately releases its resources; only the derivation matters here.
func deadline(ctx context.Context) {
	_, cancel := context.WithDeadline(ctx, time.Now().Add(5*time.Second))
	defer cancel()
}
// Current output of go run main.go
//main.main: /Users/shantanu/go/src/github.com/joshi4/context/x/main.go 10
//main.timeout: /Users/shantanu/go/src/github.com/joshi4/context/x/main.go 19
//main.deadline: /Users/shantanu/go/src/github.com/joshi4/context/x/main.go 33
//main.value: /Users/shantanu/go/src/github.com/joshi4/context/x/main.go 29
//main.cancel: /Users/shantanu/go/src/github.com/joshi4/context/x/main.go 24 | cancel()
}
|
featuredbot.go | package teambot
import (
"fmt"
"time"
"github.com/keybase/client/go/chat/utils"
"github.com/keybase/client/go/libkb"
"github.com/keybase/client/go/protocol/gregor1"
"github.com/keybase/client/go/protocol/keybase1"
)
const cacheLifetime = 24 * time.Hour
// featuredBotsCache is the KV-store payload for a cached featured-bots API
// response, stamped with its creation time for freshness checks.
type featuredBotsCache struct {
	Data  keybase1.FeaturedBotsRes `codec:"d" json:"d"`
	Ctime gregor1.Time             `codec:"c" json:"c"`
}

// isFresh reports whether the cached payload is still within cacheLifetime.
func (c featuredBotsCache) isFresh() bool {
	age := time.Since(c.Ctime.Time())
	return age <= cacheLifetime
}
// FeaturedBotLoader fetches featured/searchable bot listings from the server,
// caching featured results in the local KV store.
type FeaturedBotLoader struct {
	libkb.Contextified
}
// NewFeaturedBotLoader creates a loader bound to the given global context.
func NewFeaturedBotLoader(g *libkb.GlobalContext) *FeaturedBotLoader {
	return &FeaturedBotLoader{
		Contextified: libkb.NewContextified(g),
	}
}
// debug writes a "FeaturedBotLoader:"-prefixed line to the debug log.
func (l *FeaturedBotLoader) debug(mctx libkb.MetaContext, msg string, args ...interface{}) {
	l.G().Log.CDebugf(mctx.Ctx(), "FeaturedBotLoader: %s", fmt.Sprintf(msg, args...))
}
// Search queries the server for featured bots matching arg.Query, with
// limit/offset paging. The endpoint requires no session. Bot bios are
// decorated for display before the result is returned.
func (l *FeaturedBotLoader) Search(mctx libkb.MetaContext, arg keybase1.SearchArg) (res keybase1.SearchRes, err error) {
	defer mctx.TraceTimed("FeaturedBotLoader: Search", func() error { return err })()
	// Decorate only on success, after the API payload has been unmarshaled.
	defer func() {
		if err == nil {
			res.Bots = l.present(mctx, res.Bots)
		}
	}()
	apiRes, err := mctx.G().API.Get(mctx, libkb.APIArg{
		Endpoint:    "featured_bots/search",
		SessionType: libkb.APISessionTypeNONE,
		Args: libkb.HTTPArgs{
			"query":  libkb.S{Val: arg.Query},
			"limit":  libkb.I{Val: arg.Limit},
			"offset": libkb.I{Val: arg.Offset},
		},
	})
	if err != nil {
		return res, err
	}
	err = apiRes.Body.UnmarshalAgain(&res)
	return res, err
}
func (l *FeaturedBotLoader) featuredBotsFromServer(mctx libkb.MetaContext, arg keybase1.FeaturedBotsArg) (res keybase1.FeaturedBotsRes, err error) {
apiRes, err := mctx.G().API.Get(mctx, libkb.APIArg{
Endpoint: "featured_bots/featured",
SessionType: libkb.APISessionTypeNONE,
Args: libkb.HTTPArgs{
"limit": libkb.I{Val: arg.Limit},
"offset": libkb.I{Val: arg.Offset},
},
})
if err != nil {
return res, err
} |
// dbKey derives the KV-store key for a featured-bots page; the limit/offset
// pair makes each page a distinct cache entry.
func (l *FeaturedBotLoader) dbKey(arg keybase1.FeaturedBotsArg) libkb.DbKey {
	key := fmt.Sprintf("fb:%d:%d", arg.Limit, arg.Offset)
	return libkb.DbKey{
		Typ: libkb.DBFeaturedBots,
		Key: key,
	}
}
// featuredBotsFromStorage returns the cached featured-bots page for arg.
// found is false when there is no cache entry, the entry is stale, or the
// KV-store read fails (in which case err is also set).
func (l *FeaturedBotLoader) featuredBotsFromStorage(mctx libkb.MetaContext, arg keybase1.FeaturedBotsArg) (res keybase1.FeaturedBotsRes, found bool, err error) {
	dbKey := l.dbKey(arg)
	var cachedData featuredBotsCache
	found, err = mctx.G().GetKVStore().GetInto(&cachedData, dbKey)
	if err != nil || !found {
		return res, false, err
	}
	// Stale entries are treated as a miss; the caller refetches.
	if !cachedData.isFresh() {
		l.debug(mctx, "featuredBotsFromStorage: data not fresh, ctime: %v", cachedData.Ctime)
		return res, false, nil
	}
	return cachedData.Data, true, nil
}
// storeFeaturedBots caches a featured-bots response in the local KV store,
// stamped with the current time so isFresh can expire it later.
func (l *FeaturedBotLoader) storeFeaturedBots(mctx libkb.MetaContext, arg keybase1.FeaturedBotsArg, res keybase1.FeaturedBotsRes) error {
	l.debug(mctx, "storeFeaturedBots: storing %d bots", len(res.Bots))
	dbKey := l.dbKey(arg)
	return mctx.G().GetKVStore().PutObj(dbKey, nil, featuredBotsCache{
		Data:  res,
		Ctime: gregor1.ToTime(time.Now()),
	})
}
// present returns a copy of bots with each ExtendedDescription decorated for
// display; the input slice is left unmodified.
func (l *FeaturedBotLoader) present(mctx libkb.MetaContext, bots []keybase1.FeaturedBot) (res []keybase1.FeaturedBot) {
	res = make([]keybase1.FeaturedBot, len(bots))
	for i := range bots {
		decorated := bots[i]
		decorated.ExtendedDescription = utils.PresentDecoratedUserBio(mctx.Ctx(), bots[i].ExtendedDescription)
		res[i] = decorated
	}
	return res
}
// syncFeaturedBots fetches featured bots from the server, persists the result
// to the local cache, and notifies the UI. existingData, when non-nil, is the
// caller's current cached copy; storage is skipped only when the fresh result
// is identical to it.
func (l *FeaturedBotLoader) syncFeaturedBots(mctx libkb.MetaContext, arg keybase1.FeaturedBotsArg, existingData *keybase1.FeaturedBotsRes) (res keybase1.FeaturedBotsRes, err error) {
	defer mctx.TraceTimed("FeaturedBotLoader: syncFeaturedBots", func() error { return err })()
	res, err = l.featuredBotsFromServer(mctx, arg)
	if err != nil {
		l.debug(mctx, "syncFeaturedBots: failed to load from server: %s", err)
		return res, err
	}
	// BUGFIX: this previously read `existingData != nil && !res.Eq(...)`,
	// which never persisted results when no cached copy existed (nil
	// existingData) — so the cache could never be populated. Store when
	// there is no existing copy or when the data changed.
	if existingData == nil || !res.Eq(*existingData) {
		if err := l.storeFeaturedBots(mctx, arg, res); err != nil {
			l.debug(mctx, "syncFeaturedBots: failed to store result: %s", err)
			return res, err
		}
	}
	l.G().NotifyRouter.HandleFeaturedBots(mctx.Ctx(), l.present(mctx, res.Bots), arg.Limit, arg.Offset)
	return res, nil
}
func (l *FeaturedBotLoader) FeaturedBots(mctx libkb.MetaContext, arg keybase1.FeaturedBotsArg) (res keybase1.FeaturedBotsRes, err error) {
defer mctx.TraceTimed("FeaturedBotLoader: FeaturedBots", func() error { return err })()
defer func() {
if err == nil {
res.Bots = l.present(mctx, res.Bots)
}
}()
if arg.SkipCache {
return l.syncFeaturedBots(mctx, arg, nil)
}
// send up local copy first quickly
res, found, err := l.featuredBotsFromStorage(mctx, arg)
if err != nil {
l.debug(mctx, "FeaturedBots: failed to load from local storage: %s", err)
} else if found {
l.debug(mctx, "FeaturedBots: returning cached data")
l.G().NotifyRouter.HandleFeaturedBots(mctx.Ctx(), l.present(mctx, res.Bots), arg.Limit, arg.Offset)
go func() {
mctx = libkb.NewMetaContextBackground(l.G())
if _, err := l.syncFeaturedBots(mctx, arg, &res); err != nil {
l.debug(mctx, "FeaturedBots: unable to fetch from server in background: %v", err)
}
}()
return res, err
}
return l.syncFeaturedBots(mctx, arg, nil)
} | err = apiRes.Body.UnmarshalAgain(&res)
return res, err
} |
pointtotag_response.py | # coding: utf-8
"""
Syntropy Rule service
Syntropy Rule service # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class PointtotagResponse(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {"data": "PointtotagPointToTag"}
attribute_map = {"data": "data"}
def | (self, data=None): # noqa: E501
"""PointtotagResponse - a model defined in Swagger""" # noqa: E501
self._data = None
self.discriminator = None
if data is not None:
self.data = data
@property
def data(self):
"""Gets the data of this PointtotagResponse. # noqa: E501
:return: The data of this PointtotagResponse. # noqa: E501
:rtype: PointtotagPointToTag
"""
return self._data
@data.setter
def data(self, data):
"""Sets the data of this PointtotagResponse.
:param data: The data of this PointtotagResponse. # noqa: E501
:type: PointtotagPointToTag
"""
self._data = data
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value
if issubclass(PointtotagResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PointtotagResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| __init__ |
infer.py | import os
import torch
from osgeo import gdal
import numpy as np
from warnings import warn
from .model_io import get_model
from .transform import process_aug_dict
from .datagen import InferenceTiler
from ..raster.image import stitch_images, create_multiband_geotiff
from ..utils.core import get_data_paths
class Inferer(object):
|
def get_infer_df(config):
"""Get the inference df based on the contents of ``config`` .
This function uses the logic described in the documentation for the config
file to determine where to find images to be used for inference.
See the docs and the comments in solaris/data/config_skeleton.yml for
details.
Arguments
---------
config : dict
The loaded configuration dict for model training and/or inference.
Returns
-------
infer_df : :class:`dict`
:class:`dict` containing at least one column: ``'image'`` . The values
in this column correspond to the path to filenames to perform inference
on.
"""
infer_df = get_data_paths(config['inference_data_csv'], infer=True)
return infer_df
| """Object for training `solaris` models using PyTorch or Keras."""
def __init__(self, config, custom_model_dict=None):
self.config = config
self.batch_size = self.config['batch_size']
self.framework = self.config['nn_framework']
self.model_name = self.config['model_name']
# check if the model was trained as part of the same pipeline; if so,
# use the output from that. If not, use the pre-trained model directly.
if self.config['train']:
warn('Because the configuration specifies both training and '
'inference, solaris is switching the model weights path '
'to the training output path.')
self.model_path = self.config['training']['model_dest_path']
if custom_model_dict is not None:
custom_model_dict['weight_path'] = self.config[
'training']['model_dest_path']
else:
self.model_path = self.config.get('model_path', None)
self.model = get_model(self.model_name, self.framework,
self.model_path, pretrained=True,
custom_model_dict=custom_model_dict)
self.window_step_x = self.config['inference'].get('window_step_size_x',
None)
self.window_step_y = self.config['inference'].get('window_step_size_y',
None)
if self.window_step_x is None:
self.window_step_x = self.config['data_specs']['width']
if self.window_step_y is None:
self.window_step_y = self.config['data_specs']['height']
self.stitching_method = self.config['inference'].get(
'stitching_method', 'average')
self.output_dir = self.config['inference']['output_dir']
if not os.path.isdir(self.output_dir):
os.makedirs(self.output_dir)
def __call__(self, infer_df=None):
"""Run inference.
Arguments
---------
infer_df : :class:`pandas.DataFrame` or `str`
A :class:`pandas.DataFrame` with a column, ``'image'``, specifying
paths to images for inference. Alternatively, `infer_df` can be a
path to a CSV file containing the same information. Defaults to
``None``, in which case the file path specified in the Inferer's
configuration dict is used.
"""
if infer_df is None:
infer_df = get_infer_df(self.config)
inf_tiler = InferenceTiler(
self.framework,
width=self.config['data_specs']['width'],
height=self.config['data_specs']['height'],
x_step=self.window_step_x,
y_step=self.window_step_y,
augmentations=process_aug_dict(
self.config['inference_augmentation']))
for idx, im_path in enumerate(infer_df['image']):
temp_im = gdal.Open(im_path)
proj = temp_im.GetProjection()
gt = temp_im.GetGeoTransform()
inf_input, idx_refs, (
src_im_height, src_im_width) = inf_tiler(im_path)
if self.framework == 'keras':
subarr_preds = self.model.predict(inf_input,
batch_size=self.batch_size)
elif self.framework in ['torch', 'pytorch']:
with torch.no_grad():
self.model.eval()
if torch.cuda.is_available():
device = torch.device('cuda')
self.model = self.model.cuda()
else:
device = torch.device('cpu')
inf_input = torch.from_numpy(inf_input).float().to(device)
# add additional input data, if applicable
if self.config['data_specs'].get('additional_inputs',
None) is not None:
inf_input = [inf_input]
for i in self.config['data_specs']['additional_inputs']:
inf_input.append(
infer_df[i].iloc[idx].to(device))
subarr_preds = self.model(inf_input)
subarr_preds = subarr_preds.cpu().data.numpy()
stitched_result = stitch_images(subarr_preds,
idx_refs=idx_refs,
out_width=src_im_width,
out_height=src_im_height,
method=self.stitching_method)
stitched_result = np.swapaxes(stitched_result, 1, 0)
stitched_result = np.swapaxes(stitched_result, 2, 0)
create_multiband_geotiff(stitched_result,
os.path.join(self.output_dir,
os.path.split(im_path)[1]),
proj=proj, geo=gt, nodata=np.nan,
out_format=gdal.GDT_Float32) |
slider.go | // SPDX-License-Identifier: Unlicense OR MIT
package material
import (
"image"
"image/color"
"github.com/cybriq/p9/pkg/gel/gio/f32"
"github.com/cybriq/p9/pkg/gel/gio/internal/f32color"
"github.com/cybriq/p9/pkg/gel/gio/layout"
"github.com/cybriq/p9/pkg/gel/gio/op"
"github.com/cybriq/p9/pkg/gel/gio/op/clip"
"github.com/cybriq/p9/pkg/gel/gio/op/paint"
"github.com/cybriq/p9/pkg/gel/gio/unit"
"github.com/cybriq/p9/pkg/gel/gio/widget"
)
// Slider is for selecting a value in a range.
func Slider(th *Theme, float *widget.Float, min, max float32) SliderStyle {
return SliderStyle{
Min: min,
Max: max,
Color: th.Palette.ContrastBg,
Float: float,
FingerSize: th.FingerSize,
}
}
type SliderStyle struct {
Min, Max float32
Color color.NRGBA
Float *widget.Float
FingerSize unit.Value
}
func (s SliderStyle) Layout(gtx layout.Context) layout.Dimensions {
thumbRadius := gtx.Px(unit.Dp(6))
trackWidth := gtx.Px(unit.Dp(2))
axis := s.Float.Axis
// Keep a minimum length so that the track is always visible.
minLength := thumbRadius + 3*thumbRadius + thumbRadius
// Try to expand to finger size, but only if the constraints
// allow for it.
touchSizePx := min(gtx.Px(s.FingerSize),
axis.Convert(gtx.Constraints.Max).Y,
)
sizeMain := max(axis.Convert(gtx.Constraints.Min).X, minLength)
sizeCross := max(2*thumbRadius, touchSizePx)
size := axis.Convert(image.Pt(sizeMain, sizeCross))
st := op.Save(gtx.Ops)
o := axis.Convert(image.Pt(thumbRadius, 0))
op.Offset(layout.FPt(o)).Add(gtx.Ops)
gtx.Constraints.Min = axis.Convert(image.Pt(sizeMain-2*thumbRadius,
sizeCross,
),
)
s.Float.Layout(gtx, thumbRadius, s.Min, s.Max)
gtx.Constraints.Min = gtx.Constraints.Min.Add(axis.Convert(image.Pt(0,
sizeCross,
),
),
)
thumbPos := thumbRadius + int(s.Float.Pos())
st.Load()
color := s.Color
if gtx.Queue == nil {
color = f32color.Disabled(color)
}
// Draw track before thumb.
st = op.Save(gtx.Ops)
track := image.Rectangle{
Min: axis.Convert(image.Pt(thumbRadius, sizeCross/2-trackWidth/2)),
Max: axis.Convert(image.Pt(thumbPos, sizeCross/2+trackWidth/2)),
}
clip.Rect(track).Add(gtx.Ops)
paint.Fill(gtx.Ops, color)
st.Load()
// Draw track after thumb.
st = op.Save(gtx.Ops)
track = image.Rectangle{
Min: axis.Convert(image.Pt(thumbPos, axis.Convert(track.Min).Y)),
Max: axis.Convert(image.Pt(sizeMain-thumbRadius,
axis.Convert(track.Max).Y,
),
),
}
clip.Rect(track).Add(gtx.Ops)
paint.Fill(gtx.Ops, f32color.MulAlpha(color, 96))
st.Load()
// Draw thumb.
pt := axis.Convert(image.Pt(thumbPos, sizeCross/2))
paint.FillShape(gtx.Ops, color,
clip.Circle{
Center: f32.Point{X: float32(pt.X), Y: float32(pt.Y)},
Radius: float32(thumbRadius),
}.Op(gtx.Ops),
)
return layout.Dimensions{Size: size}
}
func max(a, b int) int |
func min(a, b int) int {
if a < b {
return a
}
return b
}
| {
if a > b {
return a
}
return b
} |
utils.py | # -*- coding: utf8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
CAMEL_RE = re.compile(r'([A-Z][a-z]+|[A-Z]+(?=[A-Z\s]|$))')
def de_camel_case(text):
"""Convert CamelCase names to human-readable format."""
return ' '.join(w.strip() for w in CAMEL_RE.split(text) if w.strip())
def list_to_dict(object_list, key_attribute='id'):
"""Converts an object list to a dict
:param object_list: list of objects to be put into a dict
:type object_list: list
:param key_attribute: object attribute used as index by dict
:type key_attribute: str
:return: dict containing the objects in the list
:rtype: dict
"""
return dict((getattr(o, key_attribute), o) for o in object_list)
def length(iterator):
"""A length function for iterators
Returns the number of items in the specified iterator. Note that this
function consumes the iterator in the process.
"""
return sum(1 for _item in iterator)
| def check_image_type(image, image_type):
"""Check if image 'type' property matches passed-in image_type.
If image has no 'type' property' return True, as we cannot
be sure what type of image it is.
"""
return (image.properties.get('type', image_type) == image_type)
def filter_items(items, **kwargs):
"""Filters the list of items and returns the filtered list.
Example usage:
>>> class Item(object):
... def __init__(self, index):
... self.index = index
... def __repr__(self):
... return '<Item index=%d>' % self.index
>>> items = [Item(i) for i in range(7)]
>>> list(filter_items(items, index=1))
[<Item index=1>]
>>> list(filter_items(items, index__in=(1, 2, 3)))
[<Item index=1>, <Item index=2>, <Item index=3>]
>>> list(filter_items(items, index__not_in=(1, 2, 3)))
[<Item index=0>, <Item index=4>, <Item index=5>, <Item index=6>]
"""
for item in items:
for name, value in kwargs.items():
if name.endswith('__in'):
if getattr(item, name[:-len('__in')]) not in value:
break
elif name.endswith('__not_in'):
if getattr(item, name[:-len('__not_in')]) in value:
break
else:
if getattr(item, name) != value:
break
else:
yield item
def safe_int_cast(value):
try:
return int(value)
except (TypeError, ValueError):
return 0 | |
category-routes.component.ts | import {Component, Input, OnInit} from '@angular/core';
import {RouteDocument} from '../../database/types/route';
@Component({
selector: 'app-category-routes',
templateUrl: './category-routes.component.html',
styleUrls: ['./category-routes.component.scss']
})
export class | implements OnInit {
@Input() routes: RouteDocument[];
constructor() { }
ngOnInit() {
}
onDelete(route: RouteDocument) {
const result = confirm(`Are you sure that you would like to delete ${route.name}?`);
if (result === true) {
route.remove();
}
}
}
| CategoryRoutesComponent |
aa.go | package main
import (
"fmt"
"sync"
)
var wg sync.WaitGroup
var chs = make([]chan int, 10)
func Add(x, y int, yy chan int) {
defer wg.Done()
<-yy
z := x + y
fmt.Println(z)
if x+1 < len(chs) {
chs[x+1] <- 1
}
}
func main() |
type Hh struct {
id int
}
type number struct {
f float32
}
type nr number
| {
a := Hh{5}
//b := nr{5.0}
// var i float32 = b // compile-error: cannot use b (type nr) as type float32 in assignment
// var i = float32(b) // compile-error: cannot convert b (type nr) to type float32
// var c number = b // compile-error: cannot use b (type nr) as type number in assignment
// needs a conversion:
//var c = number(b)
fmt.Println(a)
} |
hash.go | // Copyright Fuzamei Corp. 2018 All Rights Reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package common
import (
"math/big"
"github.com/holiman/uint256"
"github.com/33cn/chain33/common"
)
const (
// HashLength
HashLength = 32
// Hash160Length Hash160
Hash160Length = 20
// AddressLength
AddressLength = 20
)
// Hash
type Hash common.Hash
// Str
func (h Hash) Str() string { return string(h[:]) }
// Bytes
func (h Hash) Bytes() []byte { return h[:] }
// Big
func (h Hash) Big() *big.Int { return new(big.Int).SetBytes(h[:]) }
// Hex
func (h Hash) Hex() string { return Bytes2Hex(h[:]) }
// SetBytes , , ,
| b = b[len(b)-HashLength:]
}
copy(h[HashLength-len(b):], b)
}
// BigToHash
func BigToHash(b *big.Int) Hash {
return Hash(common.BytesToHash(b.Bytes()))
}
// Uint256ToHash
func Uint256ToHash(u *uint256.Int) Hash {
return Hash(common.BytesToHash(u.Bytes()))
}
// BytesToHash []byte
func BytesToHash(b []byte) Hash {
return Hash(common.BytesToHash(b))
}
// ToHash []byte
func ToHash(data []byte) Hash {
return BytesToHash(common.Sha256(data))
} | func (h *Hash) SetBytes(b []byte) {
if len(b) > len(h) {
|
ListIPSetsCommand.ts | import { ServiceInputTypes, ServiceOutputTypes, WAFV2ClientResolvedConfig } from "../WAFV2Client";
import { ListIPSetsRequest, ListIPSetsResponse } from "../models/models_0";
import {
deserializeAws_json1_1ListIPSetsCommand,
serializeAws_json1_1ListIPSetsCommand,
} from "../protocols/Aws_json1_1";
import { getSerdePlugin } from "@aws-sdk/middleware-serde";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http";
import { Command as $Command } from "@aws-sdk/smithy-client";
import {
FinalizeHandlerArguments,
Handler,
HandlerExecutionContext,
MiddlewareStack,
HttpHandlerOptions as __HttpHandlerOptions,
MetadataBearer as __MetadataBearer,
SerdeContext as __SerdeContext,
} from "@aws-sdk/types";
export interface ListIPSetsCommandInput extends ListIPSetsRequest {}
export interface ListIPSetsCommandOutput extends ListIPSetsResponse, __MetadataBearer {}
/** | export class ListIPSetsCommand extends $Command<
ListIPSetsCommandInput,
ListIPSetsCommandOutput,
WAFV2ClientResolvedConfig
> {
// Start section: command_properties
// End section: command_properties
constructor(readonly input: ListIPSetsCommandInput) {
// Start section: command_constructor
super();
// End section: command_constructor
}
/**
* @internal
*/
resolveMiddleware(
clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>,
configuration: WAFV2ClientResolvedConfig,
options?: __HttpHandlerOptions
): Handler<ListIPSetsCommandInput, ListIPSetsCommandOutput> {
this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
const stack = clientStack.concat(this.middlewareStack);
const { logger } = configuration;
const clientName = "WAFV2Client";
const commandName = "ListIPSetsCommand";
const handlerExecutionContext: HandlerExecutionContext = {
logger,
clientName,
commandName,
inputFilterSensitiveLog: ListIPSetsRequest.filterSensitiveLog,
outputFilterSensitiveLog: ListIPSetsResponse.filterSensitiveLog,
};
const { requestHandler } = configuration;
return stack.resolve(
(request: FinalizeHandlerArguments<any>) =>
requestHandler.handle(request.request as __HttpRequest, options || {}),
handlerExecutionContext
);
}
private serialize(input: ListIPSetsCommandInput, context: __SerdeContext): Promise<__HttpRequest> {
return serializeAws_json1_1ListIPSetsCommand(input, context);
}
private deserialize(output: __HttpResponse, context: __SerdeContext): Promise<ListIPSetsCommandOutput> {
return deserializeAws_json1_1ListIPSetsCommand(output, context);
}
// Start section: command_body_extra
// End section: command_body_extra
} | * <p>Retrieves an array of <a>IPSetSummary</a> objects for the IP sets that you manage.</p>
*/ |
test_notify.py | # Copyright 2010 ITA Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from twisted.trial import unittest
from nagcat import notify
import coil
ENVIRONMENT_HOST = {
# Host Macros
'NAGIOS_HOSTNAME': "localhost",
'NAGIOS_HOSTDISPLAYNAME': "localhost",
'NAGIOS_HOSTALIAS': "localhost",
'NAGIOS_HOSTADDRESS': "127.0.0.1",
'NAGIOS_HOSTSTATE': "UP",
'NAGIOS_HOSTSTATEID': "0",
'NAGIOS_LASTHOSTSTATE': "UP",
'NAGIOS_LASTHOSTSTATEID': "0",
'NAGIOS_HOSTSTATETYPE': "HARD",
'NAGIOS_HOSTATTEMPT': "1",
'NAGIOS_MAXHOSTATTEMPTS': "3",
'NAGIOS_HOSTEVENTID': "0",
'NAGIOS_LASTHOSTEVENTID': "0",
'NAGIOS_HOSTPROBLEMID': "0",
'NAGIOS_LASTHOSTPROBLEMID': "0",
'NAGIOS_HOSTLATENCY': "0.123",
'NAGIOS_HOSTEXECUTIONTIME': "4.012",
'NAGIOS_HOSTDURATION': "35d 15h 31m 49s",
'NAGIOS_HOSTDURATIONSEC': "3079909",
'NAGIOS_HOSTDOWNTIME': "0",
'NAGIOS_HOSTPERCENTCHANGE': "0.0",
'NAGIOS_HOSTGROUPNAMES': "a_group,b_group",
'NAGIOS_LASTHOSTCHECK': "1260009929",
'NAGIOS_LASTHOSTSTATECHANGE': "1256929950",
'NAGIOS_LASTHOSTUP': "1260009939",
'NAGIOS_LASTHOSTDOWN': "0",
'NAGIOS_LASTHOSTUNREACHABLE': "0",
'NAGIOS_HOSTOUTPUT': "PING OK - Packet loss = 0%, RTA = 2.00 ms",
'NAGIOS_LONGHOSTOUTPUT': "",
'NAGIOS_HOSTPERFDATA': "rta=10.778000ms;3000.000000;5000.000000;0.000000 pl=0%;80;100;0",
'NAGIOS_HOSTCHECKCOMMAND': "check_host_alive",
'NAGIOS_HOSTACTIONURL': "",
'NAGIOS_HOSTNOTESURL': "",
'NAGIOS_HOSTNOTES': "",
'NAGIOS_TOTALHOSTSERVICES': "39",
'NAGIOS_TOTALHOSTSERVICESOK': "38",
'NAGIOS_TOTALHOSTSERVICESWARNING': "0",
'NAGIOS_TOTALHOSTSERVICESCRITICAL': "1",
'NAGIOS_TOTALHOSTSERVICESUNKNOWN': "0",
# Host Group Macros
'NAGIOS_HOSTGROUPNAME': "a_group",
'NAGIOS_HOSTGROUPALIAS': "A Group",
'NAGIOS_HOSTGROUPMEMBERS': "localhost",
'NAGIOS_HOSTGROUPNOTES': "",
'NAGIOS_HOSTGROUPNOTESURL': "",
'NAGIOS_HOSTGROUPACTIONURL': "",
# Contact Macros
'NAGIOS_CONTACTNAME': "root",
'NAGIOS_CONTACTALIAS': "Mr. Big",
'NAGIOS_CONTACTEMAIL': "root@localhost",
'NAGIOS_CONTACTPAGER': "pager@localhost",
'NAGIOS_CONTACTGROUPNAMES': "admins,managers",
# The address fields could be anything...
#'NAGIOS_CONTACTADDRESS0': "",
# Contact Group Macros
'NAGIOS_CONTACTGROUPNAME': "admins",
'NAGIOS_CONTACTGROUPALIAS': "Admins",
'NAGIOS_CONTACTGROUPMEMBERS': "root,luser",
# Summary Macros (NAGIOS_TOTAL*) are not always available
# so they are not included here...
# Notification Macros
'NAGIOS_NOTIFICATIONTYPE': "PROBLEM",
'NAGIOS_NOTIFICATIONRECIPIENTS': "root",
'NAGIOS_NOTIFICATIONISESCALATED': "0",
'NAGIOS_NOTIFICATIONAUTHOR': "",
'NAGIOS_NOTIFICATIONAUTHORNAME': "",
'NAGIOS_NOTIFICATIONAUTHORALIAS': "",
'NAGIOS_NOTIFICATIONCOMMENT': "",
'NAGIOS_NOTIFICATIONNUMBER': "1",
'NAGIOS_HOSTNOTIFICATIONNUMBER': "0",
'NAGIOS_HOSTNOTIFICATIONID': "0",
'NAGIOS_SERVICENOTIFICATIONNUMBER': "1",
'NAGIOS_SERVICENOTIFICATIONID': "409161",
# Date/Time Macros
'NAGIOS_LONGDATETIME': "Sun Dec 6 04:25:32 EST 2009",
'NAGIOS_SHORTDATETIME': "12-06-2009 04:25:33",
'NAGIOS_DATE': "12-06-2009",
'NAGIOS_TIME': "04:25:34",
'NAGIOS_TIMET': "1260091534",
# File Macros:
'NAGIOS_MAINCONFIGFILE': "/path/to/nagios.cfg",
'NAGIOS_STATUSDATAFILE': "/path/to/status.dat",
'NAGIOS_RETENTIONDATAFILE': "/path/to/retention.dat",
'NAGIOS_OBJECTCACHEFILE': "/path/to/objects.cache",
'NAGIOS_TEMPFILE': "/path/to/nagios.tmp",
'NAGIOS_TEMPPATH': "/tmp",
'NAGIOS_LOGFILE': "/path/to/nagios.log",
'NAGIOS_RESOURCEFILE': "/path/to/resource.cfg",
'NAGIOS_COMMANDFILE': "/path/to/nagios.cmd",
# Misc Macros:
'NAGIOS_PROCESSSTARTTIME': "1259966149",
'NAGIOS_EVENTSTARTTIME': "1259966149",
'NAGIOS_ADMINEMAIL': "root@localhost",
'NAGIOS_ADMINPAGER': "pager@localhost",
# These are available but could be anything...
#'NAGIOS_ARG0': "",
#'NAGIOS_USER0': "",
}
ENVIRONMENT_SERVICE = {
# Service Macros
'NAGIOS_SERVICEDESC': "PING",
'NAGIOS_SERVICEDISPLAYNAME': "PING",
'NAGIOS_SERVICESTATE': "CRITICAL",
'NAGIOS_SERVICESTATEID': "2",
'NAGIOS_LASTSERVICESTATE': "CRITICAL",
'NAGIOS_LASTSERVICESTATEID': "2",
'NAGIOS_SERVICESTATETYPE': "HARD",
'NAGIOS_SERVICEATTEMPT': "3",
'NAGIOS_MAXSERVICEATTEMPTS': "3",
'NAGIOS_SERVICEISVOLATILE': "0",
'NAGIOS_SERVICEEVENTID': "56460",
'NAGIOS_LASTSERVICEEVENTID': "56405",
'NAGIOS_SERVICEPROBLEMID': "28201",
'NAGIOS_LASTSERVICEPROBLEMID': "0",
'NAGIOS_SERVICELATENCY': "0.357",
'NAGIOS_SERVICEEXECUTIONTIME': "0.000",
'NAGIOS_SERVICEDURATION': "0d 0h 0m 17s",
'NAGIOS_SERVICEDURATIONSEC': "17",
'NAGIOS_SERVICEDOWNTIME': "0",
'NAGIOS_SERVICEPERCENTCHANGE': "12.37",
'NAGIOS_SERVICEGROUPNAMES': "z_gorup,y_group",
'NAGIOS_LASTSERVICECHECK': "1260146052",
'NAGIOS_LASTSERVICESTATECHANGE': "1260146112",
'NAGIOS_LASTSERVICEOK': "1260146052",
'NAGIOS_LASTSERVICEWARNING': "1260091455",
'NAGIOS_LASTSERVIVECRITICAL': "1260146112",
'NAGIOS_LASTSERVICEUNKNOWN': "1257999616",
'NAGIOS_SERVICEOUTPUT': "PING CRITICAL - Packet loss = 60%, RTA = 0.38 ms",
'NAGIOS_LONGSERVICEOUTPUT': "Long Output\\nWith\\nextra lines",
'NAGIOS_SERVICEPERFDATA': "",
'NAGIOS_SERVICECHECKCOMMAND': "check_freshness",
'NAGIOS_SERVICEACTIONURL': "",
'NAGIOS_SERVICENOTESURL': "",
'NAGIOS_SERVICENOTES': "",
# Service Group Macros
'NAGIOS_SERVICEGROUPNAME': "z_group",
'NAGIOS_SERVICEGROUPALIAS': "Z Group",
'NAGIOS_SERVICEGROUPMEMBERS': "localhost,PING,otherhost,PING",
'NAGIOS_SERVICEGROUPNOTESURL': "",
'NAGIOS_SERVICEGROUPNOTES': "",
}
ENVIRONMENT_SERVICE.update(ENVIRONMENT_HOST)
class MacrosTestCase(unittest.TestCase):
    """Verify notify.Macros behavior over the fake Nagios environment:
    prefix stripping, newline handling, and missing-key errors."""

    def setUp(self):
        self.macros = notify.Macros(ENVIRONMENT_SERVICE)

    def testPrefix(self):
        # Macro keys must be exposed without the NAGIOS_ env-var prefix.
        for key in self.macros:
            self.failIf(key.startswith("NAGIOS_"))

    def testNewlines(self):
        # Only LONGSERVICEOUTPUT may span multiple lines; every other
        # macro value must be empty or a single line.
        for key, value in self.macros.iteritems():
            if key == "LONGSERVICEOUTPUT":
                self.assert_(len(value.splitlines()) > 1)
            else:
                self.assert_(not value or len(value.splitlines()) == 1)

    def testMissing(self):
        # Looking up an unknown macro raises MissingMacro, not KeyError.
        self.assertRaises(notify.MissingMacro,
                lambda: self.macros['DOESNOTEXIST'])

class NotificationTest(unittest.TestCase):
    """Exercise notify.Notification rendering for both host and
    service notification types."""

    def setUp(self):
        self.macros = {
                'host': notify.Macros(ENVIRONMENT_HOST),
                'service': notify.Macros(ENVIRONMENT_SERVICE)}
        self.config = coil.parse(notify.DEFAULT_CONFIG)

    def testSubject(self):
        # Both notification types must produce a non-empty subject.
        for t in ('host', 'service'):
            obj = notify.Notification(t, self.macros[t], self.config)
            self.assert_(obj.subject())

    def testBody(self):
        # The rendered body must be non-empty, contain no unexpanded
        # {macro} placeholders, and the short format must be shorter
        # than the long format.
        for t in ('host', 'service'):
            obj = notify.Notification(t, self.macros[t], self.config)
            full_body = obj.body()
            self.assert_(full_body)
            self.failIf(re.search(r'{\w+}', full_body))
            obj.format = "short"
            short_body = obj.body()
            self.assert_(short_body)
            self.failIf(re.search(r'{\w+}', short_body))
            self.assert_(len(short_body) < len(full_body))

    def testURLs(self):
        # Generated URLs must be rooted at the configured base URLs.
        config = self.config.copy()
        config['urls.nagios'] = "https://testURLs/zomg/nagios"
        config['urls.graphs'] = "https://testURLs/zomg/graphs"
        for t in ('host', 'service'):
            obj = notify.Notification(t, self.macros[t], config)
            urls = obj.urls()
            self.assert_(urls['nagios'].startswith(config['urls.nagios']))
            self.assert_(urls['graphs'].startswith(config['urls.graphs']))
sap_nl.ts | <TS language="nl" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Klik met de rechtermuisknop om het adres of label te wijzigen</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Maak een nieuw adres aan</translation>
</message>
<message>
<source>&New</source>
<translation>&Nieuw</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopieer het geselecteerde adres naar het klembord</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Kopiëren</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Verwijder het geselecteerde adres uit de lijst</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Verwijderen</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exporteer de data in de huidige tab naar een bestand</translation>
</message>
<message>
<source>&Export</source>
<translation>&Exporteren</translation>
</message>
<message>
<source>C&lose</source>
<translation>&Sluiten</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Kies het adres om munten naar toe te sturen</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Kies het adres om munten op te ontvangen</translation>
</message>
<message>
<source>C&hoose</source>
<translation>K&iezen</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Verzendadressen</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Ontvangstadressen</translation>
</message>
<message>
<source>These are your SAPP addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Dit zijn uw SAPP adressen om betalingen mee te verzenden. Controleer altijd zowel het bedrag als het ontvangstadres voor het verzenden van geld.</translation>
</message>
<message>
<source>These are your SAPP addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>Dit zijn uw SAPP adressen waarop betalingen kunnen worden ontvangen. Het wordt aangeraden om een nieuw ontvangstadres voor elke transactie te gebruiken.</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&Kopiëer Adres</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>Kopiëer &Label</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Bewerken</translation>
</message>
<message>
<source>Export Address List</source>
<translation>Exporteer Adreslijst</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Kommagescheiden bestand (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Export Mislukt</translation>
</message>
<message>
<source>There was an error trying to save the address list to %1. Please try again.</source>
<translation>Er was een fout opgetreden tijdens het opslaan van deze adreslijst naar %1. Probeer het nogmaals.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<source>(no label)</source>
<translation>(geen label)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Wachtwoordzin dialoog</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Voer wachtwoordzin in</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Nieuwe wachtwoordzin</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Herhaal nieuwe wachtwoordzin</translation>
</message>
<message>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation>Dient om de triviale sendmoney uit te schakelen wanneer het OS account in gevaar is gebracht. Geeft geen echte veiligheid.</translation>
</message>
<message>
<source>For anonymization, automint, and staking only</source>
<translation>Alleen voor anonimisering, automint en staking</translation>
</message>
<message>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Voer een nieuw wachtwoord in voor uw portemonnee.<br/>Gebruik een wachtwoord van <b>tien of meer willekeurige karakters</b>, of <b>acht of meer woorden</b>.</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Versleutel portemonnee</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Deze operatie vereist uw portemonneewachtwoord om de portemonnee te openen.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Open portemonnee</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Deze operatie vereist uw portemonneewachtwoord om de portemonnee te ontsleutelen</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Ontsleutel portemonnee</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Wijzig wachtwoord</translation>
</message>
<message>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Voer de oude en nieuwe wachtwoordzin in voor uw portemonnee.</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Bevestig versleuteling van de portemonnee</translation>
</message>
<message>
<source>SAPP will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your SAPs from being stolen by malware infecting your computer.</source>
<translation>SAPP zal nu afsluiten om het versleutelingsproces te voltooien. Onthoud dat het versleutelen van uw portemonnee u niet volledig kan beschermen tegen malware infecties op uw computer die uw SAPP's kunnen stelen.</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Weet u zeker dat u uw portemonnee wilt versleutelen?</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR SAPP</b>!</source>
<translation>Waarschuwing: Als u uw portemonnee versleutelt en uw wachtwoord vergeet, zult u <b>AL UW SAPP VERLIEZEN</b>!</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>Portemonnee versleuteld</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>BELANGRIJK: Elke eerder gemaakte backup van uw portemonneebestand dient te worden vervangen door het nieuwe gegenereerde, versleutelde portemonneebestand. Om veiligheidsredenen zullen eerdere backups van het niet-versleutelde portemonneebestand onbruikbaar worden zodra u uw nieuwe, versleutelde, portemonnee begint te gebruiken.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Portemonneeversleuteling mislukt</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Portemonneeversleuteling mislukt door een interne fout. Uw portemonnee is niet versleuteld.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>De opgegeven wachtwoordzinnen komen niet overeen.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>Portemonnee openen mislukt</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>De opgegeven wachtwoordzin voor de portemonnee ontsleuteling is niet correct.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>Portemonnee ontsleuteling mislukt</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>Portemonnee wachtwoordzin is met succes gewijzigd.</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>Waarschuwing: De Caps-Lock-toets staat aan!</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
<message>
<source>IP/Netmask</source>
<translation>IP/Netmask</translation>
</message>
<message>
<source>Banned Until</source>
<translation>Verboden tot</translation>
</message>
</context>
<context>
<name>Bip38ToolDialog</name>
<message>
<source>BIP 38 Tool</source>
<translation>BIP 38 Tool</translation>
</message>
<message>
<source>&BIP 38 Encrypt</source>
<translation>&BIP 38 Versleuteling</translation>
</message>
<message>
<source>Address:</source>
<translation>Adres:</translation>
</message>
<message>
<source>Enter a SAPP Address that you would like to encrypt using BIP 38. Enter a passphrase in the middle box. Press encrypt to compute the encrypted private key.</source>
<translation>Voer een SAPP adres in dat u wilt coderen met BIP 38. Voer een wachtwoordzin in in het middelste vak. Druk op versleutelen om de versleutelde privé sleutel te berekenen.</translation>
</message>
<message>
<source>The SAPP address to encrypt</source>
<translation>Het SAPP adres om te versleutelen</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Kies een eerder gebruikt adres</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Plak adres vanaf klembord</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Passphrase: </source>
<translation>Wachtwoord:</translation>
</message>
<message>
<source>Encrypted Key:</source>
<translation>Versleutelde sleutel:</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>Kopieer de huidige handtekening naar het systeemklembord</translation>
</message>
<message>
<source>Encrypt the private key for this SAPP address</source>
<translation>Versleutel de privé sleutel voor dit SAPP adres</translation>
</message>
<message>
<source>Reset all fields</source>
<translation>Reset alle velden</translation>
</message>
<message>
<source>The encrypted private key</source>
<translation>De versleutelde privé sleutel</translation>
</message>
<message>
<source>Decrypt the entered key using the passphrase</source>
<translation>Decodeer de ingevoerde sleutel met behulp van de wachtwoordzin</translation>
</message>
<message>
<source>Encrypt &Key</source>
<translation>Versleutel &Sleutel</translation>
</message>
<message>
<source>Clear &All</source>
<translation>Verwijder &Alles</translation>
</message>
<message>
<source>&BIP 38 Decrypt</source>
<translation>&BIP 38 ontsleuteling</translation>
</message>
<message>
<source>Enter the BIP 38 encrypted private key. Enter the passphrase in the middle box. Click Decrypt Key to compute the private key. After the key is decrypted, clicking 'Import Address' will add this private key to the wallet.</source>
<translation>Voer de BIP 38 versleutelde geheime sleutel in. Voer de wachtwoordzin in, in het middelste veld. Druk op Ontsleutel Sleutel om de privé sleutel te berekenen. Nadat de privé sleutel is ontsleuteld, zal het klikken op 'Adres Importeren' de privé sleutel toevoegen aan de portemonnee.</translation>
</message>
<message>
<source>Decrypt &Key</source>
<translation>Ontsleutelen &Sleutel</translation>
</message>
<message>
<source>Decrypted Key:</source>
<translation>Ontsleutelde sleutel:</translation>
</message>
<message>
<source>Import Address</source>
<translation>Adres Importeren</translation>
</message>
<message>
<source>Click "Decrypt Key" to compute key</source>
<translation>Klik "Ontsleutelen Sleutel" om de sleutel te berekenen</translation>
</message>
<message>
<source>The entered passphrase is invalid. </source>
<translation>De ingevoerde wachtwoordzin is ongeldig.</translation>
</message>
<message>
<source>Allowed: 0-9,a-z,A-Z,</source>
<translation>Toegestaan: 0-9,a-z,A-Z</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>Het ingevoerde adres is ongeldig.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>Controleer het adres en probeer het opnieuw.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>Het opgegeven adres verwijst niet naar een sleutel.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>Portemonnee ontsleuteling is geannuleerd.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>Privé sleutel voor het ingevoerde adres is niet beschikbaar.</translation>
</message>
<message>
<source>Failed to decrypt.</source>
<translation>Ontsleutelen mislukt.</translation>
</message>
<message>
<source>Please check the key and passphrase and try again.</source>
<translation>Controleer de sleutel en de wachtwoordzin en probeer het opnieuw.</translation>
</message>
<message>
<source>Data Not Valid.</source>
<translation>Gegevens ongeldig.</translation>
</message>
<message>
<source>Please try again.</source>
<translation>Probeer het opnieuw.</translation>
</message>
<message>
<source>Please wait while key is imported</source>
<translation>Wacht tot de sleutel is geïmporteerd</translation>
</message>
<message>
<source>Key Already Held By Wallet</source>
<translation>Sleutel al aanwezig in portemonnee</translation>
</message>
<message>
<source>Error Adding Key To Wallet</source>
<translation>Fout bij het toevoegen van de sleutel</translation>
</message>
<message>
<source>Successfully Added Private Key To Wallet</source>
<translation>De privé sleutel is met succes toegevoegd aan de portemonnee</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Wallet</source>
<translation>Portemonnee</translation>
</message>
<message>
<source>Node</source>
<translation>Node</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Overzicht</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Toon algemeen overzicht van de portemonnee</translation>
</message>
<message>
<source>&Send</source>
<translation>&Verzenden</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Ontvangen</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transacties</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Blader door transactiegeschiedenis</translation>
</message>
<message>
<source>Privacy Actions for zSAP</source>
<translation>Privacy Acties voor zSAP</translation>
</message>
<message>
<source>E&xit</source>
<translation>S&luiten</translation>
</message>
<message>
<source>Quit application</source>
<translation>Programma afsluiten</translation>
</message>
<message>
<source>About &Qt</source>
<translation>Over &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Toon informatie over Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Opties...</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Tonen / Verbergen</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Toon of verberg het hoofdvenster</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>&Versleutel portemonnee...</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Versleutel de privé sleutels die bij uw portemonnee horen</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&Backup Portemonnee...</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Backup portemonnee naar een andere locatie</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>&Wijzig Wachtwoordzin...</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Wijzig de wachtwoordzin die gebruikt wordt voor uw portemonnee versleuteling</translation>
</message>
<message>
<source>&Unlock Wallet...</source>
<translation>&Open portemonnee...</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Open portemonnee</translation>
</message>
<message>
<source>&Lock Wallet</source>
<translation>&Sluit portemonnee</translation>
</message>
<message>
<source>Sign &message...</source>
<translation>Bericht &Ondertekenen...</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>Bericht &Verifiëren... </translation>
</message>
<message>
<source>&Information</source>
<translation>&Informatie</translation>
</message>
<message>
<source>Show diagnostic information</source>
<translation>Toon diagnostische informatie</translation>
</message>
<message>
<source>&Debug console</source>
<translation>&Debug console</translation>
</message>
<message>
<source>Open debugging console</source>
<translation>Open debugging console</translation>
</message>
<message>
<source>&Network Monitor</source>
<translation>&Netwerk Monitor</translation>
</message>
<message>
<source>Show network monitor</source>
<translation>Toon netwerk monitor</translation>
</message>
<message>
<source>&Peers list</source>
<translation>&Peers lijst</translation>
</message>
<message>
<source>Show peers info</source>
<translation>Toon informatie van peers</translation>
</message>
<message>
<source>Wallet &Repair</source>
<translation>Portemonnee &Repareren </translation>
</message>
<message>
<source>Show wallet repair options</source>
<translation>Toon portemonnee reparatie opties</translation>
</message>
<message>
<source>Open configuration file</source>
<translation>Open configuratie bestand</translation>
</message>
<message>
<source>Show Automatic &Backups</source>
<translation>Toon Automatische &Backups</translation>
</message>
<message>
<source>Show automatically created wallet backups</source>
<translation>Toon automatisch aangemaakte portemonnee backups</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>&Verzendadressen...</translation>
</message>
<message>
<source>Show the list of used sending addresses and labels</source>
<translation>Toon de lijst van gebruikte verzendadressen en labels</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>&Ontvangstadressen...</translation>
</message>
<message>
<source>Show the list of used receiving addresses and labels</source>
<translation>Toon de lijst van gebruikte ontvangstadressen en labels</translation>
</message>
<message>
<source>&Multisignature creation...</source>
<translation>&Multisignature aanmaak...</translation>
</message>
<message>
<source>Create a new multisignature address and add it to this wallet</source>
<translation>Creëer een nieuw multisignature adres en voeg het toe aan deze portemonnee</translation>
</message>
<message>
<source>&Multisignature spending...</source>
<translation>&Multisignature spenderen...</translation>
</message>
<message>
<source>Spend from a multisignature address</source>
<translation>Besteden vanuit een multisignature adres</translation>
</message>
<message>
<source>&Multisignature signing...</source>
<translation>Ondertekenen van &Multisignature...</translation>
</message>
<message>
<source>Sign with a multisignature address</source>
<translation>Onderteken met een multisignature adres</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>Open &URI...</translation>
</message>
<message>
<source>&Command-line options</source>
<translation>&Opdrachtregelopties</translation>
</message>
<message numerus="yes">
<source>Processed %n blocks of transaction history.</source>
<translation><numerusform>%n blokken aan transactiegeschiedenis verwerkt.</numerusform><numerusform>%n blokken aan transactiegeschiedenis verwerkt.</numerusform></translation>
</message>
<message>
<source>Synchronizing additional data: %p%</source>
<translation>Synchroniseren aanvullende gegevens: %p%</translation>
</message>
<message>
<source>%1 behind. Scanning block %2</source>
<translation>%1 achter. Scannen van block %2</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b> for anonymization and staking only</source>
<translation>Portemonnee is <b>versleuteld</b> en momenteel <b>geopend</b> enkel voor anonimisering en staking</translation>
</message>
<message>
<source>&File</source>
<translation>&Bestand</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Instellingen</translation>
</message>
<message>
<source>&Tools</source>
<translation>&Gereedschap</translation>
</message>
<message>
<source>&Help</source>
<translation>&Help</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Tabblad werkbalk</translation>
</message>
<message>
<source>Sapphire Core</source>
<translation>Sapphire Core</translation>
</message>
<message>
<source>Send coins to a SAPP address</source>
<translation>Verstuur munten naar een SAPP adres</translation>
</message>
<message>
<source>Request payments (generates QR codes and sap: URIs)</source>
<translation>Vraag betaling aan (genereert QR codes en SAPP: URI's)</translation>
</message>
<message>
<source>&Privacy</source>
<translation>&Privacy</translation>
</message>
<message>
<source>&Masternodes</source>
<translation>&Masternodes</translation>
</message>
<message>
<source>Browse masternodes</source>
<translation>Bekijk masternodes</translation>
</message>
<message>
<source>&About Sapphire Core</source>
<translation>&Over Sapphire Core</translation>
</message>
<message>
<source>Show information about Sapphire Core</source>
<translation>Toon informatie over SAPP Kern</translation>
</message>
<message>
<source>Modify configuration options for SAPP</source>
<translation>Wijzig SAPP configuratie opties</translation>
</message>
<message>
<source>Sign messages with your SAPP addresses to prove you own them</source>
<translation>Onderteken berichten met uw SAPP adressen om te bewijzen dat deze adressen in uw bezit zijn</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified SAPP addresses</source>
<translation>Controleer berichten om te verzekeren dat deze ondertekend zijn met de gespecificeerde SAPP adressen</translation>
</message>
<message>
<source>&BIP38 tool</source>
<translation>&BIP38 tool</translation>
</message>
<message>
<source>Encrypt and decrypt private keys using a passphrase</source>
<translation>Versleutelen en ontsleutel privé sleutels door middel van een wachtwoordzin</translation>
</message>
<message>
<source>&MultiSend</source>
<translation>&MultiSend</translation>
</message>
<message>
<source>MultiSend Settings</source>
<translation>MultiSend instellingen</translation>
</message>
<message>
<source>Open Wallet &Configuration File</source>
<translation>Open Portemonnee &Configuratiebestand</translation>
</message>
<message>
<source>Open &Masternode Configuration File</source>
<translation>Open &Masternode Configuratiebestand</translation>
</message>
<message>
<source>Open Masternode configuration file</source>
<translation>Open Masternode configuratiebestand</translation>
</message>
<message>
<source>Open a SAPP: URI or payment request</source>
<translation>Open een SAPP: URI of betaalverzoek</translation>
</message>
<message>
<source>&Blockchain explorer</source>
<translation>&Blockchain verkenner</translation>
</message>
<message>
<source>Block explorer window</source>
<translation>Block verkenner venster</translation>
</message>
<message>
<source>Show the Sapphire Core help message to get a list with possible SAPP command-line options</source>
<translation>Toon het Sapphire Core help bericht om een lijst te krijgen met mogelijke SAPP command line opties</translation>
</message>
<message>
<source>Sapphire Core client</source>
<translation>Sapphire Core client</translation>
</message>
<message numerus="yes">
<source>%n active connection(s) to SAPP network</source>
<translation><numerusform>%n actieve verbindingen met het SAPP netwerk</numerusform><numerusform>%n actieve connectie(s) naar SAPP netwerk</numerusform></translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Synchroniseren met het netwerk...</translation>
</message>
<message>
<source>Importing blocks from disk...</source>
<translation>Blocks aan het importeren vanaf schijf...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Blocks opnieuw aan het indexeren op de schijf...</translation>
</message>
<message>
<source>No block source available...</source>
<translation>Geen block bron beschikbaar</translation>
</message>
<message>
<source>Up to date</source>
<translation>Bijgewerkt</translation>
</message>
<message numerus="yes">
<source>%n hour(s)</source>
<translation><numerusform>%n uur</numerusform><numerusform>%n uren</numerusform></translation>
</message>
<message numerus="yes">
<source>%n day(s)</source>
<translation><numerusform>%n dag</numerusform><numerusform>%n dagen</numerusform></translation>
</message>
<message numerus="yes">
<source>%n week(s)</source>
<translation><numerusform>%n weken</numerusform><numerusform>%n weken</numerusform></translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 en %2</translation>
</message>
<message numerus="yes">
<source>%n year(s)</source>
<translation><numerusform>%n jaar</numerusform><numerusform>%n jaren</numerusform></translation>
</message>
<message>
<source>Catching up...</source>
<translation>Aan het bijwerken...</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>Laatst ontvangen block was %1 geleden gegenereerd.</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>Transacties hierna zullen nog niet zichtbaar zijn.</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
<message>
<source>Warning</source>
<translation>Waarschuwing</translation>
</message>
<message>
<source>Information</source>
<translation>Informatie</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Verzend transactie</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Inkomende transactie</translation>
</message>
<message>
<source>Sent MultiSend transaction</source>
<translation>Verzonden MultiSend transactie</translation>
</message>
<message>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Datum: %1
Hoeveelheid: %2
Type: %3
Adres: %4
</translation>
</message>
<message>
<source>Staking is active
MultiSend: %1</source>
<translation>Staking is actief
Multisend: %1</translation>
</message>
<message>
<source>Active</source>
<translation>Actief</translation>
</message>
<message>
<source>Not Active</source>
<translation>Niet actief</translation>
</message>
<message>
<source>Staking is not active
MultiSend: %1</source>
<translation>Staking is niet actief
MultiSend: %1</translation>
</message>
<message>
<source>AutoMint is currently enabled and set to </source>
<translation>AutoMint is momenteel ingeschakeld en ingesteld op</translation>
</message>
<message>
<source>AutoMint is disabled</source>
<translation>AutoMint is uitgeschakeld</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Portemonnee is <b>versleuteld</b> en momenteel <b>geopend</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Portemonnee is <b>versleuteld</b> en momenteel <b>vergrendeld</b></translation>
</message>
</context>
<context>
<name>BlockExplorer</name>
<message>
<source>Blockchain Explorer</source>
<translation>Blockchain Verkenner</translation>
</message>
<message>
<source>Back</source>
<translation>Terug</translation>
</message>
<message>
<source>Forward</source>
<translation>Volgende</translation>
</message>
<message>
<source>Address / Block / Transaction</source>
<translation>Adres / Block / Transactie</translation>
</message>
<message>
<source>Search</source>
<translation>Zoeken</translation>
</message>
<message>
<source>TextLabel</source>
<translation>TextLabel</translation>
</message>
<message>
<source>Not all transactions will be shown. To view all transactions you need to set txindex=1 in the configuration file (sap.conf).</source>
<translation>Niet alle transacties zullen worden getoond. Om alle transacties te bekijken moet u txindex=1 instellen in het configuratie bestand (sap.conf).</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<source>Total: %1 (IPv4: %2 / IPv6: %3 / Tor: %4 / Unknown: %5)</source>
<translation>Totaal: %1 (IPv4: %2 / IPv6: %3 / Tor: %4 / Onbekend: %5)</translation>
</message>
<message>
<source>Network Alert</source>
<translation>Netwerk waarschuwing</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Quantity:</source>
<translation>Kwantiteit:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Bedrag:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Prioriteit:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Fee:</translation>
</message>
<message>
<source>Coin Selection</source>
<translation>Munt Selectie</translation>
</message>
<message>
<source>Dust:</source>
<translation>Dust:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Na de fee:</translation>
</message>
<message>
<source>Change:</source>
<translation>Wisselgeld:</translation>
</message>
<message>
<source>(un)select all</source>
<translation>(de)selecteer alles</translation>
</message>
<message>
<source>toggle lock state</source>
<translation>Schakel lock status</translation>
</message>
<message>
<source>Tree mode</source>
<translation>Tree modus</translation>
</message>
<message>
<source>List mode</source>
<translation>Lijst modus</translation>
</message>
<message>
<source>(1 locked)</source>
<translation>(1 locked)</translation>
</message>
<message>
<source>Amount</source>
<translation>Bedrag</translation>
</message>
<message>
<source>Received with label</source>
<translation>Ontvangen met label</translation>
</message>
<message>
<source>Received with address</source>
<translation>Ontvangen met adres</translation>
</message>
<message>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Bevestigingen</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Bevestigd</translation>
</message>
<message>
<source>Priority</source>
<translation>Prioriteit</translation>
</message>
<message>
<source>Copy address</source>
<translation>Kopieer adres</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopiëer label</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopieer bedrag</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Kopieer transactie ID</translation>
</message>
<message>
<source>Lock unspent</source>
<translation>Lock niet besteed</translation>
</message>
<message>
<source>Unlock unspent</source>
<translation>Unlock niet besteed</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Kopieer kwantiteit</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Kopieer fee</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Kopieer na fee</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Kopieer bytes</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Kopieer prioriteit</translation>
</message>
<message>
<source>Copy dust</source>
<translation>Kopieer dust</translation>
</message>
<message>
<source>Copy change</source>
<translation>Kopieer wisselgeld</translation>
</message>
<message>
<source>Please switch to "List mode" to use this function.</source>
<translation>Schakel alsjeblieft over naar "Lijst modus" om deze functie te gebruiken.</translation>
</message>
<message>
<source>highest</source>
<translation>hoogste</translation>
</message>
<message>
<source>higher</source>
<translation>hoger</translation>
</message>
<message>
<source>high</source>
<translation>hoog</translation>
</message>
<message>
<source>medium-high</source>
<translation>medium-hoog</translation>
</message>
<message>
<source>medium</source>
<translation>medium</translation>
</message>
<message>
<source>low-medium</source>
<translation>laag-medium</translation>
</message>
<message>
<source>low</source>
<translation>laag</translation>
</message>
<message>
<source>lower</source>
<translation>lager</translation>
</message>
<message>
<source>lowest</source>
<translation>laagst</translation>
</message>
<message>
<source>(%1 locked)</source>
<translation>(%1 vergrendeld)</translation>
</message>
<message>
<source>none</source>
<translation>geen</translation>
</message>
<message>
<source>yes</source>
<translation>ja</translation>
</message>
<message>
<source>no</source>
<translation>nee</translation>
</message>
<message>
<source>This label turns red, if the transaction size is greater than 1000 bytes.</source>
<translation>Dit label wordt rood, als de transactiegrootte groter is dan 1000 bytes.</translation>
</message>
<message>
<source>This means a fee of at least %1 per kB is required.</source>
<translation>Dit betekent dat een fee van minstens %1 per kB vereist is.</translation>
</message>
<message>
<source>Can vary +/- 1 byte per input.</source>
<translation>Kan variëren van +/- 1 byte per invoer.</translation>
</message>
<message>
<source>Transactions with higher priority are more likely to get included into a block.</source>
<translation>Transacties met hogere prioriteit zullen waarschijnlijk eerder in een blok opgenomen worden.</translation>
</message>
<message>
<source>This label turns red, if the priority is smaller than "medium".</source>
<translation>Dit label wordt rood, als de prioriteit lager is dan "medium".</translation>
</message>
<message>
<source>This label turns red, if any recipient receives an amount smaller than %1.</source>
<translation>Dit label wordt rood, als een ontvanger een hoeveelheid kleiner dan %1 ontvangt.</translation>
</message>
<message>
<source>Can vary +/- %1 usap per input.</source>
<translation>Kan +/-%1 usap per invoer variëren.</translation>
</message>
<message>
<source>(no label)</source>
<translation>(geen label)</translation>
</message>
<message>
<source>change from %1 (%2)</source>
<translation>wisselgeld van %1 (%2)</translation>
</message>
<message>
<source>(change)</source>
<translation>(wisselgeld)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Wijzig Adres</translation>
</message>
<message>
<source>&Label</source>
<translation>&Label</translation>
</message>
<message>
<source>The label associated with this address list entry</source>
<translation>Het label dat geassocieerd is met deze adreslijst invoer</translation>
</message>
<message>
<source>&Address</source>
<translation>&Adres</translation>
</message>
<message>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation>Het adres geassocieerd met deze adreslijst invoer. Dit kan alleen worden gewijzigd voor verzend adressen.</translation>
</message>
<message>
<source>New receiving address</source>
<translation>Nieuw ontvangst adres</translation>
</message>
<message>
<source>New sending address</source>
<translation>Nieuw verzend adres</translation>
</message>
<message>
<source>Edit receiving address</source>
<translation>Wijzig ontvangst adres</translation>
</message>
<message>
<source>Edit sending address</source>
<translation>Wijzig verzend adres</translation>
</message>
<message>
<source>The entered address "%1" is not a valid SAPP address.</source>
<translation>Het ingevoerde adres: "%1" is geen geldig SAPP adres.</translation>
</message>
<message>
<source>The entered address "%1" is already in the address book.</source>
<translation>Het ingevoerde adres: "%1" staat al in uw adresboek.</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>Kan portemonnee niet ontgrendelen.</translation>
</message>
<message>
<source>New key generation failed.</source>
<translation>Aanmaken nieuwe key mislukt.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>A new data directory will be created.</source>
<translation>Een nieuwe data directory zal worden aangemaakt.</translation>
</message>
<message>
<source>name</source>
<translation>naam</translation>
</message>
<message>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>Map bestaat reeds. Voeg %1 toe als je van plan bent om hier een nieuwe map te creëren.</translation>
</message>
<message>
<source>Path already exists, and is not a directory.</source>
<translation>Pad bestaat al en is geen directory.</translation>
</message>
<message>
<source>Cannot create data directory here.</source>
<translation>Kan de data directory hier niet aanmaken.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>version</source>
<translation>versie</translation>
</message>
<message>
<source>Sapphire Core</source>
<translation>Sapphire Core</translation>
</message>
<message>
<source>(%1-bit)</source>
<translation>(%1-bit)</translation>
</message>
<message>
<source>About Sapphire Core</source>
<translation>Over Sapphire Core</translation>
</message>
<message>
<source>Command-line options</source>
<translation>Command-line opties</translation>
</message>
<message>
<source>Usage:</source>
<translation>Gebruik:</translation>
</message>
<message>
<source>command-line options</source>
<translation>command-line opties</translation>
</message>
<message>
<source>UI Options:</source>
<translation>UI Opties:</translation>
</message>
<message>
<source>Choose data directory on startup (default: %u)</source>
<translation>Kies data directory bij opstarten (standaard: %u)</translation>
</message>
<message>
<source>Show splash screen on startup (default: %u)</source>
<translation>Toon splash scherm bij opstarten (standaard: %u)</translation>
</message>
<message>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Stel taal in, bijvoorbeeld "de_DE" (standaard: systeem locale)</translation>
</message>
<message>
<source>Start minimized</source>
<translation>Geminimaliseerd starten</translation>
</message>
<message>
<source>Set SSL root certificates for payment request (default: -system-)</source>
<translation>Stel SSL root certificaten in voor betalingsverzoek (standaard: -systeem-)</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>Welkom</translation>
</message>
<message>
<source>Welcome to Sapphire Core.</source>
<translation>Welkom bij Sapphire Core.</translation>
</message>
<message>
<source>As this is the first time the program is launched, you can choose where Sapphire Core will store its data.</source>
<translation>Aangezien dit de eerste keer is dat het programma is gestart, kun je kiezen waar Sapphire Core zijn data opslaat.</translation>
</message>
<message>
<source>Sapphire Core will download and store a copy of the SAPP block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source>
<translation>Sapphire Core zal een kopie van de SAPP blockchain downloaden en opslaan. Tenminste %1GB aan data zal worden opgeslagen in deze map en het zal over de tijd groeien. De portemonnee zal ook in deze map worden opgeslagen.</translation>
</message>
<message>
<source>Use the default data directory</source>
<translation>Gebruik de standaard data directory</translation>
</message>
<message>
<source>Use a custom data directory:</source>
<translation>Gebruik een aangepaste data directory:</translation>
</message>
<message>
<source>Sapphire Core</source>
<translation>Sapphire Core</translation>
</message>
<message>
<source>Error: Specified data directory "%1" cannot be created.</source>
<translation>Fout: opgegeven data directory "%1" kan niet worden gemaakt.</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
<message>
<source>%1 GB of free space available</source>
<translation>%1 GB beschikbare schijfruimte</translation>
</message>
<message>
<source>(of %1 GB needed)</source>
<translation>(van de %1 GB benodigd)</translation>
</message>
</context>
<context>
<name>MasternodeList</name>
<message>
<source>Form</source>
<translation>Formulier</translation>
</message>
<message>
<source>MASTERNODES</source>
<translation>MASTERNODES</translation>
</message>
<message>
<source>Note: Status of your masternodes in local wallet can potentially be slightly incorrect.<br />Always wait for wallet to sync additional data and then double check from another node<br />if your node should be running but you still see "MISSING" in "Status" field.</source>
<translation>Opmerking: De status van uw masternodes in de lokale portemonnee kan mogelijk iets afwijken. <br />Wacht altijd op de portemonnee om additionele data te synchroniseren en verifieer dit vanuit een andere node <br />mocht de node nu aan het draaien zijn maar je ziet nog steeds 'MISSING' in het 'Status' veld.</translation>
</message>
<message>
<source>Alias</source>
<translation>Alias</translation>
</message>
<message>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<source>Protocol</source>
<translation>Protocol</translation>
</message>
<message>
<source>Status</source>
<translation>Status</translation>
</message>
<message>
<source>Active</source>
<translation>Actief</translation>
</message>
<message>
<source>Last Seen (UTC)</source>
<translation>Laatst gezien (UTC)</translation>
</message>
<message>
<source>Pubkey</source>
<translation>Pubkey</translation>
</message>
<message>
<source>S&tart alias</source>
<translation>S&tart alias</translation>
</message>
<message>
<source>Start &all</source>
<translation>Start &alles</translation>
</message>
<message>
<source>Start &MISSING</source>
<translation>Start &MISSING</translation>
</message>
<message>
<source>&Update status</source>
<translation>&Status bijwerken</translation>
</message>
<message>
<source>Status will be updated automatically in (sec):</source>
<translation>Status wordt automatisch bijgewerkt in (sec):</translation>
</message>
<message>
<source>0</source>
<translation>0</translation>
</message>
<message>
<source>Start alias</source>
<translation>Start alias</translation>
</message>
<message>
<source>Confirm masternode start</source>
<translation>Bevestig start masternode</translation>
</message>
<message>
<source>Are you sure you want to start masternode %1?</source>
<translation>Weet je zeker dat je masternode %1 wilt starten?</translation>
</message>
<message>
<source>Confirm all masternodes start</source>
<translation>Bevestig start alle masternodes</translation>
</message>
<message>
<source>Are you sure you want to start ALL masternodes?</source>
<translation>Weet je zeker dat je ALLE masternodes wilt starten?</translation>
</message>
<message>
<source>Command is not available right now</source>
<translation>Commando is nu niet beschikbaar</translation>
</message>
<message>
<source>You can't use this command until masternode list is synced</source>
<translation>Je kunt dit commando niet gebruiken tot de masternode lijst is gesynchroniseerd</translation>
</message>
<message>
<source>Confirm missing masternodes start</source>
<translation>Bevestig start ontbrekende masternodes</translation>
</message>
<message>
<source>Are you sure you want to start MISSING masternodes?</source>
<translation>Weet je zeker dat je alle ONTBREKENDE masternodes wilt starten?</translation>
</message>
</context>
<context>
<name>MultiSendDialog</name>
<message>
<source>MultiSend</source>
<translation>MultiSend</translation>
</message>
<message>
<source>Enter whole numbers 1 - 100</source>
<translation>Geef volledige cijfers in 1 - 100</translation>
</message>
<message>
<source>Enter % to Give (1-100)</source>
<translation>Voer % in om te geven (1-100)</translation>
</message>
<message>
<source>Enter Address to Send to</source>
<translation>Vul het adres in om naar te verzenden</translation>
</message>
<message>
<source>MultiSend allows you to automatically send up to 100% of your stake or masternode reward to a list of other SAPP addresses after it matures.
To Add: enter percentage to give and SAPP address to add to the MultiSend vector.
To Delete: Enter address to delete and press delete.
MultiSend will not be activated unless you have clicked Activate</source>
<translation>MultiSend stelt je in staat om automatisch tot 100% van je stake of masternode beloning te verzenden naar een lijst van andere SAPP adressen nadat deze volwassen is.
Toevoegen: voer het te geven percentage in en SAPP adres om toe te voegen aan de MultiSend vector.
Verwijderen: Voer adres in om te verwijderen en druk op delete.
MultiSend zal niet worden geactiveerd tenzij je op Activeer hebt geklikt</translation>
</message>
<message>
<source>Add to MultiSend Vector</source>
<translation>Aan MultiSend Vector toevoegen</translation>
</message>
<message>
<source>Add</source>
<translation>Toevoegen</translation>
</message>
<message>
<source>Deactivate MultiSend</source>
<translation>Deactiveer MultiSend</translation>
</message>
<message>
<source>Deactivate</source>
<translation>Deactiveren</translation>
</message>
<message>
<source>Choose an address from the address book</source>
<translation>Kies een adres uit het adresboek</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Percentage of stake to send</source>
<translation>Staking percentage om te verzenden</translation>
</message>
<message>
<source>Percentage:</source>
<translation>Percentage:</translation>
</message>
<message>
<source>Address to send portion of stake to</source>
<translation>Adres om een deel van de stake te verzenden naar</translation>
</message>
<message>
<source>Address:</source>
<translation>Adres:</translation>
</message>
<message>
<source>Label:</source>
<translation>Label:</translation>
</message>
<message>
<source>Enter a label for this address to add it to your address book</source>
<translation>Voer een label in voor dit adres om het toe te voegen aan jouw adresboek</translation>
</message>
<message>
<source>Delete Address From MultiSend Vector</source>
<translation>Verwijder adres van MultiSend Vector</translation>
</message>
<message>
<source>Delete</source>
<translation>Wissen</translation>
</message>
<message>
<source>Activate MultiSend</source>
<translation>Activeer MultiSend</translation>
</message>
<message>
<source>Activate</source>
<translation>Activeren</translation>
</message>
<message>
<source>View MultiSend Vector</source>
<translation>Bekijk MultiSend Vector</translation>
</message>
<message>
<source>View MultiSend</source>
<translation>Bekijk MultiSend</translation>
</message>
<message>
<source>Send For Stakes</source>
<translation>Verstuur voor stakes</translation>
</message>
<message>
<source>Send For Masternode Rewards</source>
<translation>Verstuur voor Masternode beloningen</translation>
</message>
<message>
<source>(no label)</source>
<translation>(geen label)</translation>
</message>
<message>
<source>The entered address:
</source>
<translation>Het ingevoerde adres:
</translation>
</message>
<message>
<source> is invalid.
Please check the address and try again.</source>
<translation> is ongeldig.
Controleer het adres alsjeblieft en probeer het opnieuw.</translation>
</message>
<message>
<source>The total amount of your MultiSend vector is over 100% of your stake reward
</source>
<translation>De totale hoeveelheid van je MultiSend vector is meer dan 100% van je stake beloning
</translation>
</message>
<message>
<source>Please Enter 1 - 100 for percent.</source>
<translation>Vul alsjeblieft 1 - 100 voor procent in.</translation>
</message>
<message>
<source>Saved the MultiSend to memory, but failed saving properties to the database.
</source>
<translation>De MultiSend in het geheugen opgeslagen, maar mislukt om de eigenschappen op te slaan in de database.
</translation>
</message>
<message>
<source>MultiSend Vector
</source>
<translation>MultiSend Vector
</translation>
</message>
<message>
<source>Removed </source>
<translation>Verwijderd </translation>
</message>
<message>
<source>Could not locate address
</source>
<translation>Kan het adres niet lokaliseren
</translation>
</message>
</context>
<context>
<name>MultisigDialog</name>
<message>
<source>Multisignature Address Interactions</source>
<translation>Multisignature Adres Interacties</translation>
</message>
<message>
<source>Create MultiSignature &Address</source>
<translation>Creëer MultiSignature &Adres</translation>
</message>
<message>
<source>How many people must sign to verify a transaction</source>
<translation>Hoeveel mensen moeten ondertekenen om een transactie te verifiëren</translation>
</message>
<message>
<source>Enter the minimum number of signatures required to sign transactions</source>
<translation>Voer het minimale aantal vereiste ondertekeningen in om de transacties te ondertekenen</translation>
</message>
<message>
<source>Address Label:</source>
<translation>Adreslabel:</translation>
</message>
<message>
<source>Add another address that could sign to verify a transaction from the multisig address.</source>
<translation>Voeg een ander adres toe die kan ondertekenen om een transactie vanuit het multisig adres te verifiëren.</translation>
</message>
<message>
<source>&Add Address / Key</source>
<translation>&Voeg Adres / Sleutel toe</translation>
</message>
<message>
<source>Local addresses or public keys that can sign:</source>
<translation>Lokale adressen of publieke sleutels die kunnen ondertekenen:</translation>
</message>
<message>
<source>Create a new multisig address</source>
<translation>Creëer een nieuw multisig adres</translation>
</message>
<message>
<source>C&reate</source>
<translation>C&reëer</translation>
</message>
<message>
<source>Status:</source>
<translation>Status:</translation>
</message>
<message>
<source>Use below to quickly import an address by its redeem. Don't forget to add a label before clicking import!
Keep in mind, the wallet will rescan the blockchain to find transactions containing the new address.
Please be patient after clicking import.</source>
<translation>Gebruik onderstaande om snel een adres te importeren door in te wisselen. Vergeet niet om een label toe te voegen voordat u op importeren klikt!
Houd er rekening mee dat de portemonnee de blockchain opnieuw doorzoekt om transacties te vinden die het nieuwe adres bevatten.
Wees alsjeblieft geduldig nadat u op importeren hebt geklikt.</translation>
</message>
<message>
<source>&Import Redeem</source>
<translation>&Importeer Inlossen</translation>
</message>
<message>
<source>&Create MultiSignature Tx</source>
<translation>&Creëer MultiSignature Tx</translation>
</message>
<message>
<source>Inputs:</source>
<translation>Inputs:</translation>
</message>
<message>
<source>Coin Control</source>
<translation>Munt Controle</translation>
</message>
<message>
<source>Quantity Selected:</source>
<translation>Hoeveelheid geselecteerd:</translation>
</message>
<message>
<source>0</source>
<translation>0</translation>
</message>
<message>
<source>Amount:</source>
<translation>Bedrag:</translation>
</message>
<message>
<source>Add an input to fund the outputs</source>
<translation>Voeg een invoer toe om de uitvoer te financieren</translation>
</message>
<message>
<source>Add a Raw Input</source>
<translation>Voer een Raw invoer in</translation>
</message>
<message>
<source>Address / Amount:</source>
<translation>Adres / Bedrag:</translation>
</message>
<message>
<source>Add destinations to send SAPP to</source>
<translation>Voeg bestemmingen toe om SAPP naar te verzenden</translation>
</message>
<message>
<source>Add &Destination</source>
<translation>Voeg &Bestemming toe</translation>
</message>
<message>
<source>Create a transaction object using the given inputs to the given outputs</source>
<translation>Maak een transactie object aan met behulp van de gegeven invoer voor de gegeven uitvoer</translation>
</message>
<message>
<source>Cr&eate</source>
<translation>Cr&eëer</translation>
</message>
<message>
<source>&Sign MultiSignature Tx</source>
<translation>&Onderteken MultiSignature Tx</translation>
</message>
<message>
<source>Transaction Hex:</source>
<translation>Transactie Hex:</translation>
</message>
<message>
<source>Sign the transaction from this wallet or from provided private keys</source>
<translation>Onderteken de transactie vanuit deze portemonnee of vanuit aangeboden privésleutels</translation>
</message>
<message>
<source>S&ign</source>
<translation>O&nderteken</translation>
</message>
<message>
<source><html><head/><body><p>DISABLED until transaction has been signed enough times.</p></body></html></source>
<translation><html><head/><body><p>UITGESCHAKELD totdat de transactie voldoende is ondertekend.</p></body></html></translation>
</message>
<message>
<source>Co&mmit</source>
<translation>&Engageer</translation>
</message>
<message>
<source>Add private keys to sign the transaction with</source>
<translation>Voeg privésleutels toe om de transactie met te ondertekenen</translation>
</message>
<message>
<source>Add Private &Key</source>
<translation>Voeg &Privésleutel toe</translation>
</message>
<message>
<source>Sign with only private keys (Not Recommened)</source>
<translation>Onderteken met enkel privésleutels (Niet Aanbevolen)</translation>
</message>
<message>
<source>Invalid Tx Hash.</source>
<translation>Ongeldige Tx Hash.</translation>
</message>
<message>
<source>Vout position must be positive.</source>
<translation>Vout positie moet positief zijn.</translation>
</message>
<message>
<source>Maximum possible addresses reached. (15)</source>
<translation>Maximale mogelijke adressen bereikt. (15)</translation>
</message>
<message>
<source>Vout Position: </source>
<translation>Vout Positie: </translation>
</message>
<message>
<source>Amount: </source>
<translation>Bedrag: </translation>
</message>
<message>
<source>Maximum (15)</source>
<translation>Maximum (15)</translation>
</message>
</context>
<context>
<name>ObfuscationConfig</name>
<message>
<source>Configure Obfuscation</source>
<translation>Configureer verduistering</translation>
</message>
<message>
<source>Basic Privacy</source>
<translation>Basis Privacy</translation>
</message>
<message>
<source>High Privacy</source>
<translation>Hoge Privacy</translation>
</message>
<message>
<source>Maximum Privacy</source>
<translation>Maximum Privacy</translation>
</message>
<message>
<source>Please select a privacy level.</source>
<translation>Selecteer alsjeblieft een privacy niveau.</translation>
</message>
<message>
<source>Use 2 separate masternodes to mix funds up to 10000 SAPP</source>
<translation>Gebruik 2 aparte masternodes om fondsen te mixen tot 10000 SAPP</translation>
</message>
<message>
<source>Use 8 separate masternodes to mix funds up to 10000 SAPP</source>
<translation>Gebruik 8 aparte masternodes om fondsen te mixen tot 10000 SAPP</translation>
</message>
<message>
<source>Use 16 separate masternodes</source>
<translation>Gebruik 16 aparte masternodes</translation>
</message>
<message>
<source>This option is the quickest and will cost about ~0.025 SAPP to anonymize 10000 SAPP</source>
<translation>Deze optie is het snelst en kost ongeveer ~0,025 SAPP om 10000 SAPP te anonimiseren</translation>
</message>
<message>
<source>This option is moderately fast and will cost about 0.05 SAPP to anonymize 10000 SAPP</source>
<translation>Deze optie is gematigd snel en kost ongeveer 0,05 SAPP om 10000 SAPP te anonimiseren</translation>
</message>
<message>
<source>This is the slowest and most secure option. Using maximum anonymity will cost</source>
<translation>Dit is de langzaamste en veiligste optie. Het gebruik van maximale anonimiteit kost</translation>
</message>
<message>
<source>0.1 SAPP per 10000 SAPP you anonymize.</source>
<translation>je anonimiseert 0,1 SAPP per 10000 SAPP.</translation>
</message>
<message>
<source>Obfuscation Configuration</source>
<translation>Verduistering configuratie</translation>
</message>
<message>
<source>Obfuscation was successfully set to basic (%1 and 2 rounds). You can change this at any time by opening SAPP's configuration screen.</source>
<translation>Verduistering is succesvol ingesteld op basic (%1 en 2 rondes). Je kunt dit op elk gewenst moment wijzigen door het configuratiescherm van SAPP te openen.</translation>
</message>
<message>
<source>Obfuscation was successfully set to high (%1 and 8 rounds). You can change this at any time by opening SAPP's configuration screen.</source>
<translation>Verduistering is succesvol ingesteld op hoog (%1 en 8 rondes). U kunt dit op elk gewenst moment wijzigen door het configuratiescherm van SAPP te openen.</translation>
</message>
<message>
<source>Obfuscation was successfully set to maximum (%1 and 16 rounds). You can change this at any time by opening SAPP's configuration screen.</source>
<translation>Verduistering is succesvol ingesteld op maximum (%1 en 16 rondes). U kunt dit op elk gewenst moment wijzigen door het configuratiescherm van SAPP te openen.</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Open URI</source>
<translation>URI openen</translation>
</message>
<message>
<source>Open payment request from URI or file</source>
<translation>Open betalingsverzoek van URL of bestand</translation>
</message>
<message>
<source>URI:</source>
<translation>URI:</translation>
</message>
<message>
<source>Select payment request file</source>
<translation>Selecteer betalingsverzoek bestand</translation>
</message>
<message>
<source>Select payment request file to open</source>
<translation>Selecteer betalingsverzoek bestand dat geopend moet worden</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Opties</translation>
</message>
<message>
<source>&Main</source>
<translation>&Algemeen</translation>
</message>
<message>
<source>Size of &database cache</source>
<translation>Grootte van &database cache</translation>
</message>
<message>
<source>MB</source>
<translation>MB</translation>
</message>
<message>
<source>Number of script &verification threads</source>
<translation>Aantal script &verificatie threads</translation>
</message>
<message>
<source>(0 = auto, <0 = leave that many cores free)</source>
<translation>(0 = auto, <0 = laat dat aantal cores vrij)</translation>
</message>
<message>
<source>W&allet</source>
<translation>P&ortemonnee</translation>
</message>
<message>
<source>If you disable the spending of unconfirmed change, the change from a transaction<br/>cannot be used until that transaction has at least one confirmation.<br/>This also affects how your balance is computed.</source>
<translation>Als je de uitgaven van onbevestigde wijzigingen uitschakelt, de wijziging van een transactie<br/>kan niet worden gebruikt tot die transactie tenminste één bevestiging heeft.<br/>Dit beïnvloedt ook hoe uw saldo is berekend.</translation>
</message>
<message>
<source>Automatically open the SAPP client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Open de SAPP client poort automatisch op de router. Dit werkt alleen als je router UPnP ondersteunt en het is ingeschakeld.</translation>
</message>
<message>
<source>Accept connections from outside</source>
<translation>Accepteer externe connecties</translation>
</message>
<message>
<source>Allow incoming connections</source>
<translation>Sta inkomende connecties toe</translation>
</message>
<message>
<source>&Connect through SOCKS5 proxy (default proxy):</source>
<translation>&Verbind via SOCKS5 proxy (standaard proxy):</translation>
</message>
<message>
<source>Expert</source>
<translation>Expert</translation>
</message>
<message>
<source>Automatically start SAPP after logging in to the system.</source>
<translation>SAPP automatisch opstarten na inloggen op het systeem.</translation>
</message>
<message>
<source>&Start SAPP on system login</source>
<translation>&SAPP starten bij systeemlogin</translation>
</message>
<message>
<source>Whether to show coin control features or not.</source>
<translation>Of je de muntcontrolefuncties wilt tonen of niet.</translation>
</message>
<message>
<source>Enable coin &control features</source>
<translation>Munt&controle functies inschakelen</translation>
</message>
<message>
<source>Show additional tab listing all your masternodes in first sub-tab<br/>and all masternodes on the network in second sub-tab.</source>
<translation>Toon extra tabblad waarin al je masternodes worden vermeld in het eerste sub-tabblad<br/>en alle masternodes op het netwerk in het tweede sub-tabblad.</translation>
</message>
<message>
<source>Show Masternodes Tab</source>
<translation>Toon Masternodes Tab</translation>
</message>
<message>
<source>&Spend unconfirmed change</source>
<translation>&Besteed onbevestigd wisselgeld</translation>
</message>
<message>
<source>&Network</source>
<translation>&Netwerk</translation>
</message>
<message>
<source>The user interface language can be set here. This setting will take effect after restarting SAPP.</source>
<translation>De gebruikersinterface taal kan hier ingesteld worden. Deze instelling zal uitgevoerd na herstart van SAPP.</translation>
</message>
<message>
<source>Language missing or translation incomplete? Help contributing translations here:
https://www.transifex.com/SapphireCoreCoin/SAP-project-translations</source>
<translation>Taal ontbreekt of vertaling onvolledig? Help om bij te dragen aan vertalingen:
https://www.transifex.com/SapphireCoreCoin/SAP-project-translations</translation>
</message>
<message>
<source>Map port using &UPnP</source>
<translation>Map poort gebruikt &UPnP</translation>
</message>
<message>
<source>Enable automatic minting of SAPP units to zSAP</source>
<translation>Schakel automatisch slaan van SAPP-eenheden in op zSAP</translation>
</message>
<message>
<source>Enable zSAP Automint</source>
<translation>Schakel zSAP Automint in</translation>
</message>
<message>
<source>Percentage of incoming SAPP which get automatically converted to zSAP via Zerocoin Protocol (min: 10%)</source>
<translation>Percentage inkomende SAPP die automatisch wordt omgezet naar zSAP via Zerocoin Protocol (min: 10%)</translation>
</message>
<message>
<source>Percentage of autominted zSAP</source>
<translation>Percentage autominted zSAP</translation>
</message>
<message>
<source>Wait with automatic conversion to Zerocoin until enough SAPP for this denomination is available</source>
<translation>Wacht met automatische omzetting naar Zerocoin totdat er genoeg SAPP voor deze denominatie beschikbaar is</translation>
</message>
<message>
<source>Preferred Automint zSAP Denomination</source>
<translation>Gewenste Automint zSAP denominatie</translation>
</message>
<message>
<source>Stake split threshold:</source>
<translation>Inzet splitdrempel:</translation>
</message>
<message>
<source>Connect to the SAPP network through a SOCKS5 proxy.</source>
<translation>Maak verbinding met het SAPP netwerk via een SOCKS5 proxy.</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>Proxy &IP:</translation>
</message>
<message>
<source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source>
<translation>IP adres van de proxy (bijvoorbeeld IPv4: 127.0.0.1 / IPv6: ::1)</translation>
</message>
<message>
<source>&Port:</source>
<translation>&Poort:</translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Proxy-poort (v.b. 9050)</translation>
</message>
<message>
<source>&Window</source>
<translation>&Venster</translation>
</message>
<message>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Toon alleen een tray icoon na minimalisering van het venster.</translation>
</message>
<message>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimaliseer naar de tray in plaats van de taakbalk</translation>
</message>
<message>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimaliseer in plaats van de applicatie te verlaten wanneer het venster wordt gesloten. Wanneer deze optie is ingeschakeld, wordt de applicatie alleen gesloten nadat in het menu Quit wordt gekozen.</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>M&inimaliseer bij sluiten</translation>
</message>
<message>
<source>&Display</source>
<translation>&Weergave</translation>
</message>
<message>
<source>User Interface &language:</source>
<translation>Gebruikersinterface &taal:</translation>
</message>
<message>
<source>User Interface Theme:</source>
<translation>Gebruiksinterface thema:</translation>
</message>
<message>
<source>&Unit to show amounts in:</source>
<translation>Toon &Unit in volgende hoeveelheden:</translation>
</message>
<message>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Kies de standaard onderverdelingseenheid die in de interface wordt weergegeven en bij het verzenden van munten.</translation>
</message>
<message>
<source>Decimal digits</source>
<translation>Decimale cijfers</translation>
</message>
<message>
<source>Hide empty balances</source>
<translation>Verberg lege saldi</translation>
</message>
<message>
<source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source>
<translation>URL's van derden (bijvoorbeeld een blok verkenner) die in het tabblad transacties verschijnen als contextmenu items. %s in de URL wordt vervangen door transactie hash. Meerdere URL's worden gescheiden door verticale balk |.</translation>
</message>
<message>
<source>Third party transaction URLs</source>
<translation>Transactie URL's van derden</translation>
</message>
<message>
<source>Active command-line options that override above options:</source>
<translation>Actieve command line opties die bovenstaande opties overschrijven:</translation>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>Reset alle client instellingen naar standaardinstellingen.</translation>
</message>
<message>
<source>&Reset Options</source>
<translation>&Herstellings Opties</translation>
</message>
<message>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<source>&Cancel</source>
<translation>&Annuleren</translation>
</message>
<message>
<source>Any</source>
<translation>Elke</translation>
</message>
<message>
<source>default</source>
<translation>standaard</translation>
</message>
<message>
<source>none</source>
<translation>geen</translation>
</message>
<message>
<source>Confirm options reset</source>
<translation>Bevestig reset instellingen</translation>
</message>
<message>
<source>Client restart required to activate changes.</source>
<translation>Client herstart vereist om wijzigingen te activeren.</translation>
</message>
<message>
<source>Client will be shutdown, do you want to proceed?</source>
<translation>Client wordt uitgeschakeld, wil je doorgaan?</translation>
</message>
<message>
<source>This change would require a client restart.</source>
<translation>Deze wijziging vereist een herstart van de client.</translation>
</message>
<message>
<source>The supplied proxy address is invalid.</source>
<translation>Het opgegeven proxyadres is ongeldig.</translation>
</message>
<message>
<source>The supplied proxy port is invalid.</source>
<translation>De meegeleverde proxy-poort is ongeldig.</translation>
</message>
<message>
<source>The supplied proxy settings are invalid.</source>
<translation>De geleverde proxy-instellingen zijn ongeldig.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Formulier</translation>
</message>
<message>
<source>Available:</source>
<translation>Beschikbaar:</translation>
</message>
<message>
<source>Your current spendable balance</source>
<translation>Uw huidige bestedingsruimte</translation>
</message>
<message>
<source>Total Balance, including all unavailable coins.</source>
<translation>Totaal saldo, inclusief alle niet-beschikbare munten.</translation>
</message>
<message>
<source>SAPP Balance</source>
<translation>SAPP Saldi</translation>
</message>
<message>
<source>Pending:</source>
<translation>In afwachting:</translation>
</message>
<message>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source>
<translation>Totaal aantal transacties die nog niet zijn bevestigd en nog niet meetellen in het uitgeefbare saldo</translation>
</message>
<message>
<source>Immature:</source>
<translation>Immatuur:</translation>
</message>
<message>
<source>Staked or masternode rewards that has not yet matured</source>
<translation>Staked of masternode beloningen die nog niet volwassen zijn</translation>
</message>
<message>
<source>Current locked balance in watch-only addresses</source>
<translation>Huidige vergrendelde balans in alleen-lezen-adressen</translation>
</message>
<message>
<source>Your current SAPP balance, unconfirmed and immature transactions included</source>
<translation>Uw huidige SAPP-saldo, onbevestigde en onvolgroeide transacties inbegrepen</translation>
</message>
<message>
<source>SAPP Rewards</source>
<translation>SAPP-beloningen</translation>
</message>
<message>
<source>Mature: more than 20 confirmation and more than 1 mint of the same denomination after it was minted.
These zSAP are spendable.</source>
<translation>Volwassen: meer dan 20 bevestigingen en meer dan 1 mint van dezelfde denominatie nadat het gemint was.
Deze zSAP zijn besteedbaar.</translation>
</message>
<message>
<source>Unconfirmed: less than 20 confirmations
Immature: confirmed, but less than 1 mint of the same denomination after it was minted</source>
<translation>Onbevestigd: minder dan 20 bevestigingen
Onvolwassen: bevestigd, maar minder dan 1 mint van dezelfde denominatie nadat het gemint was.</translation>
</message>
<message>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the SAPP network after a connection is established, but this process has not completed yet.</source>
<translation>De weergegeven informatie kan verouderd zijn. Je portemonnee synchroniseert automatisch met het SAPP netwerk nadat een verbinding is opgezet, maar dit proces is nog niet afgerond.</translation>
</message>
<message>
<source>OVERVIEW</source>
<translation>OVERZICHT</translation>
</message>
<message>
<source>Balance (including unconfirmed and immature coins)</source>
<translation>Saldi (inclusief onbevestigde en onvolwassen munten)</translation>
</message>
<message>
<source>Balance</source>
<translation>Balans</translation>
</message>
<message>
<source>Unconfirmed transactions to watch-only addresses</source>
<translation>Onbevestigde transacties naar watch-only adressen</translation>
</message>
<message>
<source>Staked or masternode rewards in watch-only addresses that has not yet matured</source>
<translation>Staked of masternode beloningen in watch-only adressen die nog niet volwassen zijn</translation>
</message>
<message>
<source>Total:</source>
<translation>Totaal:</translation>
</message>
<message>
<source>Current total balance in watch-only addresses</source>
<translation>Huidig totaal saldo in watch-only adressen</translation>
</message>
<message>
<source>Watch-only:</source>
<translation>Watch-only:</translation>
</message>
<message>
<source>Your current balance in watch-only addresses</source>
<translation>Je huidige saldo in watch-only adressen</translation>
</message>
<message>
<source>Spendable:</source>
<translation>Uitgeefbaar:</translation>
</message>
<message>
<source>Locked SAPP or Masternode collaterals. These are excluded from zSAP minting.</source>
<translation>Vergrendelde SAPP of Masternode onderpanden. Deze zijn uitgesloten van de zSAP muntage.</translation>
</message>
<message>
<source>Locked:</source>
<translation>Vergrendeld:</translation>
</message>
<message>
<source>Unconfirmed:</source>
<translation>Onbevestigd:</translation>
</message>
<message>
<source>Your current zSAP balance, unconfirmed and immature zSAP included.</source>
<translation>Uw huidige zSAP-saldo, onbevestigde en onvolgroeide zSAP inbegrepen.</translation>
</message>
<message>
<source>Recent transactions</source>
<translation>Recente transacties</translation>
</message>
<message>
<source>out of sync</source>
<translation>niet gesynchroniseerd</translation>
</message>
<message>
<source>Current percentage of zSAP.
If AutoMint is enabled this percentage will settle around the configured AutoMint percentage (default = 10%).
</source>
<translation>Huidige percentage van zSAP
Als AutoMint ingeschakeld is zal deze percentage afhangen van de geconfigureerde AutoMint percentage (standaard = 10%).
</translation>
</message>
<message>
<source>AutoMint is currently enabled and set to </source>
<translation>AutoMint is momenteel ingeschakeld en ingesteld op </translation>
</message>
<message>
<source>To disable AutoMint add 'enablezeromint=0' in sap.conf.</source>
<translation>Om AutoMint uit te schakelen, voeg je 'enablezeromint=0' toe aan sap.conf.</translation>
</message>
<message>
<source>AutoMint is currently disabled.
To enable AutoMint change 'enablezeromint=0' to 'enablezeromint=1' in sap.conf</source>
<translation>AutoMint is momenteel uitgeschakeld.
Om AutoMint in te schakelend verander je 'enablezeromint=0' naar 'enablezeromint=1' in sap.conf</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<source>Payment request error</source>
<translation>Fout bij betalingsverzoek</translation>
</message>
<message>
<source>URI handling</source>
<translation>URL behandeling</translation>
</message>
<message>
<source>Payment request fetch URL is invalid: %1</source>
<translation>Betalingsverzoek ophalen URL is ongeldig: %1</translation>
</message>
<message>
<source>Payment request file handling</source>
<translation>Betalingsaanvraag bestandsverwerking</translation>
</message>
<message>
<source>Invalid payment address %1</source>
<translation>Ongeldig betalingsadres %1</translation>
</message>
<message>
<source>Cannot start sap: click-to-pay handler</source>
<translation>Kan sap niet starten: click-to-pay handler</translation>
</message>
<message>
<source>URI cannot be parsed! This can be caused by an invalid SAPP address or malformed URI parameters.</source>
<translation>URL kan niet ontleed worden! Dit kan worden veroorzaakt door een ongeldig SAPP adres of misvormde URL parameters.</translation>
</message>
<message>
<source>Payment request file cannot be read! This can be caused by an invalid payment request file.</source>
<translation>Uw betalingsverzoek kan niet worden gelezen! Dit kan worden veroorzaakt door een ongeldig betalingsverzoek bestand.</translation>
</message>
<message>
<source>Payment request rejected</source>
<translation>Betalingsverzoek afgewezen</translation>
</message>
<message>
<source>Payment request network doesn't match client network.</source>
<translation>Het betalingsverzoek netwerk komt niet overeen met het client netwerk.</translation>
</message>
<message>
<source>Payment request has expired.</source>
<translation>Betalingsverzoek is verlopen.</translation>
</message>
<message>
<source>Payment request is not initialized.</source>
<translation>Betalingsverzoek is niet geïnitialiseerd.</translation>
</message>
<message>
<source>Unverified payment requests to custom payment scripts are unsupported.</source>
<translation>Niet geverifieerde betalingsverzoeken naar aangepaste betaal scripts worden niet ondersteund.</translation>
</message>
<message>
<source>Requested payment amount of %1 is too small (considered dust).</source>
<translation>Gevraagd betalingsbedrag van %1 is te klein (beschouwd als dust).</translation>
</message>
<message>
<source>Refund from %1</source>
<translation>Terugbetaling van %1</translation>
</message>
<message>
<source>Payment request %1 is too large (%2 bytes, allowed %3 bytes).</source>
<translation>Betalingsverzoek %1 is te groot (%2 bytes, toegestaan %3 bytes).</translation>
</message>
<message>
<source>Payment request DoS protection</source>
<translation>Betalingsverzoek DoS bescherming</translation>
</message>
<message>
<source>Error communicating with %1: %2</source>
<translation>Fout communiceren met %1: %2</translation>
</message>
<message>
<source>Payment request cannot be parsed!</source>
<translation>Betalingsaanvraag kan niet worden geanalyseerd!</translation>
</message>
<message>
<source>Bad response from server %1</source>
<translation>Bad response van server %1</translation>
</message>
<message>
<source>Network request error</source>
<translation>Netwerkverzoek fout</translation>
</message>
<message>
<source>Payment acknowledged</source>
<translation>Betaling erkend</translation>
</message>
</context>
<context>
<name>PeerTableModel</name>
<message>
<source>Address/Hostname</source>
<translation>Adres/Hostnaam</translation>
</message>
<message>
<source>Version</source>
<translation>Versie</translation>
</message>
<message>
<source>Ping Time</source>
<translation>Pingtijd</translation>
</message>
</context>
<context>
<name>PrivacyDialog</name>
<message>
<source>Zerocoin Actions:</source>
<translation>Zerocoin Acties:</translation>
</message>
<message>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the SAPP network after a connection is established, but this process has not completed yet.</source>
<translation>De weergegeven informatie kan verouderd zijn. Je portemonnee synchroniseert automatisch met het SAPP netwerk nadat een verbinding is opgezet, maar dit proces is nog niet afgerond.</translation>
</message>
<message>
<source>Mint Zerocoin</source>
<translation>Mint Zerocoin</translation>
</message>
<message>
<source>0</source>
<translation>0</translation>
</message>
<message>
<source>zSAP</source>
<translation>zSAP</translation>
</message>
<message>
<source>Available for minting are coins which are confirmed and not locked or Masternode collaterals.</source>
<translation>Beschikbaar voor muntage zijn munten die bevestigd en niet vergrendeld of Masternode onderpanden zijn.</translation>
</message>
<message>
<source>Available for Minting:</source>
<translation>Beschikbaar voor minting:</translation>
</message>
<message>
<source>0.000 000 00 SAPP</source>
<translation>0.000 000 00 SAPP</translation>
</message>
<message>
<source>Reset Zerocoin Wallet DB. Deletes transactions that did not make it into the blockchain.</source>
<translation>Zerocoin Wallet DB opnieuw instellen. Verwijdert transacties die het niet in de blockchain hebben gered.</translation>
</message>
<message>
<source>Reset</source>
<translation>Resetten</translation>
</message>
<message>
<source>Coin Control...</source>
<translation>Munt controle...</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Kwantiteit:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Hoeveelheid:</translation>
</message>
<message>
<source>Rescan the complete blockchain for Zerocoin mints and their meta-data.</source>
<translation>Rescan de complete blockchain voor Zerocoin mints en hun meta-data.</translation>
</message>
<message>
<source>ReScan</source>
<translation>ReScan</translation>
</message>
<message>
<source>Status and/or Mesages from the last Mint Action.</source>
<translation>Status en/of Berichten van de laatste mint actie.</translation>
</message>
<message>
<source>PRIVACY</source>
<translation>PRIVACY</translation>
</message>
<message>
<source>Enter an amount of SAPP to convert to zSAP</source>
<translation>Vul het aantal SAPP in om te converteren naar zSAP</translation>
</message>
<message>
<source>zSAP Control</source>
<translation>zSAP Control</translation>
</message>
<message>
<source>zSAP Selected:</source>
<translation>zSAP geselecteerd:</translation>
</message>
<message>
<source>Quantity Selected:</source>
<translation>Hoeveelheid geselecteerd:</translation>
</message>
<message>
<source>Spend Zerocoin. Without 'Pay To:' address creates payments to yourself.</source>
<translation>Zerocoin besteden. Zonder 'Betaal aan:' adres ontstaan betalingen aan jezelf.</translation>
</message>
<message>
<source>Spend Zerocoin</source>
<translation>Besteed Zerocoin</translation>
</message>
<message>
<source>Available (mature and spendable) zSAP for spending</source>
<translation>Beschikbare (volwassen en uitgeefbaar) zSAP voor besteding</translation>
</message>
<message>
<source>Available Balance:</source>
<translation>Beschikbaar saldo:</translation>
</message>
<message>
<source>Available (mature and spendable) zSAP for spending
zSAP are mature when they have more than 20 confirmations AND more than 2 mints of the same denomination after them were minted</source>
<translation>Beschikbare (volwwassen en uitgeefbaar) zSAP voor besteding
zSAP zijn volwassen wanneer zij meer dan 20 bevestigingen hebben EN meer dan 2 muntages van dezelfde denominaties nadat zij gemunt zijn.</translation>
</message>
<message>
<source>0 zSAP</source>
<translation>0 zSAP</translation>
</message>
<message>
<source>Security Level for Zerocoin Transactions. More is better, but needs more time and resources.</source>
<translation>Beveiligingsniveau voor Zerocoin transacties. Meer is beter, maar heeft meer tijd en middelen nodig.</translation>
</message>
<message>
<source>Security Level:</source>
<translation>Beveiligings niveau:</translation>
</message>
<message>
<source>Security Level 1 - 100 (default: 42)</source>
<translation>Beveiligingsniveau 1 - 100 (standaard: 42)</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>Betaal &Naar:</translation>
</message>
<message>
<source>The SAPP address to send the payment to. Creates local payment to yourself when empty.</source>
<translation>Het SAPP adres om de betaling naar te verzenden. Maakt lokale betaling aan jezelf als je leeg bent.</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Kies een eerder gebruikt adres</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Plak adres vanaf klembord</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Label:</translation>
</message>
<message>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation>Vul een label in voor dit adres om deze toe te voegen aan de lijst met gebruikte adressen</translation>
</message>
<message>
<source>A&mount:</source>
<translation>&Hoeveelheid:</translation>
</message>
<message>
<source>Convert Change to Zerocoin (might cost additional fees)</source>
<translation>Zet wisselgeld om naar Zerocoin (mogelijk extra kosten)</translation>
</message>
<message>
<source>If checked, the wallet tries to minimize the returning change instead of minimizing the number of spent denominations.</source>
<translation>Indien aangevinkt, probeert de portemonnee het terugkerende wisselgeld te minimaliseren in plaats van het aantal uitgegeven denominaties te minimaliseren.</translation>
</message>
<message>
<source>Minimize Change</source>
<translation>Minimaliseer wisselgeld</translation>
</message>
<message>
<source>Information about the available Zerocoin funds.</source>
<translation>Informatie over de beschikbare Zerocoin fondsen.</translation>
</message>
<message>
<source>Zerocoin Stats:</source>
<translation>Zerocoin Statistieken:</translation>
</message>
<message>
<source>Total Balance including unconfirmed and immature zSAP</source>
<translation>Totale Saldi, inclusief onbevestigde en onvolwassen zSAP</translation>
</message>
<message>
<source>Total Zerocoin Balance:</source>
<translation>Totale Zerocoin Saldi:</translation>
</message>
<message>
<source>Denominations with value 1:</source>
<translation>Denominaties met waarde 1:</translation>
</message>
<message>
<source>Denom. with value 1:</source>
<translation>Denom. met waarde 1:</translation>
</message>
<message>
<source>Unconfirmed: less than 20 confirmations
Immature: confirmed, but less than 1 mint of the same denomination after it was minted</source>
<translation>Onbevestigd: minder dan 20 bevestigingen
Onvolwassen: bevestigd, maar minder dan 1 mint van dezelfde denominatie nadat het gemint was.</translation>
</message>
<message>
<source>Show the current status of automatic zSAP minting.
To change the status (restart required):
- enable: add 'enablezeromint=1' to sap.conf
- disable: add 'enablezeromint=0' to sap.conf
To change the percentage (no restart required):
- menu Settings->Options->Percentage of autominted zSAP
</source>
<translation>Toon de huidige status van automatische zSAP-markering.
Om de status te wijzigen (opnieuw opstarten vereist):
- enable: voeg 'enablezeromint = 1' toe aan sap.conf
- uitschakelen: voeg 'enablezeromint = 0' toe aan sap.conf
Om het percentage te wijzigen (geen herstart vereist):
- menu Instellingen-> Opties-> Percentage van geautomatiseerde zSAP
</translation>
</message>
<message>
<source>AutoMint Status</source>
<translation>AutoMint-status</translation>
</message>
<message>
<source>Global Supply:</source>
<translation>Wereldwijde levering:</translation>
</message>
<message>
<source>Denom. 1:</source>
<translation>Denom. 1:</translation>
</message>
<message>
<source>Denom. 5:</source>
<translation>Denom. 5:</translation>
</message>
<message>
<source>Denom. 10:</source>
<translation>Denom. 10:</translation>
</message>
<message>
<source>Denom. 50:</source>
<translation>Denom. 50:</translation>
</message>
<message>
<source>Denom. 100:</source>
<translation>Denom. 100:</translation>
</message>
<message>
<source>Denom. 500:</source>
<translation>Denom. 500:</translation>
</message>
<message>
<source>Denom. 1000:</source>
<translation>Denom. 1000:</translation>
</message>
<message>
<source>Denom. 5000:</source>
<translation>Denom. 5000:</translation>
</message>
<message>
<source>0 x</source>
<translation>0 x</translation>
</message>
<message>
<source>Denominations with value 5:</source>
<translation>Denominaties met waarde 5:</translation>
</message>
<message>
<source>Denom. with value 5:</source>
<translation>Denom. met waarde 5:</translation>
</message>
<message>
<source>Denominations with value 10:</source>
<translation>Denominaties met waarde 10:</translation>
</message>
<message>
<source>Denom. with value 10:</source>
<translation>Denom. met waarde 10:</translation>
</message>
<message>
<source>Denominations with value 50:</source>
<translation>Denominaties met waarde 50:</translation>
</message>
<message>
<source>Denom. with value 50:</source>
<translation>Denom. met waarde 50:</translation>
</message>
<message>
<source>Denominations with value 100:</source>
<translation>Denominaties met waarde 100:</translation>
</message>
<message>
<source>Denom. with value 100:</source>
<translation>Denom. met waarde 100:</translation>
</message>
<message>
<source>Denominations with value 500:</source>
<translation>Denominaties met waarde 500:</translation>
</message>
<message>
<source>Denom. with value 500:</source>
<translation>Denom. met waarde 500:</translation>
</message>
<message>
<source>Denominations with value 1000:</source>
<translation>Denominaties met waarde 1000:</translation>
</message>
<message>
<source>Denom. with value 1000:</source>
<translation>Denom. met waarde 1000:</translation>
</message>
<message>
<source>Denominations with value 5000:</source>
<translation>Denominaties met waarde 5000:</translation>
</message>
<message>
<source>Denom. with value 5000:</source>
<translation>Denom. met waarde 5000:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Prioriteit:</translation>
</message>
<message>
<source>TextLabel</source>
<translation>TextLabel</translation>
</message>
<message>
<source>Fee:</source>
<translation>Kost:</translation>
</message>
<message>
<source>Dust:</source>
<translation>Stof:</translation>
</message>
<message>
<source>no</source>
<translation>nee</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Onvoldoende saldo!</translation>
</message>
<message>
<source>Coins automatically selected</source>
<translation>Munten automatisch geselecteerd</translation>
</message>
<message>
<source>medium</source>
<translation>medium</translation>
</message>
<message>
<source>Coin Control Features</source>
<translation>Munt controle kenmerken</translation>
</message>
<message>
<source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source>
<translation>Als dit geactiveerd is, maar het wisselgeld adres is leeg of ongeldig, wordt het wisselgeld verzonden naar een nieuw gegenereerd adres.</translation>
</message>
<message>
<source>Custom change address</source>
<translation>Aangepast wisselgeld adres</translation>
</message>
<message>
<source>Amount After Fee:</source>
<translation>Bedrag na kosten:</translation>
</message>
<message>
<source>Change:</source>
<translation>Wijzig:</translation>
</message>
<message>
<source>out of sync</source>
<translation>niet gesynchroniseerd</translation>
</message>
<message>
<source>Mint Status: Okay</source>
<translation>Mint status: Oké</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Kopieer kwanititeit</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopieer hoeveelheid</translation>
</message>
<message>
<source>Starting ResetMintZerocoin: rescanning complete blockchain, this will need up to 30 minutes depending on your hardware.
Please be patient...</source>
<translation>Start ResetMintZerocoin: rescanning complete blockchain, dit zal tot 30 minuten nodig hebben, afhankelijk van uw hardware.
Wees alsjeblieft geduldig...</translation>
</message>
<message>
<source>Spending Zerocoin.
Computationally expensive, might need several minutes depending on the selected Security Level and your hardware.
Please be patient...</source>
<translation>Zerocoin besteden.
Computationeel duur, zou mogelijk enkele minuten nodig hebben, afhankelijk van het geselecteerde beveiligingsniveau en je hardware.
Wees alsjeblieft geduldig...</translation>
</message>
<message>
<source>) needed.
Maximum allowed: </source>
<translation>) vereist.
Maximaal toegestaan:</translation>
</message>
<message>
<source>zSAP Spend #: </source>
<translation>zSAP besteed #: </translation>
</message>
<message>
<source>zSAP Mint</source>
<translation>zSAP mint</translation>
</message>
<message>
<source> <b>enabled</b>.</source>
<translation> <b>ingeschakeld</b>.</translation>
</message>
<message>
<source> <b>disabled</b>.</source>
<translation> <b>uitgeschakeld</b>.</translation>
</message>
<message>
<source> Configured target percentage: <b></source>
<translation> Geconfigureerd doelpercentage: <b></translation>
</message>
<message>
<source>zSAP is currently disabled due to maintenance.</source>
<translation>zSAP is momenteel uitgeschakeld vanwege onderhoud.</translation>
</message>
<message>
<source>zSAP is currently undergoing maintenance.</source>
<translation>zSAP wordt momenteel onderhouden.</translation>
</message>
<message>
<source>Denom. with value <b>1</b>:</source>
<translation>Denom. met waarde <b> 1</b>:</translation>
</message>
<message>
<source>Denom. with value <b>5</b>:</source>
<translation>Denom. met waarde <b>5</b>:</translation>
</message>
<message>
<source>Denom. with value <b>10</b>:</source>
<translation>Denom. met waarde <b>10</b>:</translation>
</message>
<message>
<source>Denom. with value <b>50</b>:</source>
<translation>Denom. met waarde <b>50</b>:</translation>
</message>
<message>
<source>Denom. with value <b>100</b>:</source>
<translation>Denom. met waarde <b> 100</b>:</translation>
</message>
<message>
<source>Denom. with value <b>500</b>:</source>
<translation>Denom. met waarde <b> 500</b>:</translation>
</message>
<message>
<source>Denom. with value <b>1000</b>:</source>
<translation>Denom. met waarde <b>1000</b>:</translation>
</message>
<message>
<source>Denom. with value <b>5000</b>:</source>
<translation>Denom. met waarde <b>5000</b>:</translation>
</message>
<message>
<source>AutoMint Status:</source>
<translation>AutoMint-status:</translation>
</message>
<message>
<source>Denom. <b>1</b>:</source>
<translation>Denom. <b>1</b>:</translation>
</message>
<message>
<source>Denom. <b>5</b>:</source>
<translation>Denom. <b>5</b>:</translation>
</message>
<message>
<source>Denom. <b>10</b>:</source>
<translation>Denom. <b>10</b>:</translation>
</message>
<message>
<source>Denom. <b>50</b>:</source>
<translation>Denom. <b>50</b>:</translation>
</message>
<message>
<source>Denom. <b>100</b>:</source>
<translation>Denom. <b>100</b>:</translation>
</message>
<message>
<source>Denom. <b>500</b>:</source>
<translation>Denom. <b>500</b>:</translation>
</message>
<message>
<source>Denom. <b>1000</b>:</source>
<translation>Denom. <b>1000</b>:</translation>
</message>
<message>
<source>Denom. <b>5000</b>:</source>
<translation>Denom. <b>5000</b>:</translation>
</message>
<message>
<source>Error: Your wallet is locked. Please enter the wallet passphrase first.</source>
<translation>Fout: Je portemonnee is vergrendeld. Voer alsjeblieft de wachtwoord zin voor de portemonnee in.</translation>
</message>
<message>
<source>Message: Enter an amount > 0.</source>
<translation>Bericht: voer een bedrag in > 0.</translation>
</message>
<message>
<source>Minting </source>
<translation>Minting </translation>
</message>
<message>
<source>Successfully minted </source>
<translation>Succesvol gemint </translation>
</message>
<message>
<source> zSAP in </source>
<translation> zSAP in </translation>
</message>
<message>
<source> sec. Used denominations:
</source>
<translation> sec. Gebruikte denominaties:
</translation>
</message>
<message>
<source>Duration: </source>
<translation>Duur:</translation>
</message>
<message>
<source> sec.
</source>
<translation> sec.
</translation>
</message>
<message>
<source>Starting ResetSpentZerocoin: </source>
<translation>Starten van ResetSpentZerocoin: </translation>
</message>
<message>
<source>No 'Pay To' address provided, creating local payment</source>
<translation>Geen 'Betaal aan' adres verstrekt, lokale betaling wordt gemaakt</translation>
</message>
<message>
<source>Invalid Sapphire address</source>
<translation>Ongeldig Sapphire addres</translation>
</message>
<message>
<source>Invalid Send Amount</source>
<translation>Ongeldig verzend bedrag</translation>
</message>
<message>
<source>Confirm additional Fees</source>
<translation>Bevestig extra kosten</translation>
</message>
<message>
<source>Are you sure you want to send?<br /><br /></source>
<translation>Weet je zeker dat je wilt verzenden?<br /><br /></translation>
</message>
<message>
<source> to address </source>
<translation> naar adres </translation>
</message>
<message>
<source> to a newly generated (unused and therefore anonymous) local address <br /></source>
<translation>naar een nieuw gegenereerd (ongebruikt en dus anoniem) lokaal adres<br /></translation>
</message>
<message>
<source>with Security Level </source>
<translation>met beveiligingsniveau</translation>
</message>
<message>
<source>Confirm send coins</source>
<translation>Bevestig verzending coins</translation>
</message>
<message>
<source>Version 1 zSAP require a security level of 100 to successfully spend.</source>
<translation>Versie 1 zSAP vereist een beveiligingsniveau van 100 om succesvol te besteden.</translation>
</message>
<message>
<source>Failed to spend zSAP</source>
<translation>Mislukt om zSAP te besteden.</translation>
</message>
<message>
<source>Failed to fetch mint associated with serial hash</source>
<translation>Mislukt om de mint op te halen geassocieerd met de seriële hash</translation>
</message>
<message>
<source>Too much inputs (</source>
<translation>Te veel invoer (</translation>
</message>
<message>
<source>
Either mint higher denominations (so fewer inputs are needed) or reduce the amount to spend.</source>
<translation>
Ofwel het munten van hogere denominaties (dus minder invoer nodig) of het te besteden bedrag verminderen.</translation>
</message>
<message>
<source>Spend Zerocoin failed with status = </source>
<translation>Bestede Zerocoin mislukt met status =</translation>
</message>
<message numerus="yes">
<source>PrivacyDialog</source>
<comment>Enter an amount of SAPP to convert to zSAP</comment>
<translation><numerusform>PrivacyDialoog</numerusform><numerusform>PrivacyDialog</numerusform></translation>
</message>
<message>
<source>denomination: </source>
<translation>denominatie:</translation>
</message>
<message>
<source>serial: </source>
<translation>serial: </translation>
</message>
<message>
<source>Spend is 1 of : </source>
<translation>Besteed is 1 van : </translation>
</message>
<message>
<source>value out: </source>
<translation>Waarde uit:</translation>
</message>
<message>
<source>address: </source>
<translation>adres:</translation>
</message>
<message>
<source>Sending successful, return code: </source>
<translation>Verzenden succesvol, retourcode:</translation>
</message>
<message>
<source>txid: </source>
<translation>txid: </translation>
</message>
<message>
<source>fee: </source>
<translation>kosten:</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Hoeveelheid</translation>
</message>
<message>
<source>Enter a SAPP address (e.g. %1)</source>
<translation>Voer een SAPP adres in (b.v. %1)</translation>
</message>
<message>
<source>%1 d</source>
<translation>%1 d</translation>
</message>
<message>
<source>%1 h</source>
<translation>%1 h</translation>
</message>
<message>
<source>%1 m</source>
<translation>%1 m</translation>
</message>
<message>
<source>%1 s</source>
<translation>%1 s</translation>
</message>
<message>
<source>NETWORK</source>
<translation>NETWERK</translation>
</message>
<message>
<source>BLOOM</source>
<translation>BLOOM</translation>
</message>
<message>
<source>UNKNOWN</source>
<translation>ONBEKEND</translation>
</message>
<message>
<source>None</source>
<translation>Geen</translation>
</message>
<message>
<source>N/A</source>
<translation>NB</translation>
</message>
<message>
<source>%1 ms</source>
<translation>%1 ms</translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation>&Afbeelding opslaan...</translation>
</message>
<message>
<source>&Copy Image</source>
<translation>&Kopieer afbeelding</translation>
</message>
<message>
<source>Save QR Code</source>
<translation>QR code opslaan</translation>
</message>
<message>
<source>PNG Image (*.png)</source>
<translation>PNG Afbeelding(*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>Tools window</source>
<translation>Tools venster</translation>
</message>
<message>
<source>&Information</source>
<translation>&Informatie</translation>
</message>
<message>
<source>General</source>
<translation>Algemeen</translation>
</message>
<message>
<source>Name</source>
<translation>Naam</translation>
</message>
<message>
<source>Client name</source>
<translation>Client naam</translation>
</message>
<message>
<source>N/A</source>
<translation>NB</translation>
</message>
<message>
<source>Number of connections</source>
<translation>Aantal connecties</translation>
</message>
<message>
<source>&Open</source>
<translation>&Open</translation>
</message>
<message>
<source>Startup time</source>
<translation>Opstarttijd</translation>
</message>
<message>
<source>Network</source>
<translation>Netwerk</translation>
</message>
<message>
<source>Last block time</source>
<translation>Laatste blocktijd</translation>
</message>
<message>
<source>Debug log file</source>
<translation>Debug logbestand</translation>
</message>
<message>
<source>Using OpenSSL version</source>
<translation>Gebruikt OpenSSL versie</translation>
</message>
<message>
<source>Build date</source>
<translation>Bouwdatum</translation>
</message>
<message>
<source>Current number of blocks</source>
<translation>Huidige blockaantal</translation>
</message>
<message>
<source>Client version</source>
<translation>Client versie</translation>
</message>
<message>
<source>Using BerkeleyDB version</source>
<translation>Gebruikt BerkeleyDB versie</translation>
</message>
<message>
<source>Block chain</source>
<translation>Blockchain</translation>
</message>
<message>
<source>Open the SAPP debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Open het SAPP debug log bestand uit de huidige data directory. Dit kan enkele seconden duren voor grote logbestanden.</translation>
</message>
<message>
<source>Number of Masternodes</source>
<translation>Aantal Masternodes</translation>
</message>
<message>
<source>&Console</source>
<translation>&Console</translation>
</message>
<message>
<source>Clear console</source>
<translation>Console leegmaken</translation>
</message>
<message>
<source>&Network Traffic</source>
<translation>&Netwerk verkeer</translation>
</message>
<message>
<source>&Clear</source>
<translation>&Leegmaken</translation>
</message>
<message>
<source>Totals</source>
<translation>Totalen</translation>
</message>
<message>
<source>Received</source>
<translation>Ontvangen</translation>
</message>
<message>
<source>Sent</source>
<translation>Verstuurd</translation>
</message>
<message>
<source>&Peers</source>
<translation>&Peers</translation>
</message>
<message>
<source>Banned peers</source>
<translation>Verbannen peers</translation>
</message>
<message>
<source>Select a peer to view detailed information.</source>
<translation>Selecteer een peer om gedetailleerde informatie te bekijken.</translation>
</message>
<message>
<source>Whitelisted</source>
<translation>Whitelisted</translation>
</message>
<message>
<source>Direction</source>
<translation>Richting</translation>
</message>
<message>
<source>Protocol</source>
<translation>Protocol</translation>
</message>
<message>
<source>Version</source>
<translation>Versie</translation>
</message>
<message>
<source>Services</source>
<translation>Diensten</translation>
</message>
<message>
<source>Ban Score</source>
<translation>Ban score</translation>
</message>
<message>
<source>Connection Time</source>
<translation>Connectietijd</translation>
</message>
<message>
<source>Last Send</source>
<translation>Laatst Verzonden</translation>
</message>
<message>
<source>Last Receive</source>
<translation>Laatst Ontvangen</translation>
</message>
<message>
<source>Bytes Sent</source>
<translation>Verzonden Bytes</translation>
</message>
<message>
<source>Bytes Received</source>
<translation>Ontvangen Bytes</translation>
</message>
<message>
<source>Ping Time</source>
<translation>Pingtijd</translation>
</message>
<message>
<source>&Wallet Repair</source>
<translation>&Portemonnee herstel</translation>
</message>
<message>
<source>Delete local Blockchain Folders</source>
<translation>Verwijder lokale Blockchain Mappen</translation>
</message>
<message>
<source>Wallet In Use:</source>
<translation>Portemonnee in gebruik:</translation>
</message>
<message>
<source>Starting Block</source>
<translation>Startblok</translation>
</message>
<message>
<source>Synced Headers</source>
<translation>Gesynchroniseerde headers</translation>
</message>
<message>
<source>Synced Blocks</source>
<translation>Gesynchroniseerde blokken</translation>
</message>
<message>
<source>The duration of a currently outstanding ping.</source>
<translation>De duur van een momenteel openstaande ping.</translation>
</message>
<message>
<source>Ping Wait</source>
<translation>Ping wacht</translation>
</message>
<message>
<source>Time Offset</source>
<translation>Tijdverschuiving</translation>
</message>
<message>
<source>Custom Backup Path:</source>
<translation>Aangepast back-up pad:</translation>
</message>
<message>
<source>Custom zSAP Backup Path:</source>
<translation>Aangepast zSAP back-up pad:</translation>
</message>
<message>
<source>Custom Backups Threshold:</source>
<translation>Aangepaste back-up drempel:</translation>
</message>
<message>
<source>Salvage wallet</source>
<translation>Red portemonnee</translation>
</message>
<message>
<source>Attempt to recover private keys from a corrupt wallet.dat.</source>
<translation>Poog om privé sleutels terug te halen uit een corrupte wallet.dat.</translation>
</message>
<message>
<source>Rescan blockchain files</source>
<translation>Herscan blockchain bestanden</translation>
</message>
<message>
<source>Rescan the block chain for missing wallet transactions.</source>
<translation>Herscan de blockchain voor ontbrekende portemonnee transacties.</translation>
</message>
<message>
<source>Recover transactions 1</source>
<translation>Herstel transacties 1</translation>
</message>
<message>
<source>Recover transactions from blockchain (keep meta-data, e.g. account owner).</source>
<translation>Herstel transacties van blockchain (houd meta-data, bijvoorbeeld account eigenaar).</translation>
</message>
<message>
<source>Recover transactions 2</source>
<translation>Herstel transacties 2</translation>
</message>
<message>
<source>Recover transactions from blockchain (drop meta-data).</source>
<translation>Herstel transacties van blockchain (laat meta-data vallen).</translation>
</message>
<message>
<source>Upgrade wallet format</source>
<translation>Upgrade portemonnee format</translation>
</message>
<message>
<source>Rebuild block chain index from current blk000??.dat files.</source>
<translation>Herstel blockchain index van huidige blk000??.dat bestanden.</translation>
</message>
<message>
<source>-resync:</source>
<translation>-resync:</translation>
</message>
<message>
<source>Deletes all local blockchain folders so the wallet synchronizes from scratch.</source>
<translation>Verwijder alle lokale blockchain mappen zodat de portemonnee alles opnieuw kan hersynchroniseren.</translation>
</message>
<message>
<source>The buttons below will restart the wallet with command-line options to repair the wallet, fix issues with corrupt blockhain files or missing/obsolete transactions.</source>
<translation>De onderstaande knoppen zullen de portemonnee opnieuw opstarten met command line opties om de portemonnee te repareren, problemen op te lossen met corrupte blockchain bestanden of ontbrekende/verouderde transacties.</translation>
</message>
<message>
<source>Wallet repair options.</source>
<translation>Portemonnee herstelopties.</translation>
</message>
<message>
<source>Upgrade wallet to latest format on startup. (Note: this is NOT an update of the wallet itself!)</source>
<translation>Upgrade portemonnee naar nieuwste format bij opstarten. (Opmerking: dit is NIET een update van de portemonnee zelf!)</translation>
</message>
<message>
<source>Rebuild index</source>
<translation>Herbouw index</translation>
</message>
<message>
<source>In:</source>
<translation>In:</translation>
</message>
<message>
<source>Out:</source>
<translation>Uit:</translation>
</message>
<message>
<source>Welcome to the SAPP RPC console.</source>
<translation>Welkom bij de SAPP RPC console.</translation>
</message>
<message>
<source>&Disconnect Node</source>
<translation>&Koppel Node los</translation>
</message>
<message>
<source>Ban Node for</source>
<translation>Ban Node voor</translation>
</message>
<message>
<source>1 &hour</source>
<translation>1 &hour</translation>
</message>
<message>
<source>1 &day</source>
<translation>1 &day</translation>
</message>
<message>
<source>1 &week</source>
<translation>1 &week</translation>
</message>
<message>
<source>1 &year</source>
<translation>1 &year</translation>
</message>
<message>
<source>&Unban Node</source>
<translation>&Deblokkeer Node</translation>
</message>
<message>
<source>This will delete your local blockchain folders and the wallet will synchronize the complete Blockchain from scratch.<br /><br /></source>
<translation>Dit zal al jouw lokale blockchain mappen verwijderen en de portemonnee zal de blockchain helemaal opnieuw synchroniseren.<br /><br /></translation>
</message>
<message>
<source>This needs quite some time and downloads a lot of data.<br /><br /></source>
<translation>Dit vergt nogal wat tijd en downloadt veel data.<br /><br /></translation>
</message>
<message>
<source>Your transactions and funds will be visible again after the download has completed.<br /><br /></source>
<translation>Uw transacties en tegoeden zijn opnieuw zichtbaar nadat het downloaden is voltooid.<br /><br /></translation>
</message>
<message>
<source>Do you want to continue?.<br /></source>
<translation>Wil je verdergaan?<br /></translation>
</message>
<message>
<source>Confirm resync Blockchain</source>
<translation>Bevestig hersynchronisering van Blockchain</translation>
</message>
<message>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Gebruik omhoog en omlaag pijlen om de geschiedenis te navigeren, en<b>Ctrl-L</b>om scherm te wissen.</translation>
</message>
<message>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Type <b>help </b>voor een overzicht van beschikbare commando's.</translation>
</message>
<message>
<source>%1 B</source>
<translation>%1 B</translation>
</message>
<message>
<source>%1 KB</source>
<translation>%1 KB</translation>
</message>
<message>
<source>%1 MB</source>
<translation>%1 MB</translation>
</message>
<message>
<source>%1 GB</source>
<translation>%1 GB</translation>
</message>
<message>
<source>(node id: %1)</source>
<translation>(node id: %1)</translation>
</message>
<message>
<source>via %1</source>
<translation>via %1</translation>
</message>
<message>
<source>never</source>
<translation>nooit</translation>
</message>
<message>
<source>Inbound</source>
<translation>Inkomende</translation>
</message>
<message>
<source>Outbound</source>
<translation>Uitgaande</translation>
</message>
<message>
<source>Yes</source>
<translation>Ja</translation>
</message>
<message>
<source>No</source>
<translation>Nee</translation>
</message>
<message>
<source>Unknown</source>
<translation>Onbekend</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>Reuse one of the previously used receiving addresses.<br>Reusing addresses has security and privacy issues.<br>Do not use this unless re-generating a payment request made before.</source>
<translation>Hergebruik een van de eerder gebruikte ontvangstadressen.<br>Hergebruik van adressen heeft beveiligings- en privacyproblemen. <br>Gebruik dit niet tenzij u eerder een betalingsverzoek heeft aangemaakt.</translation>
</message>
<message>
<source>R&euse an existing receiving address (not recommended)</source>
<translation>Hergebruik een bestaand ontvangstadres (niet aanbevolen)</translation>
</message>
<message>
<source>&Message:</source>
<translation>&Bericht:</translation>
</message>
<message>
<source>An optional label to associate with the new receiving address.</source>
<translation>Een optioneel label om te associëren met het nieuwe ontvangstadres.</translation>
</message>
<message>
<source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the SAPP network.</source>
<translation>Een optioneel bericht dat aan het betalingsverzoek wordt gehecht, dat wordt weergegeven wanneer het verzoek wordt geopend. Opmerking: het bericht wordt niet verzonden met de betaling via het SAPP netwerk.</translation>
</message>
<message>
<source>RECEIVE</source>
<translation>ONTVANGEN</translation>
</message>
<message>
<source>An optional message to attach to the payment request, which will be displayed when the request is opened.<br>Note: The message will not be sent with the payment over the SAPP network.</source>
<translation>Een optioneel bericht dat aan het betalingsverzoek wordt gehecht, dat wordt weergegeven wanneer het verzoek wordt geopend.<br>Opmerking: het bericht wordt niet verzonden met de betaling via het SAPP netwerk.</translation>
</message>
<message>
<source>Use this form to request payments. All fields are <b>optional</b>.</source>
<translation>Gebruik dit formulier om betalingen aan te vragen. Alle velden zijn<b>optioneel</b>.</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Label:</translation>
</message>
<message>
<source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source>
<translation>Een optioneel bedrag om te vragen. Laat dit leeg of vul een nul in om geen specifiek bedrag te vragen.</translation>
</message>
<message>
<source>&Amount:</source>
<translation>&Hoeveelheid:</translation>
</message>
<message>
<source>&Request payment</source>
<translation>&Verzoek betaling</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Leeg alle velden van het formulier.</translation>
</message>
<message>
<source>Clear</source>
<translation>Leegmaken</translation>
</message>
<message>
<source>Requested payments history</source>
<translation>Betalingsverzoeken geschiedenis</translation>
</message>
<message>
<source>Show the selected request (does the same as double clicking an entry)</source>
<translation>Toon het geselecteerde verzoek (doet hetzelfde als dubbelklik op een item)</translation>
</message>
<message>
<source>Show</source>
<translation>Toon</translation>
</message>
<message>
<source>Remove the selected entries from the list</source>
<translation>Verwijder de geselecteerde vermeldingen uit de lijst</translation>
</message>
<message>
<source>Remove</source>
<translation>Verwijder</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopiëer label</translation>
</message>
<message>
<source>Copy message</source>
<translation>Bericht kopiëren</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopieer hoeveelheid</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>QR Code</translation>
</message>
<message>
<source>Copy &URI</source>
<translation>Kopieer &URI</translation>
</message>
<message>
<source>Copy &Address</source>
<translation>Kopieer &Adres</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>&Afbeelding opslaan...</translation>
</message>
<message>
<source>Request payment to %1</source>
<translation>Verzoek betaling aan %1</translation>
</message>
<message>
<source>Payment information</source>
<translation>Betalingsinformatie</translation>
</message>
<message>
<source>URI</source>
<translation>URI</translation>
</message>
<message>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<source>Amount</source>
<translation>Hoeveelheid</translation>
</message>
<message>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<source>Message</source>
<translation>Bericht</translation>
</message>
<message>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>Resulterende URI te lang, probeer de tekst voor label/bericht te verminderen.</translation>
</message>
<message>
<source>Error encoding URI into QR Code.</source>
<translation>Fout bij het coderen van URI in QR-code.</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<source>Message</source>
<translation>Bericht</translation>
</message>
<message>
<source>Amount</source>
<translation>Hoeveelheid</translation>
</message>
<message>
<source>(no label)</source>
<translation>(geen label)</translation>
</message>
<message>
<source>(no message)</source>
<translation>(geen bericht)</translation>
</message>
<message>
<source>(no amount)</source>
<translation>(geen hoeveelheid)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Verzend Munten</translation>
</message>
<message>
<source>SEND</source>
<translation>STUREN</translation>
</message>
<message>
<source>Coin Control Features</source>
<translation>Munt controle kenmerken</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Onvoldoende saldo!</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Kwantiteit:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Hoeveelheid:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Prioriteit:</translation>
</message>
<message>
<source>medium</source>
<translation>medium</translation>
</message>
<message>
<source>Fee:</source>
<translation>Kost:</translation>
</message>
<message>
<source>Dust:</source>
<translation>Stof:</translation>
</message>
<message>
<source>no</source>
<translation>nee</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Na de kost:</translation>
</message>
<message>
<source>Change:</source>
<translation>Wijzig:</translation>
</message>
<message>
<source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source>
<translation>Als dit geactiveerd is, maar het wisselgeld adres is leeg of ongeldig, wordt het wisselgeld verzonden naar een nieuw gegenereerd adres.</translation>
</message>
<message>
<source>Custom change address</source>
<translation>Aangepast wisselgeld adres</translation>
</message>
<message>
<source>Split UTXO</source>
<translation>Split UTXO</translation>
</message>
<message>
<source># of outputs</source>
<translation># outputs</translation>
</message>
<message>
<source>UTXO Size:</source>
<translation>UTXO grootte:</translation>
</message>
<message>
<source>0 SAPP</source>
<translation>0 SAPP</translation>
</message>
<message>
<source>Transaction Fee:</source>
<translation>Transactiekosten: </translation>
</message>
<message>
<source>Choose...</source>
<translation>Kies...</translation>
</message>
<message>
<source>collapse fee-settings</source>
<translation>Klap kosten instellingen in</translation>
</message>
<message>
<source>Minimize</source>
<translation>Minimaliseer</translation>
</message>
<message>
<source>per kilobyte</source>
<translation>per kilobyte</translation>
</message>
<message>
<source>total at least</source>
<translation>totaal tenminste</translation>
</message>
<message>
<source>(read the tooltip)</source>
<translation>(lees de tooltip)</translation>
</message>
<message>
<source>Custom:</source>
<translation>Aangepast:</translation>
</message>
<message>
<source>(Smart fee not initialized yet. This usually takes a few blocks...)</source>
<translation>(Smart fee nog niet geïnitialiseerd. Dit duurt meestal een paar blokken ...)</translation>
</message>
<message>
<source>SwiftX</source>
<translation>SwiftX</translation>
</message>
<message>
<source>Confirmation time:</source>
<translation>Bevestigingstijd:</translation>
</message>
<message>
<source>Open Coin Control...</source>
<translation>Open munt controle...</translation>
</message>
<message>
<source>Coins automatically selected</source>
<translation>Munten automatisch geselecteerd</translation>
</message>
<message>
<source>If the custom fee is set to 1000 uSAPs and the transaction is only 250 bytes, then "per kilobyte" only pays 250 uSAPs in fee,<br />while "at least" pays 1000 uSAPs. For transactions bigger than a kilobyte both pay by kilobyte.</source>
<translation>Als de aangepaste vergoeding is ingesteld op 1000 uSAPs en de transactie is slechts 250 bytes, dan betaalt per kilobyte alleen 250 uSAPP's in vergoeding,<br />terwijl "minstens" 1000 uSAPP's betaalt. Voor transacties die groter zijn dan een kilobyte, betalen beiden per kilobyte.</translation>
</message>
<message>
<source>If the custom fee is set to 1000 uSAPs and the transaction is only 250 bytes, then "per kilobyte" only pays 250 uSAPs in fee,<br />while "total at least" pays 1000 uSAPs. For transactions bigger than a kilobyte both pay by kilobyte.</source>
<translation>Als de aangepaste vergoeding is ingesteld op 1000 uSAPs en de transactie is slechts 250 bytes, dan betaalt per kilobyte alleen 250 uSAPP's in vergoeding,<br />terwijl "totaal minstens" 1000 uSAPP's betaalt. Voor transacties die groter zijn dan een kilobyte, betalen beiden per kilobyte.</translation>
</message>
<message>
<source>Paying only the minimum fee is just fine as long as there is less transaction volume than space in the blocks.<br />But be aware that this can end up in a never confirming transaction once there is more demand for SAPP transactions than the network can process.</source>
<translation>Alleen de minimale vergoeding betalen is prima, zolang er minder transactie volume is dan ruimte in de blokken. <br />Maar wees ervan bewust dat dit kan leiden tot een nooit bevestigende transactie wanneer er meer vraag is naar SAPP transacties dan het netwerk kan verwerken.</translation>
</message>
<message>
<source>normal</source>
<translation>normaal</translation>
</message>
<message>
<source>fast</source>
<translation>snel</translation>
</message>
<message>
<source>Recommended</source>
<translation>Aanbevolen</translation>
</message>
<message>
<source>Send as zero-fee transaction if possible</source>
<translation>Zend als zero-fee transactie indien mogelijk</translation>
</message>
<message>
<source>(confirmation may take longer)</source>
<translation>(bevestiging kan langer duren)</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Bevestig de verzendactie</translation>
</message>
<message>
<source>S&end</source>
<translation>V&erzenden</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Leeg alle velden van het formulier.</translation>
</message>
<message>
<source>Clear &All</source>
<translation>Verwijder &Alles</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>Verzend naar meerdere ontvangers tegelijk</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>Voeg &ontvanger toe</translation>
</message>
<message>
<source>Anonymized SAPP</source>
<translation>Geanonimiseerde SAPP</translation>
</message>
<message>
<source>Balance:</source>
<translation>Balans:</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Kopieer kwanititeit</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopieer hoeveelheid</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Kopiëer kost</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Kopiëer na kost</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Kopieer bytes</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Kopieer prioriteit</translation>
</message>
<message>
<source>Copy dust</source>
<translation>Kopieer dust</translation>
</message>
<message>
<source>Copy change</source>
<translation>Kopieer wisselgeld</translation>
</message>
<message>
<source>The split block tool does not work when sending to outside addresses. Try again.</source>
<translation>Het split block tool werkt niet bij het verzenden naar externe adressen. Probeer het nog eens.</translation>
</message>
<message>
<source>The split block tool does not work with multiple addresses. Try again.</source>
<translation>Het split block tool werkt niet met meerdere adressen. Probeer het nog eens.</translation>
</message>
<message>
<source>Warning: Invalid SAPP address</source>
<translation>Waarschuwing: Ongeldig SAPP adres</translation>
</message>
<message>
<source>%1 to %2</source>
<translation>%1 naar %2</translation>
</message>
<message>
<source>Are you sure you want to send?</source>
<translation>Ben je zeker dat je wilt verzenden?</translation>
</message>
<message>
<source>are added as transaction fee</source>
<translation>worden toegevoegd als transactiekosten</translation>
</message>
<message>
<source>Total Amount = <b>%1</b><br />= %2</source>
<translation>Totale hoeveelheid = <b>%1 </b><br />= %2</translation>
</message>
<message>
<source>Confirm send coins</source>
<translation>Bevestig verzending coins</translation>
</message>
<message>
<source>A fee %1 times higher than %2 per kB is considered an insanely high fee.</source>
<translation>Een vergoeding %1 keer hoger dan %2 per kB wordt beschouwd als een zwaar hoge vergoeding.</translation>
</message>
<message numerus="yes">
<source>Estimated to begin confirmation within %n block(s).</source>
<translation><numerusform>Schatting om te beginnen met bevestiging in %n blokken.</numerusform><numerusform>Schatting om te beginnen met bevestiging in %n blokken.</numerusform></translation>
</message>
<message>
<source>The recipient address is not valid, please recheck.</source>
<translation>Het ontvangstadres is niet geldig, controleer deze.</translation>
</message>
<message>
<source>using SwiftX</source>
<translation>met SwiftX</translation>
</message>
<message>
<source> split into %1 outputs using the UTXO splitter.</source>
<translation>gesplitst in %1 outputs met behulp van de UTXO splitter.</translation>
</message>
<message>
<source><b>(%1 of %2 entries displayed)</b></source>
<translation><b>(%1 van %2 vermeldingen weergegeven)</b></translation>
</message>
<message>
<source>The amount to pay must be larger than 0.</source>
<translation>Het te betalen bedrag moet groter zijn dan 0.</translation>
</message>
<message>
<source>The amount exceeds your balance.</source>
<translation>Het bedrag overschrijdt uw saldo.</translation>
</message>
<message>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Het totaal overschrijdt uw saldo wanneer de transactievergoeding %1 is inbegrepen.</translation>
</message>
<message>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Dubbel adres gevonden, kan alleen per keer per verzendoperatie naar elk adres versturen.</translation>
</message>
<message>
<source>Transaction creation failed!</source>
<translation>Transactie creatie mislukt!</translation>
</message>
<message>
<source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>De transactie is afgewezen! Dit kan gebeuren als sommige munten in je portemonnee al waren uitgegeven, zoals als je een kopie van de wallet.dat en munten in de kopie waren besteed maar niet gemarkeerd zoals hier besteed.</translation>
</message>
<message>
<source>Error: The wallet was unlocked only to anonymize coins.</source>
<translation>Fout: De portemonnee was alleen geopend om munten te anonimiseren.</translation>
</message>
<message>
<source>Error: The wallet was unlocked only to anonymize coins. Unlock canceled.</source>
<translation>Fout: De portemonnee was alleen geopend om munten te anonimiseren. Ontgrendelen geannuleerd.</translation>
</message>
<message>
<source>Pay only the minimum fee of %1</source>
<translation>Betaal alleen de minimumkost van %1</translation>
</message>
<message>
<source>Warning: Unknown change address</source>
<translation>Waarschuwing: Ongekend wisselgeld adres</translation>
</message>
<message>
<source>(no label)</source>
<translation>(geen label)</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>This is a normal payment.</source>
<translation>Dit is een normale betaling.</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>Betaal &Naar:</translation>
</message>
<message>
<source>The SAPP address to send the payment to</source>
<translation>Het SAPP adres om de betaling naar te verzenden</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Kies een eerder gebruikt adres</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Plak adres vanaf klembord</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Remove this entry</source>
<translation>Verwijder dit item</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Label:</translation>
</message>
<message>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation>Vul een label in voor dit adres om deze toe te voegen aan de lijst met gebruikte adressen</translation>
</message>
<message>
<source>A&mount:</source>
<translation>&Hoeveelheid:</translation>
</message>
<message>
<source>Message:</source>
<translation>Bericht:</translation>
</message>
<message>
<source>A message that was attached to the SAPP: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the SAPP network.</source>
<translation>Een bericht dat is gehecht aan de SAPP: URI die bij de transactie wordt opgeslagen voor uw referentie. Opmerking: dit bericht wordt niet verzonden via het SAPP netwerk.</translation>
</message>
<message>
<source>This is an unverified payment request.</source>
<translation>Dit is een ongeverifieerd betalingsverzoek.</translation>
</message>
<message>
<source>Pay To:</source>
<translation>Betaal aan:</translation>
</message>
<message>
<source>Memo:</source>
<translation>Memo:</translation>
</message>
<message>
<source>This is a verified payment request.</source>
<translation>Dit is een geverifieerd betalingsverzoek.</translation>
</message>
<message>
<source>Enter a label for this address to add it to your address book</source>
<translation>Voer een label in voor dit adres om het toe te voegen aan jouw adresboek</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
<message>
<source>Sapphire Core is shutting down...</source>
<translation>Sapphire Core is aan het afsluiten...</translation>
</message>
<message>
<source>Do not shut down the computer until this window disappears.</source>
<translation>Sluit de computer niet af voordat dit venster verdwenen is.</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>Signatures - Sign / Verify a Message</source>
<translation>Handtekeningen - Onderteken / Verifieer een Bericht</translation>
</message>
<message>
<source>&Sign Message</source>
<translation>&Onderteken Bericht</translation>
</message>
<message>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Je kunt berichten met je adressen ondertekenen om te bewijzen dat je ze bezit. Wees voorzichtig om niets vaags te ondertekenen, omdat phishing aanvallen je misschien kunnen proberen om je identiteit over te geven. Teken alleen volledig gedetailleerde verklaringen aan waar je mee akkoord gaat.</translation>
</message>
<message>
<source>The SAPP address to sign the message with</source>
<translation>Het SAPP adres om het bericht met te ondertekenen</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Kies een eerder gebruikt adres</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Plak adres vanaf klembord</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Enter the message you want to sign here</source>
<translation>Voer hier het bericht in dat u wilt ondertekenen</translation>
</message>
<message>
<source>Signature</source>
<translation>Handtekening</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>Kopieer de huidige handtekening naar het systeemklembord</translation>
</message>
<message>
<source>Sign the message to prove you own this SAPP address</source>
<translation>Onderteken het bericht om te bewijzen dat u het SAPP adres bezit</translation>
</message>
<message>
<source>The SAPP address the message was signed with</source>
<translation>Het SAPP adres waarmee het bericht was ondertekend</translation>
</message>
<message>
<source>Verify the message to ensure it was signed with the specified SAPP address</source>
<translation>Controleer een bericht om te verifiëren dat het ondertekend is door het gespecificeerde SAPP adres</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>Onderteken &Bericht</translation>
</message>
<message>
<source>Reset all sign message fields</source>
<translation>Maak alle ondertekenvelden leeg</translation>
</message>
<message>
<source>Clear &All</source>
<translation>Verwijder &Alles</translation>
</message>
<message>
<source>&Verify Message</source>
<translation>&Verifieer Bericht</translation>
</message>
<message>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Voer het ondertekening adres in, verifieer het bericht (zorg ervoor dat je line breaks, spaties, tabs, enz.) en onderteken hieronder om het bericht te verifiëren. Wees voorzichtig om niet meer in de handtekening te lezen dan in het ondertekende bericht zelf, om te voorkomen dat je door een man-in-de-middle aanval wordt getroffen.</translation>
</message>
<message>
<source>Verify &Message</source>
<translation>Verifieer &Bericht</translation>
</message>
<message>
<source>Reset all verify message fields</source>
<translation>Maak alle verifiëren van het bericht velden leeg</translation>
</message>
<message>
<source>Click "Sign Message" to generate signature</source>
<translation>Klik op "Onderteken Bericht" om een handtekening te genereren</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>Het ingevoerde adres is ongeldig.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>Controleer het adres en probeer het opnieuw.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>Het opgegeven adres verwijst niet naar een sleutel.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>Portemonnee-ontsleuteling is geannuleerd.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>Geheime sleutel voor het ingevoerde adres is niet beschikbaar.</translation>
</message>
<message>
<source>Message signing failed.</source>
<translation>Bericht ondertekenen mislukt.</translation>
</message>
<message>
<source>Message signed.</source>
<translation>Bericht ondertekend.</translation>
</message>
<message>
<source>The signature could not be decoded.</source>
<translation>De handtekening kon niet gedecodeerd worden.</translation>
</message>
<message>
<source>Please check the signature and try again.</source>
<translation>Controleer de handtekening en probeer het opnieuw.</translation>
</message>
<message>
<source>The signature did not match the message digest.</source>
<translation>De handtekening kwam niet overeen met de berichtsamenvatting.</translation>
</message>
<message>
<source>Message verification failed.</source>
<translation>Bericht verificatie mislukt.</translation>
</message>
<message>
<source>Message verified.</source>
<translation>Bericht geverifieerd.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>Sapphire Core</source>
<translation>SAPP Kern</translation>
</message>
<message>
<source>Version %1</source>
<translation>Versie %1</translation>
</message>
<message>
<source>The Bitcoin Core developers</source>
<translation>De Bitcoin Kernontwikkelaars</translation>
</message>
<message>
<source>The Dash Core developers</source>
<translation>De Dash Kernontwikkelaars</translation>
</message>
<message>
<source>The Sapphire Core developers</source>
<translation>De SAPP Kernontwikkelaars</translation>
</message>
<message>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<source>KB/s</source>
<translation>KB/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message numerus="yes">
<source>Open for %n more block(s)</source>
<translation><numerusform>Klaar voor %n meer blokken</numerusform><numerusform>Klaar voor %n meer blokken</numerusform></translation>
</message>
<message>
<source>Open until %1</source>
<translation>Open tot %1</translation>
</message>
<message>
<source>conflicted</source>
<translation>conflicteert</translation>
</message>
<message>
<source>%1/offline</source>
<translation>%1/offline</translation>
</message>
<message>
<source>%1/unconfirmed</source>
<translation>%1/onbevestigd</translation>
</message>
<message>
<source>%1 confirmations</source>
<translation>%1 bevestigingen</translation>
</message>
<message>
<source>%1/offline (verified via SwiftX)</source>
<translation>%1/offline (gecontroleerd via SwiftX)</translation>
</message>
<message>
<source>%1/confirmed (verified via SwiftX)</source>
<translation>%1/bevestigd (gecontroleerd via swifttx)</translation>
</message>
<message>
<source>%1 confirmations (verified via SwiftX)</source>
<translation>%1 bevestigingen (gecontroleerd via swifttx)</translation>
</message>
<message>
<source>%1/offline (SwiftX verification in progress - %2 of %3 signatures)</source>
<translation>%1/offline (SwiftTX controle in uitvoering - %2 van %3 handtekeningen)</translation>
</message>
<message>
<source>%1/confirmed (SwiftX verification in progress - %2 of %3 signatures )</source>
<translation>%1/bevestigd (SwiftTX controle in uitvoering - %2 van %3 handtekeningen)</translation>
</message>
<message>
<source>%1 confirmations (SwiftX verification in progress - %2 of %3 signatures)</source>
<translation>%1 bevestigingen (SwiftTX controle in uitvoering -%2 van %3 handtekeningen)</translation>
</message>
<message>
<source>%1/offline (SwiftX verification failed)</source>
<translation>%1/offline (SwiftTX verificatie mislukt)</translation>
</message>
<message>
<source>%1/confirmed (SwiftX verification failed)</source>
<translation>%1/bevestigd (SwiftTX verificatie mislukt)</translation>
</message>
<message>
<source>Status</source>
<translation>Status</translation>
</message>
<message>
<source>, has not been successfully broadcast yet</source>
<translation>, is nog niet succesvol uitgezonden</translation>
</message>
<message numerus="yes">
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, uitgezonden via %n nodes</numerusform><numerusform>, uitgezonden via %n nodes</numerusform></translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Source</source>
<translation>Bron</translation>
</message>
<message>
<source>Generated</source>
<translation>Gegenereerd</translation>
</message>
<message>
<source>From</source>
<translation>Van</translation>
</message>
<message>
<source>unknown</source>
<translation>ongekend</translation>
</message>
<message>
<source>To</source>
<translation>Naar</translation>
</message>
<message>
<source>own address</source>
<translation>eigen adres</translation>
</message>
<message>
<source>watch-only</source>
<translation>watch-only</translation>
</message>
<message>
<source>label</source>
<translation>label</translation>
</message>
<message>
<source>Credit</source>
<translation>Credit</translation>
</message>
<message numerus="yes">
<source>matures in %n more block(s)</source>
<translation><numerusform>maturiteit in %n meer blokken</numerusform><numerusform>maturiteit in %n meer blokken</numerusform></translation>
</message>
<message>
<source>not accepted</source>
<translation>niet geaccepteerd</translation>
</message>
<message>
<source>Debit</source>
<translation>Debet</translation>
</message>
<message>
<source>Total debit</source>
<translation>Totaal debet</translation>
</message>
<message>
<source>Total credit</source>
<translation>Totaal credit</translation>
</message>
<message>
<source>Transaction fee</source>
<translation>Transactiekosten</translation>
</message>
<message>
<source>Net amount</source>
<translation>Netto bedrag</translation>
</message>
<message>
<source>Message</source>
<translation>Bericht</translation>
</message>
<message>
<source>Comment</source>
<translation>Reactie</translation>
</message>
<message>
<source>Transaction ID</source>
<translation>Transactie ID</translation>
</message>
<message>
<source>Output index</source>
<translation>Output index</translation>
</message>
<message>
<source>Merchant</source>
<translation>Winkelier</translation>
</message>
<message>
<source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Gegenereerde munten moeten %1 blokken rijpen voordat ze kunnen worden uitgegeven. Wanneer je dit blok hebt gegenereerd, wordt het naar het netwerk uitgezonden om aan de blockchain toegevoegd te worden. Als het niet in de keten komt, verandert de staat in "niet geaccepteerd" en zal het niet uitgeefbaar worden. Dit kan af en toe gebeuren als een andere node binnen een paar seconden van u een blok genereert.</translation>
</message>
<message>
<source>Debug information</source>
<translation>Debug informatie</translation>
</message>
<message>
<source>Transaction</source>
<translation>Transactie</translation>
</message>
<message>
<source>Inputs</source>
<translation>Inputs</translation>
</message>
<message>
<source>Amount</source>
<translation>Hoeveelheid</translation>
</message>
<message>
<source>true</source>
<translation>waar</translation>
</message>
<message>
<source>false</source>
<translation>onwaar</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>Transaction details</source>
<translation>Transactiedetails</translation>
</message>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>Dit venster laat een gedetailleerde beschrijving van de transactie zien</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<source>Address</source>
<translation>Adres</translation>
</message>
<message numerus="yes">
<source>Open for %n more block(s)</source>
<translation><numerusform>Klaar voor %n meer blokken</numerusform><numerusform>Klaar voor %n meer blokken</numerusform></translation>
</message>
<message>
<source>Open until %1</source>
<translation>Open tot %1</translation>
</message>
<message>
<source>Offline</source>
<translation>Offline</translation>
</message>
<message>
<source>Unconfirmed</source>
<translation>Onbevestigd</translation>
</message>
<message>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation>Bevestigen (%1 van %2 aanbevolen bevestigingen)</translation>
</message>
<message>
<source>Confirmed (%1 confirmations)</source>
<translation>Bevestigd (%1 bevestigingen)</translation>
</message>
<message>
<source>Conflicted</source>
<translation>Geconflicteerd</translation>
</message>
<message>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation>Onvolwassen (%1 bevestigingen, zullen beschikbaar zijn na %2)</translation>
</message>
<message>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Dit blok is niet ontvangen door andere nodes en wordt waarschijnlijk niet geaccepteerd!</translation>
</message>
<message>
<source>Received with</source>
<translation>Ontvangen met</translation>
</message>
<message>
<source>Masternode Reward</source>
<translation>Masternode beloning</translation>
</message>
<message>
<source>Received from</source>
<translation>Ontvangen van</translation>
</message>
<message>
<source>Received via Obfuscation</source>
<translation>Verkregen via verduistering</translation>
</message>
<message>
<source>SAPP Stake</source>
<translation>SAPP Stake</translation>
</message>
<message>
<source>zSAP Stake</source>
<translation>zSAP Stake</translation>
</message>
<message>
<source>Obfuscation Denominate</source>
<translation>Verduistering denominatie</translation>
</message>
<message>
<source>Obfuscation Collateral Payment</source>
<translation>Verduistering zijdelingse betaling</translation>
</message>
<message>
<source>Obfuscation Make Collateral Inputs</source>
<translation>Verduistering maakt zijdelingse inputs</translation>
</message>
<message>
<source>Obfuscation Create Denominations</source>
<translation>Verduistering creëert denominaties</translation>
</message>
<message>
<source>Converted SAPP to zSAP</source>
<translation>Geconverteerde SAPP naar zSAP</translation>
</message>
<message>
<source>Spent zSAP</source>
<translation>Bestede zSAP</translation>
</message>
<message>
<source>Received SAPP from zSAP</source>
<translation>Ontvangen SAPP van zSAP</translation>
</message>
<message>
<source>Minted Change as zSAP from zSAP Spend</source>
<translation>Gemint wisselgeld als zSAP van bestede zSAP</translation>
</message>
<message>
<source>Converted zSAP to SAPP</source>
<translation>Geconverteerde zSAP naar SAPP</translation>
</message>
<message>
<source>Anonymous (zSAP Transaction)</source>
<translation>Anonieme (zSAP transactie)</translation>
</message>
<message>
<source>Anonymous (zSAP Stake)</source>
<translation>Anonieme (zSAP stake)</translation>
</message>
<message>
<source>Sent to</source>
<translation>Verzenden naar</translation>
</message>
<message>
<source>Orphan Block - Generated but not accepted. This does not impact your holdings.</source>
<translation>Orphan Block - Gegenereerd maar niet geaccepteerd. Dit heeft geen invloed op uw bezit.</translation>
</message>
<message>
<source>Payment to yourself</source>
<translation>Betaling naar jezelf</translation>
</message>
<message>
<source>Mined</source>
<translation>Mined</translation>
</message>
<message>
<source>Obfuscated</source>
<translation>Verduisterd</translation>
</message>
<message>
<source>watch-only</source>
<translation>watch-only</translation>
</message>
<message>
<source>(n/a)</source>
<translation>(n/b)</translation>
</message>
<message>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transactie status. Beweeg over dit veld om het aantal bevestigingen te tonen.</translation>
</message>
<message>
<source>Date and time that the transaction was received.</source>
<translation>Datum en tijd waarop de transactie is ontvangen.</translation>
</message>
<message>
<source>Type of transaction.</source>
<translation>Type transactie.</translation>
</message>
<message>
<source>Whether or not a watch-only address is involved in this transaction.</source>
<translation>Of een watch-only adres wel of niet betrokken is bij deze transactie.</translation>
</message>
<message>
<source>Destination address of transaction.</source>
<translation>Bestemming adres van de transactie.</translation>
</message>
<message>
<source>Amount removed from or added to balance.</source>
<translation>Bedrag verwijderd uit of toegevoegd aan saldo.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>All</source>
<translation>Alle</translation>
</message>
<message>
<source>Today</source>
<translation>Vandaag</translation>
</message>
<message>
<source>This week</source>
<translation>Deze week</translation>
</message>
<message>
<source>This month</source>
<translation>Deze maand</translation>
</message>
<message>
<source>Last month</source>
<translation>Afgelopen maand</translation>
</message>
<message>
<source>This year</source>
<translation>Dit jaar</translation>
</message>
<message>
<source>Range...</source>
<translation>Omvang...</translation>
</message>
<message>
<source>Most Common</source>
<translation>Meest voorkomend</translation>
</message>
<message>
<source>Received with</source>
<translation>Ontvangen met</translation>
</message>
<message>
<source>Sent to</source>
<translation>Verzenden naar</translation>
</message>
<message>
<source>To yourself</source>
<translation>Naar jezelf</translation>
</message>
<message>
<source>Mined</source>
<translation>Mined</translation>
</message>
<message>
<source>Minted</source>
<translation>Minted</translation>
</message>
<message>
<source>Masternode Reward</source>
<translation>Masternode beloning</translation>
</message>
<message>
<source>Zerocoin Mint</source>
<translation>Zerocoin Mint</translation>
</message>
<message>
<source>Zerocoin Spend</source>
<translation>Zerocoin uitgegeven</translation>
</message>
<message>
<source>Zerocoin Spend to Self</source>
<translation>Zerocoin uitgegeven aan jezelf</translation>
</message>
<message>
<source>Other</source>
<translation>Andere</translation>
</message>
<message>
<source>Enter address or label to search</source>
<translation>Adres of label invullen om te zoeken</translation>
</message>
<message>
<source>Min amount</source>
<translation>Minimale hoeveelheid</translation>
</message>
<message>
<source>Copy address</source>
<translation>Kopieer adres</translation>
</message>
<message>
<source>Copy label</source>
<translation>Kopiëer label</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Kopieer hoeveelheid</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Kopieer transactie ID</translation>
</message>
<message>
<source>Edit label</source>
<translation>Label wijzigen</translation>
</message>
<message>
<source>Show transaction details</source>
<translation>Bekijk transactiedetails</translation>
</message>
<message>
<source>Export Transaction History</source>
<translation>Exporteer Transactiegeschiedenis</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Kommagescheiden bestand (*.csv)</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Bevestigd</translation>
</message>
<message>
<source>Watch-only</source>
<translation>Watch-only</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<source>Address</source>
<translation>Adres</translation>
</message>
<message>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Export Mislukt</translation>
</message>
<message>
<source>There was an error trying to save the transaction history to %1.</source>
<translation>Er is een fout opgetreden om de transactiegeschiedenis te bewaren naar %1.</translation>
</message>
<message>
<source>Exporting Successful</source>
<translation>Exporteren succesvol</translation>
</message>
<message>
<source>Received SAPP from zSAP</source>
<translation>Ontvangen SAPP van zSAP</translation>
</message>
<message>
<source>Zerocoin Spend, Change in zSAP</source>
<translation>Zerocoin besteed, wisselgeld in zSAP</translation>
</message>
<message>
<source>The transaction history was successfully saved to %1.</source>
<translation>De transactiegeschiedenis is succesvol bewaard in %1.</translation>
</message>
<message>
<source>Range:</source>
<translation>Bereik:</translation>
</message>
<message>
<source>to</source>
<translation>naar</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
<message>
<source>Unit to show amounts in. Click to select another unit.</source>
<translation>Eenheid om bedragen te laten zien. Klik om een andere eenheid te selecteren.</translation>
</message>
</context>
<context>
<name>WalletFrame</name>
<message>
<source>No wallet has been loaded.</source>
<translation>Er is geen portemonnee ingeladen.</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<source>Send Coins</source>
<translation>Verzend Munten</translation>
</message>
<message>
<source>SwiftX doesn't support sending values that high yet. Transactions are currently limited to %1 SAPP.</source>
<translation>SwiftTX ondersteunt geen verzendwaarden die zo hoog zijn. Transacties zijn momenteel beperkt tot %1 SAPP.</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<source>HISTORY</source>
<translation>GESCHIEDENIS</translation>
</message>
<message>
<source>&Export</source>
<translation>&Exporteren</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exporteer de data in de huidige tab naar een bestand</translation>
</message>
<message>
<source>Selected amount:</source>
<translation>Geselecteerde hoeveelheid:</translation>
</message>
<message>
<source>Backup Wallet</source>
<translation>Backup portemonnee</translation>
</message>
<message>
<source>Wallet Data (*.dat)</source>
<translation>Portemonneegegevens (*.dat)</translation>
</message>
</context>
<context>
<name>ZSapControlDialog</name>
<message>
<source>Select zSAP to Spend</source>
<translation>Selecteer zSAP om te besteden</translation>
</message>
<message>
<source>Quantity</source>
<translation>Hoeveelheid</translation>
</message>
<message>
<source>0</source>
<translation>0</translation>
</message>
<message>
<source>zSAP</source>
<translation>zSAP</translation>
</message>
<message>
<source>Select/Deselect All</source>
<translation>Selecteer/Deselecteer Alles</translation>
</message>
<message>
<source>Is Spendable</source>
<translation>Is Uitgeefbaar</translation>
</message>
</context>
<context>
<name>sap-core</name>
<message>
<source>(1 = keep tx meta data e.g. account owner and payment request information, 2 = drop tx meta data)</source>
<translation>(1 = houd tx meta data, bijvoorbeeld account eigenaar en betalingsverzoek informatie, 2 = drop tx meta data)</translation>
</message>
<message>
<source>Allow JSON-RPC connections from specified source. Valid for <ip> are a single IP (e.g. 1.2.3.4), a network/netmask (e.g. 1.2.3.4/255.255.255.0) or a network/CIDR (e.g. 1.2.3.4/24). This option can be specified multiple times</source>
<translation>Sta JSON-RPC connecties toe van de opgegeven bron. Geldig voor <ip> zijn een enkel IP (bijvoorbeeld 1.2.3.4), een netwerk/netmask (bijvoorbeeld 1.2.3.4/255.255.255.0) of een netwerk/CIDR (bijvoorbeeld 1.2.3.4/24). Deze optie kan meerdere keren worden opgegeven</translation>
</message>
<message>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Bind naar het gegeven adres en luister er altijd naar. Gebruik [host]:poort notatie voor IPv6</translation>
</message>
<message>
<source>Bind to given address and whitelist peers connecting to it. Use [host]:port notation for IPv6</source>
<translation>Bind naar bepaald adres en whitelist peers die er verbinding mee maken. Gebruik [host]:poort notatie voor IPv6</translation>
</message>
<message>
<source>Bind to given address to listen for JSON-RPC connections. Use [host]:port notation for IPv6. This option can be specified multiple times (default: bind to all interfaces)</source>
<translation>Bind naar het opgegeven adres om te luisteren naar JSON-RPC verbindingen. Gebruik [host]:poort notatie voor IPv6. Deze optie kan meerdere keren worden opgegeven (standaard: bind aan alle interfaces)</translation>
</message>
<message>
<source>Calculated accumulator checkpoint is not what is recorded by block index</source>
<translation>Het berekende accumulatie controlepunt is niet wat wordt geregistreerd door de blokindex</translation>
</message>
<message>
<source>Cannot obtain a lock on data directory %s. Sapphire Core is probably already running.</source>
<translation>Kan geen vergrendeling op data directory %s verkrijgen. Sapphire Core loopt waarschijnlijk al.</translation>
</message>
<message>
<source>Change automatic finalized budget voting behavior. mode=auto: Vote for only exact finalized budget match to my generated budget. (string, default: auto)</source>
<translation>Verander automatisch gefinaliseerd budget voting gedrag. modus=auto: Stem enkel voor exact gefinaliseerde budget overeenkomend met mijn gegenereerde budget. (tekenreeks, standaard: auto)</translation>
</message>
<message>
<source>Continuously rate-limit free transactions to <n>*1000 bytes per minute (default:%u)</source>
<translation>Doorlopend rate-limit gratis transacties naar <n>*1000 bytes per minuut (standaard: %u)</translation>
</message>
<message>
<source>Create new files with system default permissions, instead of umask 077 (only effective with disabled wallet functionality)</source>
<translation>Maak nieuwe bestanden met systeem standaard permissies, in plaats van umask 077 (alleen effectief met gedeactiveerde wallet functionaliteit)</translation>
</message>
<message>
<source>Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup</source>
<translation>Verwijder alle portemonnee transacties en herstel alleen die delen van de blockchain via -rescan bij opstarten</translation>
</message>
<message>
<source>Delete all zerocoin spends and mints that have been recorded to the blockchain database and reindex them (0-1, default: %u)</source>
<translation>Verwijder alle bestede Zerocoin en mints die zijn geregistreerd in de blockchain database en her-indexeer ze (0-1, standaard: %u)</translation>
</message>
<message>
<source>Distributed under the MIT software license, see the accompanying file COPYING or <http://www.opensource.org/licenses/mit-license.php>.</source>
<translation>Gedistribueerd onder de MIT software licentie, zie het bijgevoegde bestand COPYING of <http://www.opensource.org/licenses/mit-license.php>.</translation>
</message>
<message>
<source>Enable automatic wallet backups triggered after each zSAP minting (0-1, default: %u)</source>
<translation>Schakel automatische portemonnee back-ups in geactiveerd na elke geminte zSAP (0-1, standaard: %u)</translation>
</message>
<message>
<source>Enable or disable staking functionality for SAPP inputs (0-1, default: %u)</source>
<translation>In- of uitschakelen staking functionaliteit voor SAPP inputs (0-1, standaard: %u)</translation>
</message>
<message>
<source>Enable or disable staking functionality for zSAP inputs (0-1, default: %u)</source>
<translation>In- of uitschakelen staking functionaliteit voor zSAP inputs (0-1, standaard: %u)</translation>
</message>
<message>
<source>Enable spork administration functionality with the appropriate private key.</source>
<translation>Activeer de spork administratie functionaliteit met de juiste private sleutel.</translation>
</message>
<message>
<source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source>
<translation>Voer de regressietest modus uit, die een speciale chain gebruikt waarin blokken direct kunnen worden opgelost.</translation>
</message>
<message>
<source>Error: Listening for incoming connections failed (listen returned error %s)</source>
<translation>Fout: het luisteren naar inkomende verbindingen is mislukt (listen gaf fout %s terug)</translation>
</message>
<message>
<source>Error: The transaction is larger than the maximum allowed transaction size!</source>
<translation>Fout: de transactie is groter dan de maximaal toegestane transactiegrootte!</translation>
</message>
<message>
<source>Error: Unsupported argument -socks found. Setting SOCKS version isn't possible anymore, only SOCKS5 proxies are supported.</source>
<translation>Fout: Niet ondersteund argument -socks gevonden. Het instellen van SOCKS versie is niet meer mogelijk, alleen SOCKS5 proxy's worden ondersteund.</translation>
</message>
<message>
<source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source>
<translation>Uitvoeren commando wanneer een relevante waarschuwing is ontvangen of we zien een echt lange fork (%s in cmd wordt vervangen door bericht)</translation>
</message>
<message>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Uitvoeren commando wanneer een portemonnee transactie verandert (%s in cmd wordt vervangen door TxID)</translation>
</message>
<message>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Uitvoeren commando wanneer het beste blok verandert (%s in cmd is vervangen door block hash)</translation>
</message>
<message>
<source>Fees (in SAPP/Kb) smaller than this are considered zero fee for relaying (default: %s)</source>
<translation>Kosten (in SAPP/Kb) kleiner dan dit worden beschouwd als zero fee voor heruitzending (standaard: %s)</translation>
</message>
<message>
<source>Fees (in SAPP/Kb) smaller than this are considered zero fee for transaction creation (default: %s)</source>
<translation>Kosten (in SAPP/Kb) kleiner dan dit worden beschouwd als zero fee voor transactie verrichting (standaard: %s)</translation>
</message>
<message>
<source>Flush database activity from memory pool to disk log every <n> megabytes (default: %u)</source>
<translation>Leeg database activiteit uit geheugen pool naar schijf log elke <n> megabytes (standaard: %u)</translation>
</message>
<message>
<source>Found unconfirmed denominated outputs, will wait till they confirm to continue.</source>
<translation>Gevonden onbevestigde gedenomineerde outputs, wachten tot ze bevestigd zijn om verder te gaan.</translation>
</message>
<message>
<source>If paytxfee is not set, include enough fee so transactions begin confirmation on average within n blocks (default: %u)</source>
<translation>Als paytxfee niet is ingesteld, sluit voldoende kosten in, zodat transacties beginnen te confirmeren binnen gemiddeld n blokken (standaard: %u)</translation>
</message>
<message>
<source>In this mode -genproclimit controls how many blocks are generated immediately.</source>
<translation>In deze modus controleert -genproclimit hoeveel blokken er onmiddellijk worden gegenereerd.</translation>
</message>
<message>
<source>Insufficient or insufficient confirmed funds, you might need to wait a few minutes and try again.</source>
<translation>Onvoldoende of onvoldoende bevestigd geld, u moet mogelijk een paar minuten wachten en het opnieuw proberen.</translation>
</message>
<message>
<source>Invalid amount for -maxtxfee=<amount>: '%s' (must be at least the minrelay fee of %s to prevent stuck transactions)</source>
<translation>Ongeldige hoeveelheid voor -maxtxfee=<amount>: '%s' (moet ten minste de minimale vergoeding van %s zijn om hangende transacties te voorkomen)</translation>
</message>
<message>
<source>Keep the specified amount available for spending at all times (default: 0)</source>
<translation>Houd het gespecificeerde bedrag altijd beschikbaar voor uitgaven te allen tijde (standaard: 0)</translation>
</message>
<message>
<source>Log transaction priority and fee per kB when mining blocks (default: %u)</source>
<translation>Log transactie prioriteit en vergoeding per kB wanneer blokken worden gemined (standaard: %u)</translation>
</message>
<message>
<source>Maintain a full transaction index, used by the getrawtransaction rpc call (default: %u)</source>
<translation>Onderhoud een volledige transactie index, gebruikt door de getrawtransaction rpc call (standaard: %u)</translation>
</message>
<message>
<source>Maximum size of data in data carrier transactions we relay and mine (default: %u)</source>
<translation>Maximale gegevensgrootte in data carrier transacties die we relayen en minen (standaard: %u)</translation>
</message>
<message>
<source>Maximum total fees to use in a single wallet transaction, setting too low may abort large transactions (default: %s)</source>
<translation>Maximale totale kosten die in een enkele portefeuille kunnen worden gebruikt, een te lage instelling kan grote transacties afbreken (standaard: %s)</translation>
</message>
<message>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: %u)</source>
<translation>Aantal seconden om te voorkomen dat misdragende peers opnieuw verbinden (standaard: %u)</translation>
</message>
<message>
<source>Obfuscation uses exact denominated amounts to send funds, you might simply need to anonymize some more coins.</source>
<translation>Verduistering maakt gebruik van exacte gedenomineerde bedragen om fondsen te verzenden, je zou misschien nog wat munten moeten anonimiseren.</translation>
</message>
<message>
<source>Output debugging information (default: %u, supplying <category> is optional)</source>
<translation>Output debugging informatie (default: %u, verschaffen <category> is optioneel)</translation>
</message>
<message>
<source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source>
<translation>Query voor peer adressen via DNS lookup, als er weinig adressen zijn (standaard: 1 tenzij -connect)</translation>
</message>
<message>
<source>Randomize credentials for every proxy connection. This enables Tor stream isolation (default: %u)</source>
<translation>Willekeurige credentials voor elke proxy verbinding. Dit stelt Tor stream isolatie in staat (standaard: %u)</translation>
</message>
<message>
<source>Require high priority for relaying free or low-fee transactions (default:%u)</source>
<translation>Vereist hoge prioriteit voor het relayen van gratis of low-fee transacties (standaard: %u)</translation>
</message>
<message>
<source>Send trace/debug info to console instead of debug.log file (default: %u)</source>
<translation>Stuur trace/debug info naar console in plaats van debug.log bestand (standaard: %u)</translation>
</message>
<message>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source>
<translation>Stel maximale grootte van transacties met hoge prioriteit/low-fee in bytes in (standaard: %d)</translation>
</message>
<message>
<source>Set the number of script verification threads (%u to %d, 0 = auto, <0 = leave that many cores free, default: %d)</source>
<translation>Stel het aantal script verificatie threads (%u tot %d, 0 = auto, <0 = laat dat aantal kernen vrij, standaard: %d)</translation>
</message>
<message>
<source>Set the number of threads for coin generation if enabled (-1 = all cores, default: %d)</source>
<translation>Stel het aantal threads voor munt generatie in indien geactiveerd (-1 = alle kernen, standaard: %d)</translation>
</message>
<message>
<source>Show N confirmations for a successfully locked transaction (0-9999, default: %u)</source>
<translation>Toon N bevestigingen voor een succesvol opgesloten transactie (0-9999, standaard: %u)</translation>
</message>
<message>
<source>Support filtering of blocks and transaction with bloom filters (default: %u)</source>
<translation>Ondersteun filteren van blokken en transactie met bloom filters (standaard: %u)</translation>
</message>
<message>
<source>This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit <https://www.openssl.org/> and cryptographic software written by Eric Young and UPnP software written by Thomas Bernard.</source>
<translation>Dit product bevat software ontwikkeld door het OpenSSL Project voor gebruik in de OpenSSL Toolkit <https://www.openssl.org/> en cryptografische software geschreven door Eric Young en UPnP software geschreven door Thomas Bernard.</translation>
</message>
<message>
<source>Unable to bind to %s on this computer. Sapphire Core is probably already running.</source>
<translation>Niet mogelijk te binden aan %s op deze computer. Sapphire Core loopt waarschijnlijk al.</translation>
</message>
<message>
<source>Unable to locate enough Obfuscation denominated funds for this transaction.</source>
<translation>Kan niet genoeg verduistering gedenomineerde fondsen voor deze transactie vinden.</translation>
</message>
<message>
<source>Unable to locate enough Obfuscation non-denominated funds for this transaction that are not equal 10000 SAPP.</source>
<translation>Kan niet genoeg verduistering niet gedenomineerde fondsen voor deze transactie vinden die niet gelijk zijn aan 10000 SAPP.</translation>
</message>
<message>
<source>Unable to locate enough funds for this transaction that are not equal 10000 SAPP.</source>
<translation>Kan niet genoeg fondsen voor deze transactie vinden die niet gelijk zijn aan 10000 SAPP.</translation>
</message>
<message>
<source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: %s)</source>
<translation>Gebruik aparte SOCKS5 proxy om peers via Tor verborgen services te bereiken (standaard: %s)</translation>
</message>
<message>
<source>Warning: -maxtxfee is set very high! Fees this large could be paid on a single transaction.</source>
<translation>Waarschuwing: -maxtxfee is zeer hoog ingesteld! Deze hoge kosten kunnen worden betaald op een enkele transactie.</translation>
</message>
<message>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Waarschuwing: -paytxfee is zeer hoog ingesteld! Dit zijn de transactie kosten die je betaalt als je een transactie verstuurt.</translation>
</message>
<message>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Sapphire Core will not work properly.</source>
<translation>Waarschuwing: Controleer of de datum en tijd van je computer juist zijn! Als je klok verkeerd staat, werkt Sapphire Core niet goed.</translation>
</message>
<message>
<source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source>
<translation>Waarschuwing: het netwerk lijkt er niet helemaal mee eens te zijn! Sommige miners lijken problemen te ondervinden.</translation>
</message>
<message>
<source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Waarschuwing: Wij lijken het er niet helemaal eens te zijn met onze peers! Mogelijk moet je upgraden, of andere nodes moeten mogelijk upgraden.</translation>
</message>
<message>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Waarschuwing: fout lezen wallet.dat! Alle sleutels lezen correct, maar transactie gegevens of adresboek invoeringen kunnen missen of niet correct zijn.</translation>
</message>
<message>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Waarschuwing: wallet.dat corrupt, data gered! Originele wallet.dat opgeslagen als wallet.{timestamp}.bak in %s; als je saldo of transacties onjuist zijn, moet je een back-up herstellen.</translation>
</message>
<message>
<source>Whitelist peers connecting from the given netmask or IP address. Can be specified multiple times.</source>
<translation>Whitelist peers verbinden van het opgegeven netmask of IP adres. Kan meerdere keren worden opgegeven.</translation>
</message>
<message>
<source>Whitelisted peers cannot be DoS banned and their transactions are always relayed, even if they are already in the mempool, useful e.g. for a gateway</source>
<translation>Whitelisted peers kunnen niet DoS banned worden en hun transacties worden altijd doorgestuurd, zelfs als ze al in de mempool zijn, nuttig bijv. voor een gateway</translation>
</message>
<message>
<source>You must specify a masternodeprivkey in the configuration. Please see documentation for help.</source>
<translation>Je moet een masternodeprivkey opgeven in de configuratie. Raadpleeg de documentatie voor hulp.</translation>
</message>
<message>
<source>(45328 could be used only on mainnet)</source>
<translation>(45328 kan alleen op mainnet worden gebruikt)</translation>
</message>
<message>
<source>(default: %s)</source>
<translation>(standaard: %s)</translation>
</message>
<message>
<source>(default: 1)</source>
<translation>(standaard: 1)</translation>
</message>
<message>
<source>(must be 45328 for mainnet)</source>
<translation>(moet 45328 voor mainnet zijn)</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
<translation>Accepteer command line en JSON-RPC commando's</translation>
</message>
<message>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Accepteer verbindingen van buitenaf (standaard: 1 als geen -proxy of -connect)</translation>
</message>
<message>
<source>Accept public REST requests (default: %u)</source>
<translation>Accepteer publieke REST verzoeken (standaard: %u)</translation>
</message>
<message>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Voeg een node toe om verbinding mee te maken en probeer de verbinding open te houden</translation>
</message>
<message>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>DNS lookups toestaan voor -addnode, -seednode en -connect</translation>
</message>
<message>
<source>Already have that input.</source>
<translation>Heeft die input al.</translation>
</message>
<message>
<source>Always query for peer addresses via DNS lookup (default: %u)</source>
<translation>Vraag altijd naar peer adressen via DNS lookup (standaard: %u)</translation>
</message>
<message>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Poog om privé sleutels te herstellen van een corrupte wallet.dat</translation>
</message>
<message>
<source>Automatically create Tor hidden service (default: %d)</source>
<translation>Creëer automatisch de Tor verborgen service (standaard: %d)</translation>
</message>
<message>
<source>Block creation options:</source>
<translation>Block creatie opties:</translation>
</message>
<message>
<source>Calculating missing accumulators...</source>
<translation>Calculeren van ontbrekende accumulators...</translation>
</message>
<message>
<source>Can't denominate: no compatible inputs left.</source>
<translation>Kan niet denomineren: er zijn geen compatibele inputs over.</translation>
</message>
<message>
<source>Can't find random Masternode.</source>
<translation>Kan geen willekeurige Masternode vinden.</translation>
</message>
<message>
<source>Can't mix while sync in progress.</source>
<translation>Kan niet mixen terwijl synchronisatie wordt uitgevoerd.</translation>
</message>
<message>
<source>Cannot downgrade wallet</source>
<translation>Kan de portemonnee niet downgraden</translation>
</message>
<message>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Kan -bind adres niet oplossen: '%s'</translation>
</message>
<message>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Kan -externalip adres niet oplossen: '%s'</translation>
</message>
<message>
<source>Cannot resolve -whitebind address: '%s'</source>
<translation>Kan -whitebind adres niet oplossen: '%s'</translation>
</message>
<message>
<source>Cannot write default address</source>
<translation>Kan standaard adres niet schrijven</translation>
</message>
<message>
<source>Collateral not valid.</source>
<translation>Terugbetaling niet geldig.</translation>
</message>
<message>
<source>Connect only to the specified node(s)</source>
<translation>Verbind alleen met de opgegeven node(s)</translation>
</message>
<message>
<source>Connect through SOCKS5 proxy</source>
<translation>Verbind via SOCKS5 proxy</translation>
</message>
<message>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Verbind met een node om peer adressen te verkrijgen en verbreek verbinding</translation>
</message>
<message>
<source>Connection options:</source>
<translation>Connectie opties:</translation>
</message>
<message>
<source>Copyright (C) 2009-%i The Bitcoin Core Developers</source>
<translation>Copyright (C) 2009-%i The Bitcoin Kernontwikkelaars</translation>
</message>
<message>
<source>Copyright (C) 2014-%i The Dash Core Developers</source>
<translation>Copyright (C) 2014-%i The Dash Kernontwikkelaars</translation>
</message>
<message>
<source>Copyright (C) 2015-%i The Sapphire Core Developers</source>
<translation>Copyright (C) 2015-%i The Sapphire Kernontwikkelaars</translation>
</message>
<message>
<source>Corrupted block database detected</source>
<translation>Corrupte block database gedetecteerd</translation>
</message>
<message>
<source>Could not parse masternode.conf</source>
<translation>Kan masternode.conf niet parsen</translation>
</message>
<message>
<source>Debugging/Testing options:</source>
<translation>Debugging/Test opties:</translation>
</message>
<message>
<source>Delete blockchain folders and resync from scratch</source>
<translation>Verwijder blockchain mappen en hersynchroniseer alles opnieuw</translation>
</message>
<message>
<source>Disable OS notifications for incoming transactions (default: %u)</source>
<translation>Schakel OS notificaties uit voor inkomende transacties (standaard: %u)</translation>
</message>
<message>
<source>Disable safemode, override a real safe mode event (default: %u)</source>
<translation>Schakel safe mode uit, override een echte safe mode gebeurtenis (standaard: %u)</translation>
</message>
<message>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Ontdek eigen IP adres (standaard: 1 bij luisteren en niet -externalip)</translation>
</message>
<message>
<source>Do not load the wallet and disable wallet RPC calls</source>
<translation>Laad de portemonnee niet in en schakel portemonnee RPC oproepen uit</translation>
</message>
<message>
<source>Do you want to rebuild the block database now?</source>
<translation>Wil je de blok database nu herbouwen?</translation>
</message>
<message>
<source>Done loading</source>
<translation>Klaar met laden</translation>
</message>
<message>
<source>Enable automatic Zerocoin minting (0-1, default: %u)</source>
<translation>Zet automatische Zerocoin minting aan (0-1, standaard: %u)</translation>
</message>
<message>
<source>Enable publish hash transaction (locked via SwiftX) in <address></source>
<translation>Activeer publicatie hash transactie (vergrendeld via SwiftX) in <address></translation>
</message>
<message>
<source>Enable publish raw transaction (locked via SwiftX) in <address></source>
<translation>Activeer publicatie raw transactie (vergrendeld via SwiftX) in <address></translation>
</message>
<message>
<source>Enable the client to act as a masternode (0-1, default: %u)</source>
<translation>Schakel de client in als masternode (0-1, standaard: %u)</translation>
</message>
<message>
<source>Entries are full.</source>
<translation>De entries zijn vol.</translation>
</message>
<message>
<source>Error connecting to Masternode.</source>
<translation>Fout bij verbinden met Masternode.</translation>
</message>
<message>
<source>Error initializing block database</source>
<translation>Fout bij het initialiseren van blok database</translation>
</message>
<message>
<source>Error initializing wallet database environment %s!</source>
<translation>Fout bij het initialiseren van de wallet database omgeving %s!</translation>
</message>
<message>
<source>Error loading block database</source>
<translation>Error tijdens het laden van de block database</translation>
</message>
<message>
<source>Error loading wallet.dat</source>
<translation>Error tijdens het laden van wallet.dat</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Error tijdens het laden van wallet.dat: Portemonnee corrupt</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet requires newer version of Sapphire Core</source>
<translation>Fout bij het laden van wallet.dat: Portemonnee vereist een nieuwere versie van Sapphire Core</translation>
</message>
<message>
<source>Error opening block database</source>
<translation>Error tijdens het openen van de block database</translation>
</message>
<message>
<source>Error reading from database, shutting down.</source>
<translation>Error tijdens het lezen van de database, aan het afsluiten.</translation>
</message>
<message>
<source>Error recovering public key.</source>
<translation>Fout bij het herstellen van de publieke sleutel.</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
<message>
<source>Error: A fatal internal error occured, see debug.log for details</source>
<translation>Fout: Er is een fatale interne fout opgetreden, zie debug.log voor details</translation>
</message>
<message>
<source>Error: Can't select current denominated inputs</source>
<translation>Fout: Kan de huidige gedenomineerde inputs niet selecteren</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation>Error: Schijfruimte is laag!</translation>
</message>
<message>
<source>Error: Unsupported argument -tor found, use -onion.</source>
<translation>Fout: Niet ondersteund argument -tor gevonden, gebruik -onion.</translation>
</message>
<message>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>Fout: Portemonnee vergrendeld, niet in staat om transactie te creëren!</translation>
</message>
<message>
<source>Error: You already have pending entries in the Obfuscation pool</source>
<translation>Fout: U heeft al entries in afwachting in de verduistering pool</translation>
</message>
<message>
<source>Failed to calculate accumulator checkpoint</source>
<translation>Kon het controlepunt van de accumulator niet berekenen</translation>
</message>
<message>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Niet gelukt om te luisteren op een poort. Gebruik -listen=0 als je dit wilt.</translation>
</message>
<message>
<source>Failed to read block</source>
<translation>Mislukt om block te lezen</translation>
</message>
<message>
<source>Fee (in SAPP/kB) to add to transactions you send (default: %s)</source>
<translation>Fee (in SAPP/kB) om toe te voegen aan transacties die je verzendt (standaard: %s)</translation>
</message>
<message>
<source>Finalizing transaction.</source>
<translation>Transactie aan het voltooien.</translation>
</message>
<message>
<source>Force safe mode (default: %u)</source>
<translation>Forceer safe mode (standaard: %u)</translation>
</message>
<message>
<source>Found enough users, signing ( waiting %s )</source>
<translation>Genoeg gebruikers gevonden, aan het ondertekenen (%s aan het wachten)</translation>
</message>
<message>
<source>Found enough users, signing ...</source>
<translation>Genoeg gebruikers gevonden, aan het ondertekenen ...</translation>
</message>
<message>
<source>Generate coins (default: %u)</source>
<translation>Genereer munten (standaard: %u)</translation>
</message>
<message>
<source>How many blocks to check at startup (default: %u, 0 = all)</source>
<translation>Hoeveel blokken bij het opstarten controleren (standaard: %u, 0 = alles)</translation>
</message>
<message>
<source>If <category> is not supplied, output all debugging information.</source>
<translation>Als <category> niet is opgegeven, output alle debugging informatie.</translation>
</message>
<message>
<source>Importing...</source>
<translation>Importeren...</translation>
</message>
<message>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Importeert blokken uit extern blk000??.dat bestand</translation>
</message>
<message>
<source>Include IP addresses in debug output (default: %u)</source>
<translation>Inclusief IP adressen in debug output (standaard: %u)</translation>
</message>
<message>
<source>Incompatible mode.</source>
<translation>Modus is niet compatibel.</translation>
</message>
<message>
<source>Incompatible version.</source>
<translation>Versie is niet compatibel.</translation>
</message>
<message>
<source>Incorrect or no genesis block found. Wrong datadir for network?</source>
<translation>Het genesis block kan niet worden gevonden of is incorrect. Klopt datadir voor het netwerk?</translation>
</message>
<message>
<source>Information</source>
<translation>Informatie</translation>
</message>
<message>
<source>Initialization sanity check failed. Sapphire Core is shutting down.</source>
<translation>Initialisatie saniteitscontrole mislukt. Sapphire Core wordt afgesloten.</translation>
</message>
<message>
<source>Input is not valid.</source>
<translation>Ongeldige invoer.</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>Onvoldoende saldo</translation>
</message>
<message>
<source>Insufficient funds.</source>
<translation>Onvoldoende saldo.</translation>
</message>
<message>
<source>Invalid -onion address or hostname: '%s'</source>
<translation>Ongeldig -onion adres of hostnaam: '%s'</translation>
</message>
<message>
<source>Invalid amount for -maxtxfee=<amount>: '%s'</source>
<translation>Ongeldige hoeveelheid voor -maxtxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>Ongeldige hoeveelheid voor -minrelaytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>Ongeldige hoeveelheid voor -mintxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s' (must be at least %s)</source>
<translation>Ongeldige hoeveelheid voor -paytxfee=<amount>: '%s' (moet tenminste %s zijn)</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Ongeldige hoeveelheid voor -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -reservebalance=<amount></source>
<translation>Ongeldige hoeveelheid voor -reservebalance=<amount></translation>
</message>
<message>
<source>Invalid amount</source>
<translation>Ongeldige hoeveelheid</translation>
</message>
<message>
<source>Invalid masternodeprivkey. Please see documenation.</source>
<translation>Ongeldige masternodeprivkey. Zie documentatie.</translation>
</message>
<message>
<source>Invalid netmask specified in -whitelist: '%s'</source>
<translation>Ongeldige netmask opgegeven in -whitelist: '%s'</translation>
</message>
<message>
<source>Invalid port detected in masternode.conf</source>
<translation>Ongeldige poort gedetecteerd in masternode.conf</translation>
</message>
<message>
<source>Invalid private key.</source>
<translation>Ongeldige privésleutel.</translation>
</message>
<message>
<source>Invalid script detected.</source>
<translation>Ongeldige script gedetecteerd.</translation>
</message>
<message>
<source>Percentage of automatically minted Zerocoin (1-100, default: %u)</source>
<translation>Percentage automatisch geminte Zerocoin (1-100, standaard: %u)</translation>
</message>
<message>
<source>Reindex the SAPP and zSAP money supply statistics</source>
<translation>Indexeer de SAPP- en zSAP-geldvoorraadstatistieken opnieuw</translation>
</message>
<message>
<source>Reindexing zerocoin database...</source>
<translation>Her-indexeren Zerocoin database...</translation>
</message>
<message>
<source>Reindexing zerocoin failed</source>
<translation>Her-indexeren Zerocoin database mislukt</translation>
</message>
<message>
<source>Selected coins value is less than payment target</source>
<translation>Geselecteerde munt waarde is minder dan het betalingsdoel</translation>
</message>
<message>
<source>SwiftX options:</source>
        <translation>SwiftX opties:</translation>
</message>
<message>
<source>This is a pre-release test build - use at your own risk - do not use for staking or merchant applications!</source>
<translation>Dit is een pre-release test build - gebruik op eigen risico - niet gebruiken voor staking of handel applicaties!</translation>
</message>
<message>
<source> mints deleted
</source>
<translation>mints verwijderd
</translation>
</message>
<message>
<source> mints updated, </source>
<translation>mints bijgewerkt,</translation>
</message>
<message>
<source> unconfirmed transactions removed
</source>
<translation>onbevestigde transacties verwijderd
</translation>
</message>
<message>
<source>Disable all SAPP specific functionality (Masternodes, Zerocoin, SwiftX, Budgeting) (0-1, default: %u)</source>
        <translation>Schakel alle specifieke SAPP functionaliteit uit (Masternodes, Zerocoin, SwiftX, Budgeting) (0-1, standaard: %u)</translation>
</message>
<message>
<source>Enable SwiftX, show confirmations for locked transactions (bool, default: %s)</source>
<translation>Schakel SwiftX in, toon bevestigingen voor vergrendelde transacties (bool, standaard: %s)</translation>
</message>
<message>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Fout: De transactie is afgewezen! Dit kan gebeuren als sommige munten in je portemonnee al waren uitgegeven, bijvoorbeeld als je een kopie van wallet.dat gebruikt en munten in de kopie waren besteed maar hier niet als zodanig gemarkeerd.</translation>
</message>
<message>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>Fout: Deze transactie vereist transactiekosten van ten minste %s vanwege de hoeveelheid, de complexiteit of het gebruik van recent ontvangen fondsen!</translation>
</message>
<message>
<source>Error: Unsupported argument -checklevel found. Checklevel must be level 4.</source>
<translation>Fout: Niet ondersteund argument -checklevel gevonden. Checklevel moet niveau 4 zijn.</translation>
</message>
<message>
<source>Execute command when the best block changes and its size is over (%s in cmd is replaced by block hash, %d with the block size)</source>
<translation>Voer het commando uit als het beste blok verandert en de grootte ervan voorbij is (%s in cmd wordt vervangen door blokhash, %d met de blokgrootte)</translation>
</message>
<message>
<source>Failed to find coin set amongst held coins with less than maxNumber of Spends</source>
<translation>Het is niet gelukt om muntstukken te vinden onder de aangehouden munten met minder dan max. Aantal uitgaven</translation>
</message>
<message>
<source>In rare cases, a spend with 7 coins exceeds our maximum allowable transaction size, please retry spend using 6 or less coins</source>
<translation>In zeldzame gevallen overschrijdt een besteding met 7 munten uw maximaal toegestane transactiegrootte. Probeer het opnieuw met 6 of minder munten</translation>
</message>
<message>
<source>Preferred Denomination for automatically minted Zerocoin (1/5/10/50/100/500/1000/5000), 0 for no preference. default: %u)</source>
<translation>Voorkeur denominatie voor automatisch minted Zerocoin (1/5/10/50/100/500/1000/5000), 0 voor geen voorkeur. standaard: %u)</translation>
</message>
<message>
<source>Specify custom backup path to add a copy of any automatic zSAP backup. If set as dir, every backup generates a timestamped file. If set as file, will rewrite to that file every backup. If backuppath is set as well, 4 backups will happen</source>
<translation>Specificeer aangepast back-up pad om een kopie van elke automatische zSAP back-up toe te voegen. Indien ingesteld als dir zal elke back-up een bestand genereren met een tijdstempel. Indien ingesteld als file zal elke back-up herschreven worden naar dat bestand. Indien backuppath ook is ingesteld zullen 4 back-ups gedaan worden.</translation>
</message>
<message>
<source>Specify custom backup path to add a copy of any wallet backup. If set as dir, every backup generates a timestamped file. If set as file, will rewrite to that file every backup.</source>
<translation>Specificeer aangepast back-up pad om een kopie van elke portemonnee back-up toe te voegen. Indien ingesteld als dir zal elke back-up een bestand genereren met een tijdstempel. Indien ingesteld als file zal elke back-up herschreven worden naar dat bestand.</translation>
</message>
<message>
<source>SwiftX requires inputs with at least 6 confirmations, you might need to wait a few minutes and try again.</source>
        <translation>SwiftX vereist invoer van tenminste 6 bevestigingen, je dient wellicht een paar minuten te wachten en het opnieuw proberen.</translation>
</message>
<message>
<source><category> can be:</source>
        <translation><category> kan zijn:</translation>
</message>
<message>
<source>Attempt to force blockchain corruption recovery</source>
<translation>Poging om blockchain corruptie herstel te forceren</translation>
</message>
<message>
<source>CoinSpend: Accumulator witness does not verify</source>
<translation>CoinSpend: Accumulator witness controleert niet</translation>
</message>
<message>
<source>Display the stake modifier calculations in the debug.log file.</source>
<translation>Toon de berekeningen van de stake modificator in het debug.log bestand.</translation>
</message>
<message>
<source>Display verbose coin stake messages in the debug.log file.</source>
<translation>Toon verbose munt stake berichten in het debug.log bestand.</translation>
</message>
<message>
<source>Enable publish hash block in <address></source>
<translation>Activeer publicatie hash blok in <address></translation>
</message>
<message>
<source>Enable publish hash transaction in <address></source>
        <translation>Activeer publicatie hash transactie in <address></translation>
</message>
<message>
<source>Enable publish raw block in <address></source>
<translation>Activeer publicatie raw block in <address></translation>
</message>
<message>
<source>Enable publish raw transaction in <address></source>
<translation>Activeer publicatie raw transactie in <address></translation>
</message>
<message>
<source>Enable staking functionality (0-1, default: %u)</source>
<translation>Activeer staking functionaliteit (0-1, standaard: %u)</translation>
</message>
<message>
<source>Error: A fatal internal error occurred, see debug.log for details</source>
<translation>Fout: Er is een fatale interne fout opgetreden, zie debug.log voor meer informatie</translation>
</message>
<message>
<source>Error: No valid utxo!</source>
<translation>Fout: geen geldige utxo!</translation>
</message>
<message>
<source>Failed to create mint</source>
<translation>Het is niet gelukt om mint te maken</translation>
</message>
<message>
<source>Failed to deserialize</source>
<translation>Kan deserialiseren niet</translation>
</message>
<message>
<source>Failed to find Zerocoins in wallet.dat</source>
<translation>Niet gelukt om Zerocoins in wallet.dat te vinden.</translation>
</message>
<message>
<source>Failed to select a zerocoin</source>
<translation>Het is niet gelukt om een zerocoin te selecteren</translation>
</message>
<message>
<source>Failed to wipe zerocoinDB</source>
<translation>Mislukt om zerocoinDB te legen.</translation>
</message>
<message>
<source>Failed to write coin serial number into wallet</source>
<translation>Het serienummer van het muntstuk kan niet in de portemonnee worden geschreven</translation>
</message>
<message>
<source>Keep at most <n> unconnectable transactions in memory (default: %u)</source>
        <translation>Houd maximaal <n> niet te verbinden transacties in het geheugen (standaard: %u)</translation>
</message>
<message>
<source>Last Obfuscation was too recent.</source>
<translation>Laatste verduistering was te recent.</translation>
</message>
<message>
<source>Last successful Obfuscation action was too recent.</source>
<translation>Laatste succesvolle verduistering actie was te recent.</translation>
</message>
<message>
<source>Limit size of signature cache to <n> entries (default: %u)</source>
<translation>Limiet grootte van signature cache naar <n> invoer (standaard: %u)</translation>
</message>
<message>
<source>Line: %d</source>
<translation>Line: %d</translation>
</message>
<message>
<source>Listen for JSON-RPC connections on <port> (default: %u or testnet: %u)</source>
<translation>Luister naar JSON-RPC verbindingen op <port> (standaard: %u of testnet: %u)</translation>
</message>
<message>
<source>Listen for connections on <port> (default: %u or testnet: %u)</source>
<translation>Luister naar verbindingen op <port> (standaard: %u of testnet: %u)</translation>
</message>
<message>
<source>Loading addresses...</source>
<translation>Adressen laden...</translation>
</message>
<message>
<source>Loading block index...</source>
<translation>Blockindex laden...</translation>
</message>
<message>
<source>Loading budget cache...</source>
<translation>Budget cache laden...</translation>
</message>
<message>
<source>Loading masternode cache...</source>
<translation>Masternode cache laden...</translation>
</message>
<message>
<source>Loading masternode payment cache...</source>
<translation>Masternode betalingscache laden...</translation>
</message>
<message>
<source>Loading sporks...</source>
<translation>Sporks laden...</translation>
</message>
<message>
<source>Loading wallet... (%3.2f %%)</source>
<translation>Portemonnee laden... (%3.2f %%)</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>Portemonnee aan het laden...</translation>
</message>
<message>
<source>Location of the auth cookie (default: data dir)</source>
<translation>Locatie van de auth cookie (standaard: data dir)</translation>
</message>
<message>
<source>Lock is already in place.</source>
<translation>Vergrendeling is al uitgevoerd.</translation>
</message>
<message>
<source>Lock masternodes from masternode configuration file (default: %u)</source>
<translation>Masternodes vergrendelen van masternode configuratiebestand (standaard: %u)</translation>
</message>
<message>
<source>Lookup(): Invalid -proxy address or hostname: '%s'</source>
<translation>Lookup (): ongeldig -proxy-adres of hostnaam: '%s'</translation>
</message>
<message>
<source>Maintain at most <n> connections to peers (default: %u)</source>
<translation>Bijhouden maximaal <n> connecties naar peers (standaard: %u)</translation>
</message>
<message>
<source>Masternode options:</source>
<translation>Masternode opties:</translation>
</message>
<message>
<source>Masternode queue is full.</source>
<translation>Masternode wachtrij zit vol.</translation>
</message>
<message>
<source>Masternode:</source>
<translation>Masternode:</translation>
</message>
<message>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: %u)</source>
<translation>Maximaal per connectie ontvangst buffer, <n>*1000 bytes (standaard: %u)</translation>
</message>
<message>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: %u)</source>
<translation>Maximaal per connectie verstuur buffer, <n>*1000 bytes (standaard: %u)</translation>
</message>
<message>
<source>Mint did not make it into blockchain</source>
<translation>Mint heeft de blockchain niet gehaald</translation>
</message>
<message>
<source>Missing input transaction information.</source>
        <translation>Invoer transactie informatie ontbreekt.</translation>
</message>
<message>
<source>Mixing in progress...</source>
<translation>Bezig met mixen...</translation>
</message>
<message>
<source>Need address because change is not exact</source>
<translation>Noodzaak van adres omdat wijziging niet exact is</translation>
</message>
<message>
<source>Need to specify a port with -whitebind: '%s'</source>
<translation>Moet een poort opgeven met -whitebind: '%s'</translation>
</message>
<message>
<source>No Masternodes detected.</source>
<translation>Geen Masternodes gedetecteerd.</translation>
</message>
<message>
<source>No compatible Masternode found.</source>
<translation>Geen compatibele Masternode gevonden.</translation>
</message>
<message>
<source>No funds detected in need of denominating.</source>
<translation>Geen fondsen gedetecteerd die denominatie nodig hebben.</translation>
</message>
<message>
<source>No matching denominations found for mixing.</source>
<translation>Geen passende denominaties gevonden voor mixing.</translation>
</message>
<message>
<source>Node relay options:</source>
<translation>Node relay opties:</translation>
</message>
<message>
<source>Non-standard public key detected.</source>
<translation>Niet standaard publieke sleutel gedetecteerd.</translation>
</message>
<message>
<source>Not compatible with existing transactions.</source>
<translation>Niet compatibel met bestaande transacties.</translation>
</message>
<message>
<source>Not enough file descriptors available.</source>
<translation>Niet genoeg bestandsbeschrijvingen beschikbaar.</translation>
</message>
<message>
<source>Not in the Masternode list.</source>
<translation>Niet in de Masternode lijst.</translation>
</message>
<message>
<source>Number of automatic wallet backups (default: 10)</source>
<translation>Aantal automatische portemonnee backups (standaard: 10)</translation>
</message>
<message>
<source>Number of custom location backups to retain (default: %d)</source>
<translation>Aantal aangepaste locatie back-ups om te behouden (standaard: %d)</translation>
</message>
<message>
<source>Obfuscation is idle.</source>
<translation>Verduistering is inactief.</translation>
</message>
<message>
<source>Obfuscation request complete:</source>
<translation>Verduistering verzoek compleet:</translation>
</message>
<message>
<source>Obfuscation request incomplete:</source>
<translation>Verduistering verzoek incompleet:</translation>
</message>
<message>
<source>Only accept block chain matching built-in checkpoints (default: %u)</source>
<translation>Accepteer alleen blockchain matching met ingebouwde controlepunten (standaard: %u)</translation>
</message>
<message>
<source>Only connect to nodes in network <net> (ipv4, ipv6 or onion)</source>
<translation>Verbind alleen met nodes in het netwerk <net> (ipv4, ipv6 of onion)</translation>
</message>
<message>
<source>Options:</source>
<translation>Opties:</translation>
</message>
<message>
<source>Password for JSON-RPC connections</source>
<translation>Wachtwoord voor JSON-RPC connecties</translation>
</message>
<message>
<source>isValid(): Invalid -proxy address or hostname: '%s'</source>
<translation>isValid (): ongeldig -proxy-adres of hostnaam: '%s'</translation>
</message>
<message>
<source>Preparing for resync...</source>
<translation>Voorbereiden van hersynchronisatie...</translation>
</message>
<message>
<source>Prepend debug output with timestamp (default: %u)</source>
<translation>Voeg debug output met timestamp toe (standaard: %u)</translation>
</message>
<message>
<source>Print version and exit</source>
<translation>Print versie en verlaat</translation>
</message>
<message>
<source>RPC server options:</source>
<translation>RPC server opties:</translation>
</message>
<message>
<source>Randomly drop 1 of every <n> network messages</source>
        <translation>Willekeurig laten vallen van 1 van elke <n> netwerk berichten</translation>
</message>
<message>
<source>Randomly fuzz 1 of every <n> network messages</source>
<translation>Willekeurig vervagen van 1 van elke <n> netwerk berichten</translation>
</message>
<message>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Herstel blockchain index van huidige blk000??.dat bestanden</translation>
</message>
<message>
<source>Receive and display P2P network alerts (default: %u)</source>
<translation>Ontvang en laat P2P netwerkmeldingen zien (standaard: %u)</translation>
</message>
<message>
<source>Reindex the accumulator database</source>
<translation>Herindexeer de verzameldatabase</translation>
</message>
<message>
<source>Relay and mine data carrier transactions (default: %u)</source>
<translation>Relay en mine data carrier transacties (standaard: %u)</translation>
</message>
<message>
<source>Relay non-P2SH multisig (default: %u)</source>
<translation>Relay non P2SH multisig (default: %u)</translation>
</message>
<message>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Rescan de blockchain voor ontbrekende portemonnee transacties</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>Opnieuw scannen...</translation>
</message>
<message>
<source>ResetMintZerocoin finished: </source>
<translation>ResetMintZerocoin voltooid: </translation>
</message>
<message>
<source>ResetSpentZerocoin finished: </source>
<translation>ResetSpentZerocoin voltooid: </translation>
</message>
<message>
<source>Run a thread to flush wallet periodically (default: %u)</source>
<translation>Voer regelmatig een thread om de portemonnee te spoelen uit (standaard: %u)</translation>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
<translation>Voer op de achtergrond uit als een daemon en accepteer commando's</translation>
</message>
<message>
<source>Send transactions as zero-fee transactions if possible (default: %u)</source>
<translation>Zend transacties als zero fee transacties indien mogelijk (standaard: %u)</translation>
</message>
<message>
<source>Session not complete!</source>
<translation>Sessie niet voltooid!</translation>
</message>
<message>
<source>Session timed out.</source>
<translation>Sessie verlopen.</translation>
</message>
<message>
<source>Set database cache size in megabytes (%d to %d, default: %d)</source>
<translation>Stel de cache grootte van de database in megabytes in (%d tot %d, standaard: %d)</translation>
</message>
<message>
<source>Set external address:port to get to this masternode (example: %s)</source>
<translation>Extern adres instellen:poort om bij deze masternode te komen (voorbeeld: %s)</translation>
</message>
<message>
<source>Set key pool size to <n> (default: %u)</source>
<translation>Stel key pool grootte in op <n> (standaard: %u)</translation>
</message>
<message>
<source>Set maximum block size in bytes (default: %d)</source>
<translation>Stel maximale block grootte in bytes in (default: %d)</translation>
</message>
<message>
<source>Set minimum block size in bytes (default: %u)</source>
<translation>Stel minimale block grootte in bytes in (default: %u)</translation>
</message>
<message>
<source>Set the Maximum reorg depth (default: %u)</source>
<translation>Stel de Maximale reorg diepte in (standaard: %u)</translation>
</message>
<message>
<source>Set the masternode private key</source>
<translation>Stel de masternode privé sleutel in</translation>
</message>
<message>
<source>Set the number of threads to service RPC calls (default: %d)</source>
<translation>Stel het aantal threads in om RPC oproepen te bedienen (standaard: %d)</translation>
</message>
<message>
<source>Sets the DB_PRIVATE flag in the wallet db environment (default: %u)</source>
<translation>Stelt de DB_PRIVATE vlag in de portemonnee db omgeving in (standaard: %u)</translation>
</message>
<message>
<source>Show all debugging options (usage: --help -help-debug)</source>
<translation>Toon alle debugging opties (gebruik: --help -help-debug)</translation>
</message>
<message>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Krimp debug.log bestand bij client startup (standaard: 1 wanneer geen -debug)</translation>
</message>
<message>
<source>Signing failed.</source>
<translation>Ondertekenen mislukt.</translation>
</message>
<message>
<source>Signing timed out.</source>
<translation>Ondertekening time out.</translation>
</message>
<message>
<source>Signing transaction failed</source>
<translation>Ondertekening transactie mislukt.</translation>
</message>
<message>
<source>Specify configuration file (default: %s)</source>
<translation>Specificeer configuratiebestand (standaard: %s)</translation>
</message>
<message>
<source>Specify connection timeout in milliseconds (minimum: 1, default: %d)</source>
<translation>Specificeer verbinding time-out in milliseconden (minimum: 1, standaard: %d)</translation>
</message>
<message>
<source>Specify data directory</source>
<translation>Specificeer data directory.</translation>
</message>
<message>
<source>Specify masternode configuration file (default: %s)</source>
<translation>Specificeer masternode configuratie bestand (default: %s)</translation>
</message>
<message>
<source>Specify pid file (default: %s)</source>
<translation>Specificeer pid bestand (default: %s)</translation>
</message>
<message>
<source>Specify wallet file (within data directory)</source>
<translation>Specificeer portemonnee bestand (in data directory)</translation>
</message>
<message>
<source>Specify your own public address</source>
<translation>Specificeer je eigen publieke addres</translation>
</message>
<message>
<source>Spend Valid</source>
<translation>Besteed geldig</translation>
</message>
<message>
<source>Spend unconfirmed change when sending transactions (default: %u)</source>
<translation>Onbevestigd wisselgeld besteden bij het verzenden van transacties (standaard: %u)</translation>
</message>
<message>
<source>Staking options:</source>
<translation>Staking opties:</translation>
</message>
<message>
<source>Stop running after importing blocks from disk (default: %u)</source>
<translation>Stop na het importeren van blokken van schijf (standaard: %u)</translation>
</message>
<message>
<source>Submitted following entries to masternode: %u / %d</source>
<translation>Ingediende volgende vermeldingen in masternode: %u / %d</translation>
</message>
<message>
<source>Submitted to masternode, waiting for more entries ( %u / %d ) %s</source>
<translation>Ingediend naar masternode, wachten op meer inzendingen (%u / %d) %s</translation>
</message>
<message>
<source>Submitted to masternode, waiting in queue %s</source>
<translation>Ingediend naar masternode, wachten in de wachtrij %s</translation>
</message>
<message>
<source>Synchronization failed</source>
<translation>Synchronisatie mislukt</translation>
</message>
<message>
<source>Synchronization finished</source>
<translation>Synchronisatie voltooid</translation>
</message>
<message>
<source>Synchronization pending...</source>
<translation>Synchronisatie in afwachting...</translation>
</message>
<message>
<source>Synchronizing budgets...</source>
<translation>Budgeten synchroniseren...</translation>
</message>
<message>
<source>Synchronizing masternode winners...</source>
<translation>Synchroniseren masternode winnaars...</translation>
</message>
<message>
<source>Synchronizing masternodes...</source>
<translation>Synchroniseren masternodes...</translation>
</message>
<message>
<source>Synchronizing sporks...</source>
<translation>Synchroniseren sporks...</translation>
</message>
<message>
<source>Syncing SAPP wallet...</source>
<translation>Synchroniseren SAPP portemonnee...</translation>
</message>
<message>
<source>The coin spend has been used</source>
<translation>De muntuitgaven zijn gebruikt</translation>
</message>
<message>
<source>The new spend coin transaction did not verify</source>
<translation>De nieuwe uitgave voor uitgavengeld heeft niet geverifieerd</translation>
</message>
<message>
<source>The selected mint coin is an invalid coin</source>
<translation>De geselecteerde muntmunt is een ongeldige munt</translation>
</message>
<message>
<source>The transaction did not verify</source>
<translation>De transactie heeft niet geverifieerd</translation>
</message>
<message>
<source>This help message</source>
<translation>Dit help bericht</translation>
</message>
<message>
<source>This is experimental software.</source>
<translation>Dit is experimentele software.</translation>
</message>
<message>
<source>This is intended for regression testing tools and app development.</source>
<translation>Dit is bedoeld voor regressie test tools en app ontwikkeling.</translation>
</message>
<message>
<source>This is not a Masternode.</source>
<translation>Dit is geen Masternode.</translation>
</message>
<message>
<source>Threshold for disconnecting misbehaving peers (default: %u)</source>
<translation>Drempel voor het verbreken van misdragende peers (standaard: %u)</translation>
</message>
<message>
<source>Too many spends needed</source>
<translation>Te veel uitgaven nodig</translation>
</message>
<message>
<source>Tor control port password (default: empty)</source>
<translation>Tor controle poort wachtwoord (standaard: leeg)</translation>
</message>
<message>
<source>Tor control port to use if onion listening enabled (default: %s)</source>
<translation>Tor controle poort om te gebruiken als onion listening geactiveerd is (standaard: %s)</translation>
</message>
<message>
<source>Transaction Created</source>
<translation>Transactie gemaakt</translation>
</message>
<message>
<source>Transaction Mint Started</source>
<translation>Transactie startte met Mint</translation>
</message>
<message>
<source>Transaction amount too small</source>
<translation>Transactie bedrag te klein</translation>
</message>
<message>
<source>Transaction amounts must be positive</source>
<translation>Transactie bedragen moeten positief zijn</translation>
</message>
<message>
<source>Transaction created successfully.</source>
<translation>Transactie is succesvol gemaakt.</translation>
</message>
<message>
<source>Transaction fees are too high.</source>
<translation>Transactiekosten zijn te hoog.</translation>
</message>
<message>
<source>Transaction not valid.</source>
<translation>Transactie is niet geldig.</translation>
</message>
<message>
<source>Transaction too large for fee policy</source>
<translation>Transactie te groot voor kosten beleid</translation>
</message>
<message>
<source>Transaction too large</source>
<translation>Transactie te groot</translation>
</message>
<message>
<source>Transmitting final transaction.</source>
<translation>Verzending van de definitieve transactie.</translation>
</message>
<message>
<source>Try to spend with a higher security level to include more coins</source>
<translation>Probeer te spenderen met een hoger beveiligingsniveau om meer munten op te nemen</translation>
</message>
<message>
<source>Trying to spend an already spent serial #, try again.</source>
        <translation>Poging om een reeds besteed serienummer uit te geven, probeer het opnieuw.</translation>
</message>
<message>
<source>Unable to bind to %s on this computer (bind returned error %s)</source>
<translation>Niet mogelijk te binden aan %s op deze computer (bind stuurt fout %s terug)</translation>
</message>
<message>
<source>Unable to find transaction containing mint</source>
<translation>Kan transactie met mint niet vinden</translation>
</message>
<message>
<source>Unable to sign spork message, wrong key?</source>
<translation>Kan sporkbericht niet tekenen, verkeerde sleutel?</translation>
</message>
<message>
<source>Unable to start HTTP server. See debug log for details.</source>
<translation>Kan HTTP-server niet starten. Zie foutopsporingslog voor details.</translation>
</message>
<message>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Onbekend netwerk gespecificeerd in -onlynet: '%s'</translation>
</message>
<message>
<source>Unknown state: id = %u</source>
<translation>Onbekende staat: id = %u</translation>
</message>
<message>
<source>Upgrade wallet to latest format</source>
<translation>Upgrade portemonnee naar nieuwste formaat</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: %u)</source>
<translation>Gebruik UPnP om de luisterpoort te mappen (standaard: %u)</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Gebruik UPnP om de luisterpoort te mappen (standaard: 1 bij het luisteren)</translation>
</message>
<message>
<source>Use a custom max chain reorganization depth (default: %u)</source>
<translation>Gebruik een aangepaste max chain reorganisatie diepte (standaard: %u)</translation>
</message>
<message>
<source>Use the test network</source>
<translation>Gebruik het test netwerk</translation>
</message>
<message>
<source>Username for JSON-RPC connections</source>
<translation>Gebruikersnaam voor JSON-RPC verbindingen</translation>
</message>
<message>
<source>Value is below the smallest available denomination (= 1) of zSAP</source>
<translation>Waarde is minder dan de kleinst beschikbare denominatie (=1) van zSAP</translation>
</message>
<message>
<source>Value more than Obfuscation pool maximum allows.</source>
<translation>Waarde meer dan verduistering pool maximaal toestaat.</translation>
</message>
<message>
<source>Verifying blocks...</source>
<translation>Blokken verifiëren...</translation>
</message>
<message>
<source>Verifying wallet...</source>
<translation>Portemonnee verifiëren...</translation>
</message>
<message>
<source>Version 1 zSAP require a security level of 100 to successfully spend.</source>
<translation>Versie 1 zSAP vereist een beveiligingsniveau van 100 om succesvol uit te geven.</translation>
</message>
<message>
<source>Wallet %s resides outside data directory %s</source>
<translation>Portemonnee %s verblijft buiten de data directory %s</translation>
</message>
<message>
<source>Wallet is locked.</source>
<translation>Portemonnee is vergrendeld.</translation>
</message>
<message>
<source>Wallet needed to be rewritten: restart Sapphire Core to complete</source>
<translation>Wallet moest worden herschreven: start Sapphire Core opnieuw om te voltooien</translation>
</message>
<message>
<source>Wallet options:</source>
<translation>Portemonnee opties:</translation>
</message>
<message>
<source>Wallet window title</source>
<translation>Portemonnee venster titel</translation>
</message>
<message>
<source>Warning</source>
<translation>Waarschuwing</translation>
</message>
<message>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Waarschuwing: Deze versie is verouderd, upgrade vereist!</translation>
</message>
<message>
<source>Warning: Unsupported argument -benchmark ignored, use -debug=bench.</source>
<translation>Waarschuwing: Niet- ondersteund argument -benchmark genegeerd, gebruik -debug=bench.</translation>
</message>
<message>
<source>Warning: Unsupported argument -debugnet ignored, use -debug=net.</source>
<translation>Waarschuwing: Niet ondersteunde argument -debugnet genegeerd, gebruik -debug=net.</translation>
</message>
<message>
<source>Will retry...</source>
<translation>Zal het opnieuw proberen...</translation>
</message>
<message>
<source>You don't have enough Zerocoins in your wallet</source>
<translation>Je hebt niet genoeg Zerocoins in je portemonnee</translation>
</message>
<message>
<source>You need to rebuild the database using -reindex to change -txindex</source>
<translation>Je moet de database herbouwen met -reindex om -txindex te wijzigen</translation>
</message>
<message>
<source>Your entries added successfully.</source>
<translation>Je gegevens zijn succesvol toegevoegd.</translation>
</message>
<message>
<source>Your transaction was accepted into the pool!</source>
<translation>Je transactie is geaccepteerd in de pool!</translation>
</message>
<message>
<source>Zapping all transactions from wallet...</source>
<translation>Verwijderen van alle transacties uit portemonnee...</translation>
</message>
<message>
<source>ZeroMQ notification options:</source>
<translation>ZeroMQ notificatie opties:</translation>
</message>
<message>
<source>Zerocoin options:</source>
<translation>Zerocoin opties:</translation>
</message>
<message>
<source>on startup</source>
<translation>tijdens het opstarten</translation>
</message>
<message>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat corrupt, redding mislukt</translation>
</message>
</context>
</TS>
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from airflow.operators.gcs_to_s3 import GoogleCloudStorageToS3Operator
from airflow.hooks.S3_hook import S3Hook
from tests.compat import mock
try:
from moto import mock_s3
except ImportError:
mock_s3 = None
TASK_ID = 'test-gcs-list-operator'
GCS_BUCKET = 'test-bucket'
DELIMITER = '.csv'
PREFIX = 'TEST'
S3_BUCKET = 's3://bucket/'
MOCK_FILES = ["TEST1.csv", "TEST2.csv", "TEST3.csv"]
class TestGoogleCloudStorageToS3Operator(unittest.TestCase):
# Test1: incremental behaviour (just some files missing)
@mock_s3
@mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
@mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
def test_execute_incremental(self, mock_hook, mock_hook2):
mock_hook.return_value.list.return_value = MOCK_FILES
mock_hook.return_value.download.return_value = b"testing"
mock_hook2.return_value.list.return_value = MOCK_FILES
operator = GoogleCloudStorageToS3Operator(task_id=TASK_ID,
bucket=GCS_BUCKET,
prefix=PREFIX,
delimiter=DELIMITER,
dest_aws_conn_id=None,
dest_s3_key=S3_BUCKET,
replace=False)
# create dest bucket
hook = S3Hook(aws_conn_id=None)
b = hook.get_bucket('bucket')
b.create()
b.put_object(Key=MOCK_FILES[0], Body=b'testing')
# we expect all except first file in MOCK_FILES to be uploaded
# and all the MOCK_FILES to be present at the S3 bucket
uploaded_files = operator.execute(None)
self.assertEqual(sorted(MOCK_FILES[1:]),
sorted(uploaded_files))
self.assertEqual(sorted(MOCK_FILES),
sorted(hook.list_keys('bucket', delimiter='/')))
# Test2: All the files are already in origin and destination without replace
@mock_s3
@mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
@mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
def test_execute_without_replace(self, mock_hook, mock_hook2):
|
# Test3: There are no files in destination bucket
@mock_s3
@mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
@mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
def test_execute(self, mock_hook, mock_hook2):
mock_hook.return_value.list.return_value = MOCK_FILES
mock_hook.return_value.download.return_value = b"testing"
mock_hook2.return_value.list.return_value = MOCK_FILES
operator = GoogleCloudStorageToS3Operator(task_id=TASK_ID,
bucket=GCS_BUCKET,
prefix=PREFIX,
delimiter=DELIMITER,
dest_aws_conn_id=None,
dest_s3_key=S3_BUCKET,
replace=False)
# create dest bucket without files
hook = S3Hook(aws_conn_id=None)
b = hook.get_bucket('bucket')
b.create()
# we expect all MOCK_FILES to be uploaded
# and all MOCK_FILES to be present at the S3 bucket
uploaded_files = operator.execute(None)
self.assertEqual(sorted(MOCK_FILES),
sorted(uploaded_files))
self.assertEqual(sorted(MOCK_FILES),
sorted(hook.list_keys('bucket', delimiter='/')))
# Test4: Destination and Origin are in sync but replace all files in destination
@mock_s3
@mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
@mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
def test_execute_with_replace(self, mock_hook, mock_hook2):
mock_hook.return_value.list.return_value = MOCK_FILES
mock_hook.return_value.download.return_value = b"testing"
mock_hook2.return_value.list.return_value = MOCK_FILES
operator = GoogleCloudStorageToS3Operator(task_id=TASK_ID,
bucket=GCS_BUCKET,
prefix=PREFIX,
delimiter=DELIMITER,
dest_aws_conn_id=None,
dest_s3_key=S3_BUCKET,
replace=True)
# create dest bucket with all the files
hook = S3Hook(aws_conn_id=None)
b = hook.get_bucket('bucket')
b.create()
[b.put_object(Key=MOCK_FILE, Body=b'testing') for MOCK_FILE in MOCK_FILES]
# we expect all MOCK_FILES to be uploaded and replace the existing ones
# and all MOCK_FILES to be present at the S3 bucket
uploaded_files = operator.execute(None)
self.assertEqual(sorted(MOCK_FILES),
sorted(uploaded_files))
self.assertEqual(sorted(MOCK_FILES),
sorted(hook.list_keys('bucket', delimiter='/')))
# Test5: Incremental sync with replace
@mock_s3
@mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
@mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
def test_execute_incremental_with_replace(self, mock_hook, mock_hook2):
mock_hook.return_value.list.return_value = MOCK_FILES
mock_hook.return_value.download.return_value = b"testing"
mock_hook2.return_value.list.return_value = MOCK_FILES
operator = GoogleCloudStorageToS3Operator(task_id=TASK_ID,
bucket=GCS_BUCKET,
prefix=PREFIX,
delimiter=DELIMITER,
dest_aws_conn_id=None,
dest_s3_key=S3_BUCKET,
replace=True)
# create dest bucket with just two files (the first two files in MOCK_FILES)
hook = S3Hook(aws_conn_id=None)
b = hook.get_bucket('bucket')
b.create()
[b.put_object(Key=MOCK_FILE, Body=b'testing') for MOCK_FILE in MOCK_FILES[:2]]
# we expect all the MOCK_FILES to be uploaded and replace the existing ones
# and all MOCK_FILES to be present at the S3 bucket
uploaded_files = operator.execute(None)
self.assertEqual(sorted(MOCK_FILES),
sorted(uploaded_files))
self.assertEqual(sorted(MOCK_FILES),
sorted(hook.list_keys('bucket', delimiter='/')))
| mock_hook.return_value.list.return_value = MOCK_FILES
mock_hook.return_value.download.return_value = b"testing"
mock_hook2.return_value.list.return_value = MOCK_FILES
operator = GoogleCloudStorageToS3Operator(task_id=TASK_ID,
bucket=GCS_BUCKET,
prefix=PREFIX,
delimiter=DELIMITER,
dest_aws_conn_id=None,
dest_s3_key=S3_BUCKET,
replace=False)
# create dest bucket with all the files
hook = S3Hook(aws_conn_id=None)
b = hook.get_bucket('bucket')
b.create()
[b.put_object(Key=MOCK_FILE, Body=b'testing') for MOCK_FILE in MOCK_FILES]
# we expect nothing to be uploaded
# and all the MOCK_FILES to be present at the S3 bucket
uploaded_files = operator.execute(None)
self.assertEqual([],
uploaded_files)
self.assertEqual(sorted(MOCK_FILES),
sorted(hook.list_keys('bucket', delimiter='/'))) |
main.go | package mario
func maxProfit(prices []int, fee int) int {
if len(prices) < 2 |
profit := []int{0, -prices[0]} // 0: no stock, 1: has stock
for i := 1; i < len(prices); i++ {
newProfit := make([]int, 2)
newProfit[0] = max(profit[0], profit[1]+prices[i]-fee)
newProfit[1] = max(profit[1], profit[0]-prices[i])
profit = newProfit
}
return profit[0]
}
func max(a, b int) (res int) {
if a > b {
res = a
} else {
res = b
}
return
}
| {
return 0
} |
digitalocean.go | /*
Copyright 2020 The cert-manager Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package digitalocean implements a DNS provider for solving the DNS-01
// challenge using digitalocean DNS.
package digitalocean
import (
"context"
"fmt"
"os"
"strings"
"github.com/digitalocean/godo"
"golang.org/x/oauth2"
"github.com/cert-manager/cert-manager/pkg/issuer/acme/dns/util"
)
// DNSProvider is an implementation of the acme.ChallengeProvider interface
type DNSProvider struct {
dns01Nameservers []string
client *godo.Client
}
// NewDNSProvider returns a DNSProvider instance configured for digitalocean.
// The access token must be passed in the environment variable DIGITALOCEAN_TOKEN
func NewDNSProvider(dns01Nameservers []string) (*DNSProvider, error) {
token := os.Getenv("DIGITALOCEAN_TOKEN")
return NewDNSProviderCredentials(token, dns01Nameservers)
}
// NewDNSProviderCredentials uses the supplied credentials to return a
// DNSProvider instance configured for digitalocean.
func NewDNSProviderCredentials(token string, dns01Nameservers []string) (*DNSProvider, error) {
if token == "" {
return nil, fmt.Errorf("DigitalOcean token missing")
}
c := oauth2.NewClient(
context.Background(),
oauth2.StaticTokenSource(&oauth2.Token{AccessToken: token}),
)
return &DNSProvider{
dns01Nameservers: dns01Nameservers,
client: godo.NewClient(c),
}, nil
}
| zoneName, err := util.FindZoneByFqdn(fqdn, c.dns01Nameservers)
if err != nil {
return err
}
// check if the record has already been created
records, err := c.findTxtRecord(fqdn)
if err != nil {
return err
}
for _, record := range records {
if record.Type == "TXT" && record.Data == value {
return nil
}
}
createRequest := &godo.DomainRecordEditRequest{
Type: "TXT",
Name: fqdn,
Data: value,
TTL: 60,
}
_, _, err = c.client.Domains.CreateRecord(
context.Background(),
util.UnFqdn(zoneName),
createRequest,
)
if err != nil {
return err
}
return nil
}
// CleanUp removes the TXT record matching the specified parameters
func (c *DNSProvider) CleanUp(domain, fqdn, value string) error {
zoneName, err := util.FindZoneByFqdn(fqdn, c.dns01Nameservers)
if err != nil {
return err
}
records, err := c.findTxtRecord(fqdn)
if err != nil {
return err
}
for _, record := range records {
_, err = c.client.Domains.DeleteRecord(context.Background(), util.UnFqdn(zoneName), record.ID)
if err != nil {
return err
}
}
return nil
}
func (c *DNSProvider) findTxtRecord(fqdn string) ([]godo.DomainRecord, error) {
zoneName, err := util.FindZoneByFqdn(fqdn, c.dns01Nameservers)
if err != nil {
return nil, err
}
allRecords, _, err := c.client.Domains.Records(
context.Background(),
util.UnFqdn(zoneName),
nil,
)
var records []godo.DomainRecord
// The record Name doesn't contain the zoneName, so
// lets remove it before filtering the array of record
targetName := fqdn
if strings.HasSuffix(fqdn, zoneName) {
targetName = fqdn[:len(fqdn)-len(zoneName)]
}
for _, record := range allRecords {
if util.ToFqdn(record.Name) == targetName {
records = append(records, record)
}
}
return records, err
} | // Present creates a TXT record to fulfil the dns-01 challenge
func (c *DNSProvider) Present(domain, fqdn, value string) error {
// if DigitalOcean does not have this zone then we will find out later |
lineFollowerArucoROS-checkpoint3.py | import sys
import cv2
import math
import time
import rospy
import serial
import argparse
import numpy as np
from std_srvs.srv import Empty
from turtlesim.msg import Pose
from geometry_msgs.msg import Twist
# ROS movement global variables and function definitions
x = 0
y = 0
z = 0
yaw = 0
def poseCallback(pose_message):
|
def move(speed, distance, is_forward):
velocity_message = Twist()
global x, y
x0 = x
y0 = y
if is_forward:
velocity_message.linear.x = abs(speed)
else:
velocity_message.linear.x = -abs(speed)
distance_moved = 0.0
loop_rate = rospy.Rate(10)
cmd_vel_topic = '/turtle1/cmd_vel'
velocity_publisher = rospy.Publisher(cmd_vel_topic, Twist, queue_size=10)
while True:
rospy.loginfo('Turtlesim linear movement')
velocity_publisher.publish(velocity_message)
loop_rate.sleep()
distance_moved = distance_moved + abs(0.5 * math.sqrt(((x - x0) * 2) + ((y - y0) * 2)))
if not (distance_moved < distance):
rospy.loginfo("----Reached----")
break
velocity_message.linear.x = 0
velocity_publisher.publish(velocity_message)
def rotate(angular_speed_degree, relative_angle_degree, clockwise):
global yaw
velocity_message = Twist()
velocity_message.linear.x = 0
velocity_message.linear.y = 0
velocity_message.linear.z = 0
velocity_message.angular.x = 0
velocity_message.angular.y = 0
velocity_message.angular.z = 0
theta0 = yaw
angular_speed = math.radians(abs(angular_speed_degree))
if clockwise:
velocity_message.angular.z = -abs(angular_speed)
else:
velocity_message.angular.z = abs(angular_speed)
angle_moved = 0.0
loop_rate = rospy.Rate(10)
cmd_vel_topic = '/turtle1/cmd_vel'
velocity_publisher = rospy.Publisher(cmd_vel_topic, Twist, queue_size=10)
t0 = rospy.Time.now().to_sec()
while True:
rospy.loginfo('Turtlesim rotation')
velocity_publisher.publish(velocity_message)
t1 = rospy.Time.now().to_sec()
current_angle_degree = (t1 - t0) * angular_speed_degree
loop_rate.sleep()
if current_angle_degree > relative_angle_degree:
rospy.loginfo('----Reached----')
break
velocity_message.angular.z = 0
velocity_publisher.publish(velocity_message)
rospy.init_node('turtlesim_motion_pose', anonymous=True)
cmd_vel_topic = '/turtle1/cmd_vel'
velocity_publisher = rospy.Publisher(cmd_vel_topic, Twist, queue_size=10)
position_topic = '/turtle1/pose'
pose_subscriber = rospy.Subscriber(position_topic, Pose, poseCallback)
time.sleep(2)
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-t", "--type", type=str,
default="DICT_ARUCO_ORIGINAL",
help="type of ArUCo tag to detect")
args = vars(ap.parse_args())
# define names of each possible ArUco tag OpenCV supports
ARUCO_DICT = {
"DICT_4X4_50": cv2.aruco.DICT_4X4_50,
"DICT_4X4_100": cv2.aruco.DICT_4X4_100,
"DICT_4X4_250": cv2.aruco.DICT_4X4_250,
"DICT_4X4_1000": cv2.aruco.DICT_4X4_1000,
"DICT_5X5_50": cv2.aruco.DICT_5X5_50,
"DICT_5X5_100": cv2.aruco.DICT_5X5_100,
"DICT_5X5_250": cv2.aruco.DICT_5X5_250,
"DICT_5X5_1000": cv2.aruco.DICT_5X5_1000,
"DICT_6X6_50": cv2.aruco.DICT_6X6_50,
"DICT_6X6_100": cv2.aruco.DICT_6X6_100,
"DICT_6X6_250": cv2.aruco.DICT_6X6_250,
"DICT_6X6_1000": cv2.aruco.DICT_6X6_1000,
"DICT_7X7_50": cv2.aruco.DICT_7X7_50,
"DICT_7X7_100": cv2.aruco.DICT_7X7_100,
"DICT_7X7_250": cv2.aruco.DICT_7X7_250,
"DICT_7X7_1000": cv2.aruco.DICT_7X7_1000,
"DICT_ARUCO_ORIGINAL": cv2.aruco.DICT_ARUCO_ORIGINAL,
"DICT_APRILTAG_16h5": cv2.aruco.DICT_APRILTAG_16h5,
"DICT_APRILTAG_25h9": cv2.aruco.DICT_APRILTAG_25h9,
"DICT_APRILTAG_36h10": cv2.aruco.DICT_APRILTAG_36h10,
"DICT_APRILTAG_36h11": cv2.aruco.DICT_APRILTAG_36h11
}
# verify that the supplied ArUCo tag exists and is supported by
# OpenCV
if ARUCO_DICT.get(args["type"], None) is None:
print("[INFO] ArUCo tag of '{}' is not supported".format(
args["type"]))
sys.exit(0)
# load the ArUCo dictionary and grab the ArUCo parameters
print("[INFO] detecting '{}' tags...".format(args["type"]))
arucoDict = cv2.aruco.Dictionary_get(cv2.aruco.DICT_4X4_250)
arucoParams = cv2.aruco.DetectorParameters_create()
# initialize the video stream and allow the camera sensor to warm up
print("[INFO] starting video stream...")
cap = cv2.VideoCapture(2)
c1 = 0
linecolor = (100, 215, 255)
lwr_red = np.array([0, 0, 0])
upper_red = np.array([179, 65, 55])
countl = False
countr = False
Ser = serial.Serial("/dev/ttyUSB0", baudrate=9600)
Ser.flush()
width = cap.get(3)
while True:
ret, frame = cap.read()
if not ret:
_, frame = cap.read()
# detect ArUco markers in the input frame
(corners, ids, rejected) = cv2.aruco.detectMarkers(frame,
arucoDict, parameters=arucoParams)
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
kernel = np.ones((5, 5), np.uint8)
mask = cv2.inRange(hsv, lwr_red, upper_red)
mask = cv2.dilate(mask, kernel, iterations=1)
res = cv2.bitwise_and(frame, frame, mask=mask)
cnts, _ = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
center = None
# verify at least one ArUco marker was detected
if len(corners) > 0:
# flatten the ArUco IDs list
ids = ids.flatten()
# loop over the detected ArUCo corners
for (markerCorner, markerID) in zip(corners, ids):
# extract the marker corners (which are always returned
# in top-left, top-right, bottom-right, and bottom-left
# order)
corners = markerCorner.reshape((4, 2))
(topLeft, topRight, bottomRight, bottomLeft) = corners
# convert each of the (x, y)-coordinate pairs to integers
topRight = (int(topRight[0]), int(topRight[1]))
bottomRight = (int(bottomRight[0]), int(bottomRight[1]))
bottomLeft = (int(bottomLeft[0]), int(bottomLeft[1]))
topLeft = (int(topLeft[0]), int(topLeft[1]))
# draw the bounding box of the ArUCo detection
cv2.line(frame, topLeft, topRight, (0, 255, 0), 2)
cv2.line(frame, topRight, bottomRight, (0, 255, 0), 2)
cv2.line(frame, bottomRight, bottomLeft, (0, 255, 0), 2)
cv2.line(frame, bottomLeft, topLeft, (0, 255, 0), 2)
# compute and draw the center (x, y)-coordinates of the
# ArUco marker
cX = int((topLeft[0] + bottomRight[0]) / 2.0)
cY = int((topLeft[1] + bottomRight[1]) / 2.0)
cv2.circle(frame, (cX, cY), 4, (0, 0, 255), -1)
# draw the ArUco marker ID on the frame
cv2.putText(frame, str(markerID),
(topLeft[0], topLeft[1] - 15),
cv2.FONT_HERSHEY_SIMPLEX,
0.5, (0, 255, 0), 2)
if markerID == 0:
if not countl:
countr = False
countl=True
i='f'
for lp in range(12):
Ser.write(i.encode())
move(1, 1, True)
time.sleep(0.1)
cv2.putText(frame, '<--', (5, 50), cv2.FONT_HERSHEY_COMPLEX, 2, (0, 0, 255), 2, cv2.LINE_AA)
print("Left")
i = 'l' # left turn
for lp in range(6):
Ser.write(i.encode())
rotate(30, 10, False)
time.sleep(0.5)
i='f'
for lp in range(7):
Ser.write(i.encode())
move(1, 1, True)
time.sleep(0.1)
elif markerID == 1:
if not countr:
countl = False
countr=True
i='f'
for lp in range(8):
Ser.write(i.encode())
move(1, 1, True)
time.sleep(0.1)
i = 'r' # left turn
cv2.putText(frame, '-->', (5, 50), cv2.FONT_HERSHEY_COMPLEX, 2, (0, 0, 255), 2, cv2.LINE_AA)
print("Right")
for lp in range(6):
Ser.write(i.encode())
rotate(30, 10, True)
time.sleep(0.5)
else:
i = 'x'
Ser.write(i.encode())
print("Invalid")
if len(cnts) > 0:
c = max(cnts, key=cv2.contourArea)
((x, y), radius) = cv2.minEnclosingCircle(c)
M = cv2.moments(c)
center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))
if radius > 3:
# cv2.circle(frame, (int(x), int(y)), int(radius), (255, 255, 255), 2)
cv2.circle(frame, center, 5, linecolor, -1)
if (x > 0.25 * width and x <= 0.75 * width):
print('Forward')
cv2.putText(frame, '^', (5, 50), cv2.FONT_HERSHEY_COMPLEX, 2, (0, 0, 255), 2, cv2.LINE_AA)
Ser.write(b'f')
move(1, 1, True)
# time.sleep(0.01)
else:
print("Track Not Visible")
c1 += 1
if (c1 == 5):
print("Backward")
cv2.putText(frame, 'V', (5, 50), cv2.FONT_HERSHEY_COMPLEX, 2, (0, 0, 255), 2, cv2.LINE_AA)
Ser.write(b'b')
move(1, 1, False)
c1 = 0
time.sleep(0.2)
cv2.imshow("Frame", frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
cap.release()
Ser.close()
cv2.destroyAllWindows()
break | global x, y, z, yaw
x = pose_message.x
y = pose_message.y
yaw = pose_message.theta |
functoolz.py | from __future__ import annotations
import inspect
import sys
from functools import partial, reduce
from importlib import import_module
from operator import attrgetter, not_
from textwrap import dedent
from types import MethodType
from typing import Any, Callable, Dict, Generic, TypeVar, Union, overload
from .utils import no_default
__all__ = ('identity', 'apply', 'thread_first', 'thread_last', 'memoize',
'compose', 'compose_left', 'pipe', 'complement', 'juxt', 'do',
'curry', 'flip', 'excepts')
PYPY = hasattr(sys, 'pypy_version_info')
_T = TypeVar("_T")
_T2 = TypeVar("_T2")
def identity(x: _T) ->_T:
""" Identity function. Return x
>>> identity(3)
3
"""
return x
def apply(*func_and_args, **kwargs):
""" Applies a function and returns the results
>>> def double(x): return 2*x
>>> def inc(x): return x + 1
>>> apply(double, 5)
10
>>> tuple(map(apply, [double, inc, double], [10, 500, 8000]))
(20, 501, 16000)
"""
if not func_and_args:
raise TypeError('func argument is required')
func, args = func_and_args[0], func_and_args[1:]
return func(*args, **kwargs)
def thread_first(val, *forms):
""" Thread value through a sequence of functions/forms
>>> def double(x): return 2*x
>>> def inc(x): return x + 1
>>> thread_first(1, inc, double)
4
If the function expects more than one input you can specify those inputs
in a tuple. The value is used as the first input.
>>> def add(x, y): return x + y
>>> def pow(x, y): return x**y
>>> thread_first(1, (add, 4), (pow, 2)) # pow(add(1, 4), 2)
25
So in general
thread_first(x, f, (g, y, z))
expands to
g(f(x), y, z)
See Also:
thread_last
"""
def evalform_front(val, form):
if callable(form):
return form(val)
if isinstance(form, tuple):
func, args = form[0], form[1:]
args = (val,) + args
return func(*args)
return reduce(evalform_front, forms, val)
def thread_last(val, *forms):
""" Thread value through a sequence of functions/forms
>>> def double(x): return 2*x
>>> def inc(x): return x + 1
>>> thread_last(1, inc, double)
4
If the function expects more than one input you can specify those inputs
in a tuple. The value is used as the last input.
>>> def add(x, y): return x + y
>>> def pow(x, y): return x**y
>>> thread_last(1, (add, 4), (pow, 2)) # pow(2, add(4, 1))
32
So in general
thread_last(x, f, (g, y, z))
expands to
g(y, z, f(x))
>>> def iseven(x):
... return x % 2 == 0
>>> list(thread_last([1, 2, 3], (map, inc), (filter, iseven)))
[2, 4]
See Also:
thread_first
"""
def evalform_back(val, form):
if callable(form):
return form(val)
if isinstance(form, tuple):
func, args = form[0], form[1:]
args = args + (val,)
return func(*args)
return reduce(evalform_back, forms, val)
def instanceproperty(fget=None, fset=None, fdel=None, doc=None, classval=None):
""" Like @property, but returns ``classval`` when used as a class attribute
>>> class MyClass(object):
... '''The class docstring'''
... @instanceproperty(classval=__doc__)
... def __doc__(self):
... return 'An object docstring'
... @instanceproperty
... def val(self):
... return 42
...
>>> MyClass.__doc__
'The class docstring'
>>> MyClass.val is None
True
>>> obj = MyClass()
>>> obj.__doc__
'An object docstring'
>>> obj.val
42
"""
if fget is None:
return partial(instanceproperty, fset=fset, fdel=fdel, doc=doc,
classval=classval)
return InstanceProperty(fget=fget, fset=fset, fdel=fdel, doc=doc,
classval=classval)
class InstanceProperty(property):
""" Like @property, but returns ``classval`` when used as a class attribute
Should not be used directly. Use ``instanceproperty`` instead.
"""
def __init__(self, fget=None, fset=None, fdel=None, doc=None,
classval=None):
self.classval = classval
property.__init__(self, fget=fget, fset=fset, fdel=fdel, doc=doc)
def __get__(self, obj, type=None):
if obj is None:
return self.classval
return property.__get__(self, obj, type)
def __reduce__(self):
state = (self.fget, self.fset, self.fdel, self.__doc__, self.classval)
return InstanceProperty, state
class curry(Generic[_T]):
""" Curry a callable function
Enables partial application of arguments through calling a function with an
incomplete set of arguments.
>>> def mul(x, y):
... return x * y
>>> mul = curry(mul)
>>> double = mul(2)
>>> double(10)
20
Also supports keyword arguments
>>> @curry # Can use curry as a decorator
... def f(x, y, a=10):
... return a * (x + y)
>>> add = f(a=1)
>>> add(2, 3)
5
See Also:
toolz.curried - namespace of curried functions
https://toolz.readthedocs.io/en/latest/curry.html
"""
@overload
def __init__(self, func: Callable[..., _T], *args: Any, **kwargs: Any) -> None: ...
# this overload should never be used, mypy complains if only one overload exists
@overload
def __init__(self, *args: Union[Callable[..., _T], Any], **kwargs: Any) -> None: ...
def __init__(self, *args: Any, **kwargs: Any) -> None:
if not args:
raise TypeError('__init__() takes at least 2 arguments (1 given)')
func, args = args[0], args[1:]
if not callable(func):
raise TypeError("Input must be callable")
# curry- or functools.partial-like object? Unpack and merge arguments
if (
hasattr(func, 'func')
and hasattr(func, 'args')
and hasattr(func, 'keywords')
and isinstance(func.args, tuple)
):
_kwargs = {}
if func.keywords:
_kwargs.update(func.keywords)
_kwargs.update(kwargs)
kwargs = _kwargs
args = func.args + args
func = func.func
if kwargs:
self._partial = partial(func, *args, **kwargs)
else:
self._partial = partial(func, *args)
self.__doc__ = getattr(func, '__doc__', None)
self.__name__ = getattr(func, '__name__', '<curry>')
self.__module__ = getattr(func, '__module__', None)
self.__qualname__ = getattr(func, '__qualname__', None)
self._sigspec = None
self._has_unknown_args = None
@instanceproperty
def func(self) -> Callable[..., _T]:
return self._partial.func
@instanceproperty
def __signature__(self):
sig = inspect.signature(self.func)
args = self.args or ()
keywords = self.keywords or {}
if is_partial_args(self.func, args, keywords, sig) is False:
raise TypeError('curry object has incorrect arguments')
params = list(sig.parameters.values())
skip = 0
for param in params[:len(args)]:
if param.kind == param.VAR_POSITIONAL:
break
skip += 1
kwonly = False
newparams = []
for param in params[skip:]:
kind = param.kind
default = param.default
if kind == param.VAR_KEYWORD:
pass
elif kind == param.VAR_POSITIONAL:
if kwonly:
continue
elif param.name in keywords:
default = keywords[param.name]
kind = param.KEYWORD_ONLY
kwonly = True
else:
if kwonly:
kind = param.KEYWORD_ONLY
if default is param.empty:
default = no_default
newparams.append(param.replace(default=default, kind=kind))
return sig.replace(parameters=newparams)
@instanceproperty
def args(self):
return self._partial.args
@instanceproperty
def keywords(self) -> Dict[str, Any]:
return self._partial.keywords
@instanceproperty
def func_name(self) -> str:
return self.__name__
def __str__(self) -> str:
return str(self.func)
def __repr__(self) -> str:
return repr(self.func)
def __hash__(self) -> int:
return hash((self.func, self.args,
frozenset(self.keywords.items()) if self.keywords
else None))
def __eq__(self, other: Any) -> bool:
return (isinstance(other, curry) and self.func == other.func and
self.args == other.args and self.keywords == other.keywords)
def __ne__(self, other: Any) -> bool:
return not self.__eq__(other)
def __call__(self, *args: Any, **kwargs: Any) -> Union[_T, curry[_T]]:
try:
return self._partial(*args, **kwargs)
except TypeError as exc:
if self._should_curry(args, kwargs, exc):
return self.bind(*args, **kwargs)
raise
def _should_curry(self, args, kwargs, exc=None):
func = self.func
args = self.args + args
if self.keywords:
kwargs = dict(self.keywords, **kwargs)
if self._sigspec is None:
sigspec = self._sigspec = _sigs.signature_or_spec(func)
self._has_unknown_args = has_varargs(func, sigspec) is not False
else:
sigspec = self._sigspec
if is_partial_args(func, args, kwargs, sigspec) is False:
# Nothing can make the call valid
return False
elif self._has_unknown_args:
# The call may be valid and raised a TypeError, but we curry
# anyway because the function may have `*args`. This is useful
# for decorators with signature `func(*args, **kwargs)`.
return True
elif not is_valid_args(func, args, kwargs, sigspec):
# Adding more arguments may make the call valid
return True
else:
# There was a genuine TypeError
return False
def bind(self, *args, **kwargs) -> curry[_T]:
return type(self)(self, *args, **kwargs)
def call(self, *args, **kwargs) -> _T:
return self._partial(*args, **kwargs)
def __get__(self, instance, owner):
if instance is None:
return self
return curry(self, instance)
def __reduce__(self):
func = self.func
modname = getattr(func, '__module__', None)
qualname = getattr(func, '__qualname__', None)
if qualname is None: # pragma: no cover
qualname = getattr(func, '__name__', None)
is_decorated = None
if modname and qualname:
attrs = []
obj = import_module(modname)
for attr in qualname.split('.'):
if isinstance(obj, curry):
attrs.append('func')
obj = obj.func
obj = getattr(obj, attr, None)
if obj is None:
break
attrs.append(attr)
if isinstance(obj, curry) and obj.func is func:
is_decorated = obj is self
qualname = '.'.join(attrs)
func = '%s:%s' % (modname, qualname)
# functools.partial objects can't be pickled
userdict = tuple((k, v) for k, v in self.__dict__.items()
if k not in ('_partial', '_sigspec'))
state = (type(self), func, self.args, self.keywords, userdict,
is_decorated)
return _restore_curry, state
def _restore_curry(cls, func, args, kwargs, userdict, is_decorated):
if isinstance(func, str):
modname, qualname = func.rsplit(':', 1)
obj = import_module(modname)
for attr in qualname.split('.'):
obj = getattr(obj, attr)
if is_decorated:
return obj
func = obj.func
obj = cls(func, *args, **(kwargs or {}))
obj.__dict__.update(userdict)
return obj
@curry
def memoize(func, cache=None, key=None):
""" Cache a function's result for speedy future evaluation
Considerations:
Trades memory for speed.
Only use on pure functions.
>>> def add(x, y): return x + y
>>> add = memoize(add)
Or use as a decorator
>>> @memoize
... def add(x, y):
... return x + y
Use the ``cache`` keyword to provide a dict-like object as an initial cache
>>> @memoize(cache={(1, 2): 3})
... def add(x, y):
... return x + y
Note that the above works as a decorator because ``memoize`` is curried.
It is also possible to provide a ``key(args, kwargs)`` function that
calculates keys used for the cache, which receives an ``args`` tuple and
``kwargs`` dict as input, and must return a hashable value. However,
the default key function should be sufficient most of the time.
>>> # Use key function that ignores extraneous keyword arguments
>>> @memoize(key=lambda args, kwargs: args)
... def add(x, y, verbose=False):
... if verbose:
... print('Calculating %s + %s' % (x, y))
... return x + y
"""
if cache is None:
cache = {}
try:
may_have_kwargs = has_keywords(func) is not False
# Is unary function (single arg, no variadic argument or keywords)?
is_unary = is_arity(1, func)
except TypeError: # pragma: no cover
may_have_kwargs = True
is_unary = False
if key is None:
if is_unary:
def key(args, kwargs):
return args[0]
elif may_have_kwargs:
def key(args, kwargs):
return (
args or None,
frozenset(kwargs.items()) if kwargs else None,
)
else:
def key(args, kwargs):
return args
def memof(*args, **kwargs):
k = key(args, kwargs)
try:
return cache[k]
except TypeError:
raise TypeError("Arguments to memoized function must be hashable")
except KeyError:
cache[k] = result = func(*args, **kwargs)
return result
try:
memof.__name__ = func.__name__
except AttributeError:
pass
memof.__doc__ = func.__doc__
memof.__wrapped__ = func
return memof
class Compose(object):
""" A composition of functions
See Also:
compose
"""
__slots__ = 'first', 'funcs'
def __init__(self, funcs):
funcs = tuple(reversed(funcs))
self.first = funcs[0]
self.funcs = funcs[1:]
def __call__(self, *args, **kwargs):
ret = self.first(*args, **kwargs)
for f in self.funcs:
ret = f(ret)
return ret
def __getstate__(self):
return self.first, self.funcs
def __setstate__(self, state):
self.first, self.funcs = state
@instanceproperty(classval=__doc__)
def __doc__(self):
def composed_doc(*fs):
"""Generate a docstring for the composition of fs.
"""
if not fs:
# Argument name for the docstring.
return '*args, **kwargs'
return '{f}({g})'.format(f=fs[0].__name__, g=composed_doc(*fs[1:]))
try:
return (
'lambda *args, **kwargs: ' +
composed_doc(*reversed((self.first,) + self.funcs))
)
except AttributeError:
# One of our callables does not have a `__name__`, whatever.
return 'A composition of functions'
@property
def __name__(self):
try:
return '_of_'.join(
(f.__name__ for f in reversed((self.first,) + self.funcs))
)
except AttributeError:
return type(self).__name__
def __repr__(self):
return '{.__class__.__name__}{!r}'.format(
self, tuple(reversed((self.first, ) + self.funcs)))
def __eq__(self, other):
if isinstance(other, Compose):
return other.first == self.first and other.funcs == self.funcs
return NotImplemented
def __ne__(self, other):
equality = self.__eq__(other)
return NotImplemented if equality is NotImplemented else not equality
def __hash__(self):
return hash(self.first) ^ hash(self.funcs)
# Mimic the descriptor behavior of python functions.
# i.e. let Compose be called as a method when bound to a class.
# adapted from
# docs.python.org/3/howto/descriptor.html#functions-and-methods
def __get__(self, obj, objtype=None):
return self if obj is None else MethodType(self, obj)
# introspection with Signature is only possible from py3.3+
@instanceproperty
def __signature__(self):
base = inspect.signature(self.first)
last = inspect.signature(self.funcs[-1])
return base.replace(return_annotation=last.return_annotation)
__wrapped__ = instanceproperty(attrgetter('first'))
def compose(*funcs):
""" Compose functions to operate in series.
Returns a function that applies other functions in sequence.
Functions are applied from right to left so that
``compose(f, g, h)(x, y)`` is the same as ``f(g(h(x, y)))``.
If no arguments are provided, the identity function (f(x) = x) is returned.
>>> inc = lambda i: i + 1
>>> compose(str, inc)(3)
'4'
See Also:
compose_left
pipe
"""
if not funcs:
return identity
if len(funcs) == 1:
return funcs[0]
else:
return Compose(funcs)
def compose_left(*funcs):
""" Compose functions to operate in series.
Returns a function that applies other functions in sequence.
Functions are applied from left to right so that
``compose_left(f, g, h)(x, y)`` is the same as ``h(g(f(x, y)))``.
If no arguments are provided, the identity function (f(x) = x) is returned.
>>> inc = lambda i: i + 1
>>> compose_left(inc, str)(3)
'4'
See Also:
compose
pipe
"""
return compose(*reversed(funcs))
def pipe(data, *funcs):
""" Pipe a value through a sequence of functions
I.e. ``pipe(data, f, g, h)`` is equivalent to ``h(g(f(data)))``
We think of the value as progressing through a pipe of several
transformations, much like pipes in UNIX
``$ cat data | f | g | h``
>>> double = lambda i: 2 * i
>>> pipe(3, double, str)
'6'
See Also:
compose
compose_left
thread_first
thread_last
"""
for func in funcs:
data = func(data)
return data
def complement(func):
""" Convert a predicate function to its logical complement.
In other words, return a function that, for inputs that normally
yield True, yields False, and vice-versa.
>>> def iseven(n): return n % 2 == 0
>>> isodd = complement(iseven)
>>> iseven(2)
True
>>> isodd(2)
False
"""
return compose(not_, func)
class juxt(object):
""" Creates a function that calls several functions with the same arguments
Takes several functions and returns a function that applies its arguments
to each of those functions then returns a tuple of the results.
Name comes from juxtaposition: the fact of two things being seen or placed
close together with contrasting effect.
>>> inc = lambda x: x + 1
>>> double = lambda x: x * 2
>>> juxt(inc, double)(10)
(11, 20)
>>> juxt([inc, double])(10)
(11, 20)
"""
__slots__ = ['funcs']
def __init__(self, *funcs):
if len(funcs) == 1 and not callable(funcs[0]):
funcs = funcs[0]
self.funcs = tuple(funcs)
def __call__(self, *args, **kwargs):
return tuple(func(*args, **kwargs) for func in self.funcs)
def __getstate__(self):
return self.funcs
def __setstate__(self, state):
self.funcs = state
def do(func: Callable[[_T], Any], x: _T) -> _T:
""" Runs ``func`` on ``x``, returns ``x``
| effects of ``func`` are relevant.
Logging functions can be made by composing ``do`` with a storage function
like ``list.append`` or ``file.write``
>>> from toolz import compose
>>> from toolz.curried import do
>>> log = []
>>> inc = lambda x: x + 1
>>> inc = compose(inc, do(log.append))
>>> inc(1)
2
>>> inc(11)
12
>>> log
[1, 11]
"""
func(x)
return x
@curry
def flip(func: Callable[[_T, _T], _T2], a: _T, b: _T) -> _T2:
""" Call the function call with the arguments flipped
This function is curried.
>>> def div(a, b):
... return a // b
...
>>> flip(div, 2, 6)
3
>>> div_by_two = flip(div, 2)
>>> div_by_two(4)
2
This is particularly useful for built in functions and functions defined
in C extensions that accept positional only arguments. For example:
isinstance, issubclass.
>>> data = [1, 'a', 'b', 2, 1.5, object(), 3]
>>> only_ints = list(filter(flip(isinstance, int), data))
>>> only_ints
[1, 2, 3]
"""
return func(b, a)
def return_none(exc: Any) -> None:
""" Returns None.
"""
return None
class excepts(object):
"""A wrapper around a function to catch exceptions and
dispatch to a handler.
This is like a functional try/except block, in the same way that
ifexprs are functional if/else blocks.
Examples
--------
>>> excepting = excepts(
... ValueError,
... lambda a: [1, 2].index(a),
... lambda _: -1,
... )
>>> excepting(1)
0
>>> excepting(3)
-1
Multiple exceptions and default except clause.
>>> excepting = excepts((IndexError, KeyError), lambda a: a[0])
>>> excepting([])
>>> excepting([1])
1
>>> excepting({})
>>> excepting({0: 1})
1
"""
def __init__(self, exc, func, handler=return_none):
self.exc = exc
self.func = func
self.handler = handler
def __call__(self, *args, **kwargs):
try:
return self.func(*args, **kwargs)
except self.exc as e:
return self.handler(e)
@instanceproperty(classval=__doc__)
def __doc__(self):
exc = self.exc
try:
if isinstance(exc, tuple):
exc_name = '(%s)' % ', '.join(
map(attrgetter('__name__'), exc),
)
else:
exc_name = exc.__name__
return dedent(
"""\
A wrapper around {inst.func.__name__!r} that will except:
{exc}
and handle any exceptions with {inst.handler.__name__!r}.
Docs for {inst.func.__name__!r}:
{inst.func.__doc__}
Docs for {inst.handler.__name__!r}:
{inst.handler.__doc__}
"""
).format(
inst=self,
exc=exc_name,
)
except AttributeError:
return type(self).__doc__
@property
def __name__(self):
exc = self.exc
try:
if isinstance(exc, tuple):
exc_name = '_or_'.join(map(attrgetter('__name__'), exc))
else:
exc_name = exc.__name__
return '%s_excepting_%s' % (self.func.__name__, exc_name)
except AttributeError:
return 'excepting'
def _check_sigspec(sigspec, func, builtin_func, *builtin_args):
if sigspec is None:
try:
sigspec = inspect.signature(func)
except (ValueError, TypeError) as e:
sigspec = e
if isinstance(sigspec, ValueError):
return None, builtin_func(*builtin_args)
elif not isinstance(sigspec, inspect.Signature):
if (
func in _sigs.signatures
and ((
hasattr(func, '__signature__')
and hasattr(func.__signature__, '__get__')
))
):
val = builtin_func(*builtin_args)
return None, val
return None, False
return sigspec, None
if PYPY: # pragma: no cover
_check_sigspec_orig = _check_sigspec
def _check_sigspec(sigspec, func, builtin_func, *builtin_args):
# PyPy may lie, so use our registry for builtins instead
if func in _sigs.signatures:
val = builtin_func(*builtin_args)
return None, val
return _check_sigspec_orig(sigspec, func, builtin_func, *builtin_args)
_check_sigspec.__doc__ = """ \
Private function to aid in introspection compatibly across Python versions.
If a callable doesn't have a signature (Python 3) or an argspec (Python 2),
the signature registry in toolz._signatures is used.
"""
def num_required_args(func, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._num_required_args,
func)
if sigspec is None:
return rv
return sum(1 for p in sigspec.parameters.values()
if p.default is p.empty
and p.kind in (p.POSITIONAL_OR_KEYWORD, p.POSITIONAL_ONLY))
def has_varargs(func, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._has_varargs, func)
if sigspec is None:
return rv
return any(p.kind == p.VAR_POSITIONAL
for p in sigspec.parameters.values())
def has_keywords(func, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._has_keywords, func)
if sigspec is None:
return rv
return any(p.default is not p.empty
or p.kind in (p.KEYWORD_ONLY, p.VAR_KEYWORD)
for p in sigspec.parameters.values())
def is_valid_args(func, args, kwargs, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._is_valid_args,
func, args, kwargs)
if sigspec is None:
return rv
try:
sigspec.bind(*args, **kwargs)
except TypeError:
return False
return True
def is_partial_args(func, args, kwargs, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._is_partial_args,
func, args, kwargs)
if sigspec is None:
return rv
try:
sigspec.bind_partial(*args, **kwargs)
except TypeError:
return False
return True
def is_arity(n, func, sigspec=None):
""" Does a function have only n positional arguments?
This function relies on introspection and does not call the function.
Returns None if validity can't be determined.
>>> def f(x):
... return x
>>> is_arity(1, f)
True
>>> def g(x, y=1):
... return x + y
>>> is_arity(1, g)
False
"""
sigspec, rv = _check_sigspec(sigspec, func, _sigs._is_arity, n, func)
if sigspec is None:
return rv
num = num_required_args(func, sigspec)
if num is not None:
num = num == n
if not num:
return False
varargs = has_varargs(func, sigspec)
if varargs:
return False
keywords = has_keywords(func, sigspec)
if keywords:
return False
if num is None or varargs is None or keywords is None: # pragma: no cover
return None
return True
num_required_args.__doc__ = """ \
Number of required positional arguments
This function relies on introspection and does not call the function.
Returns None if validity can't be determined.
>>> def f(x, y, z=3):
... return x + y + z
>>> num_required_args(f)
2
>>> def g(*args, **kwargs):
... pass
>>> num_required_args(g)
0
"""
has_varargs.__doc__ = """ \
Does a function have variadic positional arguments?
This function relies on introspection and does not call the function.
Returns None if validity can't be determined.
>>> def f(*args):
... return args
>>> has_varargs(f)
True
>>> def g(**kwargs):
... return kwargs
>>> has_varargs(g)
False
"""
has_keywords.__doc__ = """ \
Does a function have keyword arguments?
This function relies on introspection and does not call the function.
Returns None if validity can't be determined.
>>> def f(x, y=0):
... return x + y
>>> has_keywords(f)
True
"""
is_valid_args.__doc__ = """ \
Is ``func(*args, **kwargs)`` a valid function call?
This function relies on introspection and does not call the function.
Returns None if validity can't be determined.
>>> def add(x, y):
... return x + y
>>> is_valid_args(add, (1,), {})
False
>>> is_valid_args(add, (1, 2), {})
True
>>> is_valid_args(map, (), {})
False
**Implementation notes**
Python 2 relies on ``inspect.getargspec``, which only works for
user-defined functions. Python 3 uses ``inspect.signature``, which
works for many more types of callables.
Many builtins in the standard library are also supported.
"""
is_partial_args.__doc__ = """ \
Can partial(func, *args, **kwargs)(*args2, **kwargs2) be a valid call?
Returns True *only* if the call is valid or if it is possible for the
call to become valid by adding more positional or keyword arguments.
This function relies on introspection and does not call the function.
Returns None if validity can't be determined.
>>> def add(x, y):
... return x + y
>>> is_partial_args(add, (1,), {})
True
>>> is_partial_args(add, (1, 2), {})
True
>>> is_partial_args(add, (1, 2, 3), {})
False
>>> is_partial_args(map, (), {})
True
**Implementation notes**
Python 2 relies on ``inspect.getargspec``, which only works for
user-defined functions. Python 3 uses ``inspect.signature``, which
works for many more types of callables.
Many builtins in the standard library are also supported.
"""
from . import _signatures as _sigs | Because the results of ``func`` are not returned, only the side |
replay.go | package main
import (
"context"
"flag"
"fmt"
"strings"
"time"
"github.com/cheggaaa/pb/v3"
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/config"
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/datasource"
"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/remotewrite"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
)
var (
replayFrom = flag.String("replay.timeFrom", "",
"The time filter in RFC3339 format to select time series with timestamp equal or higher than provided value. E.g. '2020-01-01T20:07:00Z'")
replayTo = flag.String("replay.timeTo", "",
"The time filter in RFC3339 format to select timeseries with timestamp equal or lower than provided value. E.g. '2020-01-01T20:07:00Z'")
replayRulesDelay = flag.Duration("replay.rulesDelay", time.Second,
"Delay between rules evaluation within the group. Could be important if there are chained rules inside of the group"+
"and processing need to wait for previous rule results to be persisted by remote storage before evaluating the next rule."+
"Keep it equal or bigger than -remoteWrite.flushInterval.")
replayMaxDatapoints = flag.Int("replay.maxDatapointsPerQuery", 1e3,
"Max number of data points expected in one request. The higher the value, the less requests will be made during replay.")
replayRuleRetryAttempts = flag.Int("replay.ruleRetryAttempts", 5,
"Defines how many retries to make before giving up on rule if request for it returns an error.")
)
func replay(groupsCfg []config.Group, qb datasource.QuerierBuilder, rw *remotewrite.Client) error {
if *replayMaxDatapoints < 1 {
return fmt.Errorf("replay.maxDatapointsPerQuery can't be lower than 1")
}
tFrom, err := time.Parse(time.RFC3339, *replayFrom)
if err != nil {
return fmt.Errorf("failed to parse %q: %s", *replayFrom, err)
}
tTo, err := time.Parse(time.RFC3339, *replayTo)
if err != nil {
return fmt.Errorf("failed to parse %q: %s", *replayTo, err)
}
if !tTo.After(tFrom) {
return fmt.Errorf("replay.timeTo must be bigger than replay.timeFrom")
}
labels := make(map[string]string)
for _, s := range *externalLabels {
if len(s) == 0 {
continue
}
n := strings.IndexByte(s, '=')
if n < 0 {
return fmt.Errorf("missing '=' in `-label`. It must contain label in the form `name=value`; got %q", s)
}
labels[s[:n]] = s[n+1:]
}
fmt.Printf("Replay mode:"+
"\nfrom: \t%v "+
"\nto: \t%v "+
"\nmax data points per request: %d\n",
tFrom, tTo, *replayMaxDatapoints)
var total int
for _, cfg := range groupsCfg {
ng := newGroup(cfg, qb, *evaluationInterval, labels)
total += ng.replay(tFrom, tTo, rw)
}
logger.Infof("replay finished! Imported %d samples", total)
if rw != nil {
return rw.Close()
}
return nil
}
func (g *Group) replay(start, end time.Time, rw *remotewrite.Client) int {
var total int | ri := rangeIterator{start: start, end: end, step: step}
iterations := int(end.Sub(start)/step) + 1
fmt.Printf("\nGroup %q"+
"\ninterval: \t%v"+
"\nrequests to make: \t%d"+
"\nmax range per request: \t%v\n",
g.Name, g.Interval, iterations, step)
for _, rule := range g.Rules {
fmt.Printf("> Rule %q (ID: %d)\n", rule, rule.ID())
bar := pb.StartNew(iterations)
ri.reset()
for ri.next() {
n, err := replayRule(rule, ri.s, ri.e, rw)
if err != nil {
logger.Fatalf("rule %q: %s", rule, err)
}
total += n
bar.Increment()
}
bar.Finish()
// sleep to let remote storage to flush data on-disk
// so chained rules could be calculated correctly
time.Sleep(*replayRulesDelay)
}
return total
}
func replayRule(rule Rule, start, end time.Time, rw *remotewrite.Client) (int, error) {
var err error
var tss []prompbmarshal.TimeSeries
for i := 0; i < *replayRuleRetryAttempts; i++ {
tss, err = rule.ExecRange(context.Background(), start, end)
if err == nil {
break
}
logger.Errorf("attempt %d to execute rule %q failed: %s", i+1, rule, err)
time.Sleep(time.Second)
}
if err != nil { // means all attempts failed
return 0, err
}
if len(tss) < 1 {
return 0, nil
}
var n int
for _, ts := range tss {
if err := rw.Push(ts); err != nil {
return n, fmt.Errorf("remote write failure: %s", err)
}
n += len(ts.Samples)
}
return n, nil
}
type rangeIterator struct {
step time.Duration
start, end time.Time
iter int
s, e time.Time
}
func (ri *rangeIterator) reset() {
ri.iter = 0
ri.s, ri.e = time.Time{}, time.Time{}
}
func (ri *rangeIterator) next() bool {
ri.s = ri.start.Add(ri.step * time.Duration(ri.iter))
if !ri.end.After(ri.s) {
return false
}
ri.e = ri.s.Add(ri.step)
if ri.e.After(ri.end) {
ri.e = ri.end
}
ri.iter++
return true
} | step := g.Interval * time.Duration(*replayMaxDatapoints) |
error-boundary.tsx | /**
* Copyright (c) 2019, cic (http://www.cic.org) All Rights Reserved.
*
* cic licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import React, { PropsWithChildren } from "react";
/**
* Error boundary state interface.
*/
interface ErrorBoundaryState {
error: any;
errorInfo: any;
}
/**
* Error boundary props interface.
*/
interface ErrorBoundaryProps {
fallback: React.ReactNode;
}
/**
* Error boundary component to avoid JavaScript errors from breaking
* the entire app due to an error in a specific UI part.
* This component is an implementation of the error boundary concept
* introduced in React 16.
* @see {@link https://reactjs.org/docs/error-boundaries.html}
*
* @param {PlaceholderProps} props - Props injected in to the placeholder component.
* @return {JSX.Element}
*/
export class | extends React.Component<PropsWithChildren<ErrorBoundaryProps>, ErrorBoundaryState> {
constructor(props) {
super(props);
this.state = {
error: null,
errorInfo: null
};
}
componentDidCatch(error, errorInfo) {
// Catch errors in any components below and re-render with error message
this.setState({
error,
errorInfo
});
}
render() {
const { errorInfo } = this.state;
const { children, fallback } = this.props;
// If there's an error, render the fallback.
if (errorInfo) {
return fallback;
}
// Just render children
return children;
}
}
| ErrorBoundary |
views.py | from django.shortcuts import render, redirect
from django.contrib import messages
from .forms import UserRegisterForm
def | (request):
if request.method == 'POST':
form = UserRegisterForm(request.POST)
if form.is_valid():
form.save()
username = form.cleaned_data.get('username')
messages.success(request, f'Account created for {username}!')
return redirect('blog-home')
else:
form = UserRegisterForm()
return render(request, 'users/register.html', {'form': form})
| register |
run-chart.js | function pad(a){return a<10?"0"+a:a}function | (a,t,e){for(var r=[pad(a)],o=a;o<t;)o+=e,r.push(pad(o));return r}function drawUSRegionsMap(){var a=google.visualization.arrayToDataTable([["City","Profile Visits","Post Likes"],["New York City",276147,12855],["Los Angeles",135241,18421],["Chicago",9595,1217],["Austin",9063,13360],["Washington",276147,12855],["Colorado",95975,15217]]);new google.visualization.GeoChart(USMapChart).draw(a,{resolution:"provinces",region:"US",displayMode:"markers",legend:"none",colorAxis:{colors:["#38a9ff","#08ddc1"]}})}var twoBarChart=document.getElementById("two-bars-chart");if(null!==twoBarChart)var ctx_tb=twoBarChart.getContext("2d"),data_tb={labels:range(2011,2016,1),datasets:[{label:"Statistic 02",backgroundColor:"#ffdc1b",borderSkipped:"bottom",data:[43,47,38,30,47,39]},{label:"Statistic 01",backgroundColor:"#ff5e3a",borderSkipped:"bottom",borderWidth:0,data:[36,30,45,50,39,41]}]},twoBarChartEl=new Chart(ctx_tb,{type:"bar",data:data_tb,options:{legend:{display:!1},tooltips:{mode:"index",intersect:!1},responsive:!0,scales:{xAxes:[{barPercentage:.7,gridLines:{display:!1},ticks:{fontColor:"#888da8"}}],yAxes:[{stacked:!0,gridLines:{display:!1},ticks:{beginAtZero:!0,fontColor:"#888da8"}}]}}});var lineStackedChart=document.getElementById("line-stacked-chart");if(null!==lineStackedChart)var ctx_ls=lineStackedChart.getContext("2d"),data_ls={labels:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],datasets:[{label:" - Favorites",backgroundColor:"rgba(57,169,255,0.35)",borderColor:"#38a9ff",borderWidth:4,pointBorderColor:"#38a9ff",pointBackgroundColor:"#fff",pointBorderWidth:4,pointRadius:6,pointHoverRadius:8,data:[98,42,38,57,82,41,36,30,45,62,64,80]},{label:" - 
Visitors",backgroundColor:"rgba(8,221,123,0.2)",borderColor:"#08ddc1",borderWidth:4,pointBorderColor:"#08ddc1",pointBackgroundColor:"#fff",pointBorderWidth:4,pointRadius:6,pointHoverRadius:8,data:[78,101,80,87,120,105,110,76,101,96,100,135]}]},lineStackedEl=new Chart(ctx_ls,{type:"line",data:data_ls,options:{legend:{display:!1},responsive:!0,scales:{xAxes:[{gridLines:{color:"#f0f4f9"},ticks:{fontColor:"#888da8"}}],yAxes:[{gridLines:{display:!1},ticks:{beginAtZero:!0,fontColor:"#888da8"}}]}}});var oneBarChart=document.getElementById("one-bar-chart");if(null!==oneBarChart)var ctx_ob=oneBarChart.getContext("2d"),data_ob={labels:range(1,31,1),datasets:[{backgroundColor:"#38a9ff",data:[9,11,8,6,13,7,7,0,9,12,7,13,12,8,1,10,9,7,3,7,10,4,14,9,6,6,11,12,3,4,2]},{backgroundColor:"#ebecf1",data:[11,9,12,14,7,13,13,20,11,8,13,7,8,12,19,10,11,13,17,13,10,16,6,11,14,14,9,8,17,16,18]}]},oneBarEl=new Chart(ctx_ob,{type:"bar",data:data_ob,options:{deferred:{delay:200},tooltips:{enabled:!1},legend:{display:!1},responsive:!0,scales:{xAxes:[{stacked:!0,barPercentage:.6,gridLines:{display:!1},ticks:{fontColor:"#888da8"}}],yAxes:[{stacked:!0,gridLines:{color:"#f0f4f9"},ticks:{beginAtZero:!0,fontColor:"#888da8"}}]}}});var lineGraphicChart=document.getElementById("line-graphic-chart");if(null!==lineGraphicChart)var ctx_lg=lineGraphicChart.getContext("2d"),data_lg={labels:["Aug 8","Aug 15","Aug 21","Aug 28","Sep 4","Sep 11","Sep 19","Sep 26","Oct 3","Oct 10","Oct 16","Oct 23","Oct 30"],datasets:[{label:" - Favorites",backgroundColor:"rgba(255,215,27,0.6)",borderColor:"#ffd71b",borderWidth:4,pointBorderColor:"#ffd71b",pointBackgroundColor:"#fff",pointBorderWidth:4,pointRadius:0,pointHoverRadius:8,data:[98,42,38,57,82,41,36,30,45,62,64,80,68]},{label:" - 
Visitors",backgroundColor:"rgba(255,94,58,0.6)",borderColor:"#ff5e3a",borderWidth:4,pointBorderColor:"#ff5e3a",pointBackgroundColor:"#fff",pointBorderWidth:4,pointRadius:0,pointHoverRadius:8,data:[78,101,80,87,120,105,110,76,101,96,100,115,135]}]},lineGraphicEl=new Chart(ctx_lg,{type:"line",data:data_lg,options:{deferred:{delay:300},legend:{display:!1},responsive:!0,scales:{xAxes:[{gridLines:{color:"#f0f4f9"},ticks:{fontColor:"#888da8"}}],yAxes:[{gridLines:{display:!1},ticks:{beginAtZero:!0,fontColor:"#888da8"}}]}}});var pieColorChart=document.getElementById("pie-color-chart");if(null!==pieColorChart)var ctx_pc=pieColorChart.getContext("2d"),data_pc={labels:["Status Updates","Multimedia","Shared Posts","Blog Posts"],datasets:[{data:[8.247,5.63,1.498,1.136],borderWidth:0,backgroundColor:["#7c5ac2","#08ddc1","#ff5e3a","#ffd71b"]}]},pieColorEl=new Chart(ctx_pc,{type:"doughnut",data:data_pc,options:{deferred:{delay:300},cutoutPercentage:93,legend:{display:!1},animation:{animateScale:!1}}});!function(a){"use strict";var t=a(".pie-chart");t.appear({force_process:!0}),t.on("appear",function(){var t=a(this);if(!t.data("inited")){var e=t.data("startcolor"),r=t.data("endcolor"),o=100*t.data("value");t.circleProgress({thickness:16,size:360,startAngle:-Math.PI/4*2,emptyFill:"#ebecf1",lineCap:"round",fill:{gradient:[r,e],gradientAngle:Math.PI/4}}).on("circle-animation-progress",function(a,e){t.find(".content").html(parseInt(o*e,10)+"<span>%</span>")}),t.data("inited",!0)}})}(jQuery);var USMapChart=document.getElementById("us-chart-map");null!==USMapChart&&(google.charts.load("current",{packages:["geochart"]}),google.charts.setOnLoadCallback(drawUSRegionsMap));var lineChart=document.getElementById("line-chart");if(null!==lineChart)var ctx_lc=lineChart.getContext("2d"),data_lc={labels:["January","February","March","April","May","June","July","August","September","October","November","December"],datasets:[{label:" - 
Comments",borderColor:"#ffdc1b",borderWidth:4,pointBorderColor:"#ffdc1b",pointBackgroundColor:"#fff",pointBorderWidth:4,pointRadius:6,pointHoverRadius:8,fill:!1,lineTension:0,data:[96,63,136,78,111,83,101,83,102,61,45,135]},{label:" - Likes",borderColor:"#08ddc1",borderWidth:4,pointBorderColor:"#08ddc1",pointBackgroundColor:"#fff",pointBorderWidth:4,pointRadius:6,pointHoverRadius:8,fill:!1,lineTension:0,data:[118,142,119,123,165,139,145,116,152,123,139,195]}]},lineChartEl=new Chart(ctx_lc,{type:"line",data:data_lc,options:{legend:{display:!1},responsive:!0,scales:{xAxes:[{ticks:{fontColor:"#888da8"},gridLines:{color:"#f0f4f9"}}],yAxes:[{gridLines:{color:"#f0f4f9"},ticks:{beginAtZero:!0,fontColor:"#888da8"}}]}}});var pieSmallChart=document.getElementById("pie-small-chart");if(null!==pieSmallChart)var ctx_sc=pieSmallChart.getContext("2d"),data_sc={labels:["Yearly Likes","Yearly Comments"],datasets:[{data:[65.048,42.973],borderWidth:0,backgroundColor:["#08ddc1","#ffdc1b"]}]},pieSmallEl=new Chart(ctx_sc,{type:"doughnut",data:data_sc,options:{deferred:{delay:300},cutoutPercentage:93,legend:{display:!1},animation:{animateScale:!1}}});var twoBar2Chart=document.getElementById("two-bar-chart-2");if(null!==twoBar2Chart)var ctx_tb2=twoBar2Chart.getContext("2d"),data_tb2={labels:range(2011,2016,1),datasets:[{label:"Facebook",backgroundColor:"#2f5b9d",borderSkipped:"bottom",data:[43,47,38,30,47,39]},{label:"Twitter",backgroundColor:"#38bff1",borderSkipped:"bottom",borderWidth:0,data:[36,30,45,50,39,41]}]},twoBar2ChartEl=new Chart(ctx_tb2,{type:"bar",data:data_tb2,options:{legend:{display:!1},tooltips:{mode:"index",intersect:!1},responsive:!0,scales:{xAxes:[{barPercentage:.5,gridLines:{display:!1},ticks:{fontColor:"#888da8"}}],yAxes:[{gridLines:{display:!1},ticks:{beginAtZero:!0,fontColor:"#888da8"}}]}}});var radarChart=document.getElementById("radar-chart");if(null!==radarChart)var 
ctx_rc=radarChart.getContext("2d"),data_rc={datasets:[{data:[11,16,26],backgroundColor:["#38a9ff","#ff5e3a","#ffdc1b"]}],labels:["Blue","Orange","Yellow"]},radarChartEl=new Chart(ctx_rc,{type:"pie",data:data_rc,options:{deferred:{delay:300},legend:{display:!1},scale:{gridLines:{display:!1},ticks:{beginAtZero:!0},reverse:!1},animation:{animateScale:!0}}}); | range |
core-utils.spec.ts | import domino from 'domino';
import {
atomInline,
blockquote,
createEditor,
doc,
em,
p,
pm,
schema as testSchema,
tableRow,
} from 'jest-prosemirror';
import { renderEditor } from 'jest-remirror';
import type { TextSelection } from '@remirror/pm/state';
import {
BlockquoteExtension,
BoldExtension,
docNodeBasicJSON,
HeadingExtension,
ItalicExtension,
} from '@remirror/testing';
import {
areSchemasCompatible,
areStatesEqual,
atDocEnd,
atDocStart,
canInsertNode,
createDocumentNode,
endPositionOfParent,
fromHtml,
getCursor,
getInvalidContent,
getMarkAttributes,
getMarkRange,
getNearestNonTextElement,
getRemirrorJSON,
getSelectedWord,
isDocNode,
isDocNodeEmpty,
isElementDomNode,
isEmptyBlockNode,
isMarkActive,
isNodeSelection,
isProsemirrorNode,
isRemirrorJSON,
isSelection,
isTextDomNode,
isTextSelection,
startPositionOfParent,
toDom,
toHtml,
} from '../core-utils';
describe('isEmptyBlockNode', () => {
it('should be true for empty nodes', () => {
const { state } = createEditor(doc(p('<cursor>')));
expect(isEmptyBlockNode(state.selection.$from.node())).toBeTrue();
});
it('should be false for non-empty nodes', () => {
const { state } = createEditor(doc(p('abc<cursor>')));
expect(isEmptyBlockNode(state.selection.$from.node())).toBeFalse();
});
});
describe('markActive', () => {
it('shows active when within an active region', () => {
const { state, schema } = createEditor(doc(p('Something', em('is <cursor>italic'), ' here')));
expect(isMarkActive({ trState: state, type: schema.marks.em })).toBeTrue();
});
it('returns false when not within an active region', () => {
const { state, schema } = createEditor(doc(p('Something<cursor>', em('is italic'), ' here')));
expect(isMarkActive({ trState: state, type: schema.marks.em })).toBeFalse();
});
it('returns false with no selection', () => {
const { state, schema } = createEditor(doc(p(' ', em('italic'))));
expect(isMarkActive({ trState: state, type: schema.marks.em })).toBeFalse();
});
it('returns true when surrounding an active region', () => {
const { state, schema } = createEditor(
doc(p('Something<start>', em('is italic'), '<end> here')),
);
expect(isMarkActive({ trState: state, type: schema.marks.em })).toBeTrue();
});
it('can override from and to', () => {
const { state, schema } = createEditor(
doc(p('<start>Something<end>', em('is italic'), ' here')),
);
expect(isMarkActive({ trState: state, type: schema.marks.em, from: 11, to: 20 })).toBeTrue();
});
it('is false when empty document with from and to specified', () => {
const { state, schema } = createEditor(doc(p('')));
expect(isMarkActive({ trState: state, type: schema.marks.em, from: 11, to: 20 })).toBeFalse();
});
it('is false when from and to specified in empty node', () => {
const { state, schema } = createEditor(doc(p(em('is italic')), p('')));
expect(isMarkActive({ trState: state, type: schema.marks.em, from: 11, to: 20 })).toBeFalse();
});
});
describe('canInsertNode', () => {
it('returns true when node can be inserted', () => {
const { state, schema } = createEditor(doc(p('Something<cursor>')));
expect(canInsertNode(state, schema.nodes.heading)).toBeTrue();
});
it('returns false when node cannot be inserted into table row', () => {
const { state, schema } = createEditor(doc(tableRow('<cursor>')));
expect(canInsertNode(state, schema.nodes.paragraph)).toBeFalse();
});
it('does not throw error for inserting into leaf node', () => {
const { state, schema } = createEditor(doc(atomInline('<cursor>')));
expect(canInsertNode(state, schema.nodes.paragraph)).toBeFalse();
});
});
describe('isDocNodeEmpty', () => {
it('returns true for a doc with an empty paragraph', () => {
expect(isDocNodeEmpty(doc(p()))).toBeTrue();
});
it('returns false for a doc with nothing inside', () => {
expect(isDocNodeEmpty(doc())).toBeFalse();
});
it('returns false for a doc with content', () => {
expect(isDocNodeEmpty(doc(p(blockquote('quote this...'))))).toBeFalse();
});
});
describe('isProsemirrorNode', () => {
it('return true for prosemirror nodes', () => {
expect(isProsemirrorNode(p())).toBeTrue();
expect(isProsemirrorNode(blockquote())).toBeTrue();
});
it('returns false for non-prosemirror nodes', () => {
expect(isProsemirrorNode(em())).toBeFalse();
});
});
describe('getMarkAttributes', () => {
it('returns correct mark attrs', () => {
const attributes = { href: '/awesome', title: 'awesome' };
const { aHref } = pm.builders(testSchema, {
aHref: { markType: 'link', ...attributes },
});
const { state, schema } = createEditor(doc(p('a link', aHref('linked <cursor>here'))));
expect(getMarkAttributes(state, schema.marks.link)).toEqual(attributes);
});
it('returns false when mark not found', () => {
const { state, schema } = createEditor(doc(p('a link', em('linked <cursor>here'))));
expect(getMarkAttributes(state, schema.marks.link)).toBeFalse();
});
});
| expect(isTextDomNode(node)).toBeTrue();
});
it('returns false for non-text domNodes', () => {
const node = document.createElement('div');
expect(isTextDomNode(node)).toBeFalse();
});
});
describe('isElementDOMNode', () => {
it('returns true for element domNodes', () => {
const node = document.createElement('div');
expect(isElementDomNode(node)).toBeTrue();
});
it('returns false for non-element domNodes', () => {
const node = document.createTextNode('Text node');
expect(isElementDomNode(node)).toBeFalse();
});
});
describe('getMarkRange', () => {
it('returns the the mark range when in an active mark', () => {
const { state, schema } = createEditor(doc(p('Something', em('is <cursor>italic'))));
expect(getMarkRange(state.selection.$from, schema.marks.em)).toEqual({ from: 10, to: 19 });
});
it('returns false when no active selection', () => {
const { state, schema } = createEditor(doc(p('Something', em('is italic'))));
expect(getMarkRange(state.selection.$from, schema.marks.em)).toBeFalse();
});
it('only returns true when $pos starts within mark', () => {
const { state, schema } = createEditor(doc(p('Some<start>thing', em('is<end> italic'))));
expect(getMarkRange(state.selection.$from, schema.marks.em)).toBeFalse();
});
});
test('getNearestNonTextNode', () => {
const div = document.createElement('div');
const text = document.createTextNode('hello');
div.append(text);
expect(getNearestNonTextElement(text)).toBe(div);
expect(getNearestNonTextElement(div)).toBe(div);
});
describe('selections', () => {
it('recognises a valid text selection', () => {
const { state } = createEditor(doc(p('Some<start>thing<end>')));
expect(isTextSelection(state.selection)).toBeTrue();
expect(isSelection(state.selection)).toBeTrue();
expect(isNodeSelection(state.selection)).toBeFalse();
});
it('recognises a node selection', () => {
const { state } = createEditor(doc(p('Some<node>thing')));
expect(isTextSelection(state.selection)).toBeFalse();
expect(isNodeSelection(state.selection)).toBeTrue();
});
it('returns false for `undefined`', () => {
const noValue = undefined;
expect(isTextSelection(noValue)).toBeFalse();
expect(isNodeSelection(noValue)).toBeFalse();
expect(isSelection(noValue)).toBeFalse();
});
});
describe('getSelectedWord', () => {
it('should select the word the cursor is currently within', () => {
const { state } = createEditor(doc(p('Something thi<cursor>s is a word')));
expect(getSelectedWord(state)).toEqual({ from: 11, to: 15 });
});
it('should select the word the cursor is before', () => {
const { state } = createEditor(doc(p('Something <cursor>this is a word')));
expect(getSelectedWord(state)).toEqual({ from: 11, to: 15 });
});
it('should still select the word for partial selection is before', () => {
const { state } = createEditor(doc(p('Something <start>t<end>his is a word')));
expect(getSelectedWord(state)).toEqual({ from: 11, to: 15 });
});
it('should expand the selection', () => {
const { state } = createEditor(doc(p('Something th<start>is <end>is a word')));
expect(getSelectedWord(state)).toEqual({ from: 11, to: 18 });
});
it('should return false for ambiguous locations', () => {
const { state } = createEditor(doc(p('Something this <cursor> is a word')));
expect(getSelectedWord(state)).toBeFalse();
});
it('should return false for completely empty locations', () => {
const { state } = createEditor(doc(p(' <cursor> ')));
expect(getSelectedWord(state)).toBeFalse();
});
});
describe('atDocEnd', () => {
it('returns true at the end of the document', () => {
const { state } = createEditor(doc(p('Something<cursor>')));
expect(atDocEnd(state)).toBeTrue();
});
it('returns false when no selection', () => {
const { state } = createEditor(doc(p('Something')));
expect(atDocEnd(state)).toBeFalse();
});
it('returns true for a node selection', () => {
const { state } = createEditor(doc(p('<node>Something')));
expect(atDocEnd(state)).toBeTrue();
});
it('returns true for full selection', () => {
const { state } = createEditor(doc(p('<all>Something')));
expect(atDocEnd(state)).toBeTrue();
});
});
describe('atDocStart', () => {
it('returns true at the start of the document', () => {
const { state } = createEditor(doc(p('<cursor>Something')));
expect(atDocStart(state)).toBeTrue();
});
it('returns true for full selection', () => {
const { state } = createEditor(doc(p('Some<all>thing')));
expect(atDocStart(state)).toBeTrue();
});
it('returns false elsewhere', () => {
const { state } = createEditor(doc(p('Someth<cursor>ing')));
expect(atDocStart(state)).toBeFalse();
});
});
test('startPositionOfParent', () => {
const { state } = createEditor(doc(p('Something', p('This has a position<cursor>'))));
expect(startPositionOfParent(state.selection.$from)).toBe(11);
});
test('endPositionOfParent', () => {
const { state } = createEditor(
doc(p('Something', p('This has a position<cursor>'), 'what becomes')),
);
expect(endPositionOfParent(state.selection.$from)).toBe(31);
});
describe('getCursor', () => {
it('returns cursor for a valid text selection', () => {
const { state } = createEditor(doc(p('Something<cursor>')));
expect(getCursor(state.selection)).toEqual((state.selection as TextSelection).$cursor);
});
it('returns undefined for non-text selection', () => {
const { state } = createEditor(doc(p('<node>Something')));
expect(getCursor(state.selection)).toBeUndefined();
});
});
describe('isDocNode', () => {
it('returns true for doc nodes', () => {
expect(isDocNode(doc(), testSchema)).toBeTrue();
expect(isDocNode(doc())).toBeTrue();
});
it('returns false for non-doc nodes', () => {
expect(isDocNode(p())).toBeFalse();
// @ts-expect-error
expect(isDocNode()).toBeFalse();
});
});
describe('isRemirrorJSON', () => {
it('returns true for doc objects', () => {
expect(isRemirrorJSON({ type: 'doc', content: [{ type: 'paragraph' }] })).toBeTrue();
});
it('returns false for non-doc nodes', () => {
expect(isRemirrorJSON({ type: 'paragraph' })).toBeFalse();
});
it('return false when doc node missing content array', () => {
expect(isRemirrorJSON({ type: 'doc' })).toBeFalse();
expect(isRemirrorJSON({ type: 'doc', content: {} })).toBeFalse();
});
});
describe('createDocumentNode', () => {
it('returns the same node if already a document node', () => {
const content = doc(p('Content'));
expect(createDocumentNode({ content, schema: testSchema })).toBe(content);
});
it('creates content via an ObjectNode', () => {
expect(
createDocumentNode({ content: docNodeBasicJSON, schema: testSchema })!.textContent,
).toContain('basic');
});
it('creates content via custom string handler', () => {
expect(
createDocumentNode({
content: '<p>basic html</p>',
schema: testSchema,
stringHandler: fromHtml,
})!.textContent,
).toContain('basic html');
});
});
describe('toHTML', () => {
const node = doc(p('hello'));
it('transforms a doc to its inner html', () => {
expect(toHtml({ node, schema: testSchema })).toBe('<p>hello</p>');
});
it('allows for custom document to be passed in', () => {
expect(toHtml({ node, schema: testSchema, doc: document })).toBe('<p>hello</p>');
});
});
describe('toDOM', () => {
const node = doc(p('hello'));
it('transforms a doc into a documentFragment', () => {
expect(toDom({ node, schema: testSchema })).toBeInstanceOf(DocumentFragment);
});
it('allows for custom document to be passed in', () => {
expect(toDom({ node, schema: testSchema, doc: domino.createDocument() })).toBeTruthy();
});
});
describe('fromHTML', () => {
const content = `<p>Hello</p>`;
it('transform html into a prosemirror node', () => {
expect(fromHtml({ content, schema: testSchema })).toEqualProsemirrorNode(doc(p('Hello')));
});
it('allows for custom document to be passed in', () => {
expect(
fromHtml({ content, schema: testSchema, doc: domino.createDocument() }),
).toEqualProsemirrorNode(doc(p('Hello')));
});
});
test('getRemirrorJSON', () => {
const { state } = createEditor(doc(p('Hello')));
expect(getRemirrorJSON(state)).toEqual({
type: 'doc',
content: [{ type: 'paragraph', content: [{ type: 'text', text: 'Hello' }] }],
});
});
describe('isStateEqual', () => {
it('matches identical states', () => {
const { state } = createEditor(doc(p('Hello')));
expect(areStatesEqual(state, state)).toBeTrue();
});
it('ignores selection by default', () => {
const { state: a } = createEditor(doc(p('<cursor>Hello')));
const { state: b } = createEditor(doc(p('Hello<cursor>')));
expect(areStatesEqual(a, b)).toBeTrue();
});
it('can fail for different selection', () => {
const { state: a } = createEditor(doc(p('<cursor>Hello')));
const { state: b } = createEditor(doc(p('Hello<cursor>')));
expect(areStatesEqual(a, b, { checkSelection: true })).toBeFalse();
});
it('returns false with non identical schema', () => {
const a = renderEditor([]);
const b = renderEditor([]);
a.add(a.nodes.doc(a.nodes.p('Hello')));
b.add(b.nodes.doc(b.nodes.p('Hello')));
expect(areStatesEqual(a.state, b.state)).toBeFalse();
});
});
describe('areSchemasCompatible', () => {
it('is true for identical schema', () => {
const { schema } = renderEditor([]);
expect(areSchemasCompatible(schema, schema)).toBe(true);
});
it('is true for similar schema', () => {
const { schema: a } = renderEditor([]);
const { schema: b } = renderEditor([]);
expect(areSchemasCompatible(a, b)).toBe(true);
});
it('is false for schemas with different mark lengths', () => {
const { schema: a } = renderEditor([new BoldExtension()]);
const { schema: b } = renderEditor([]);
expect(areSchemasCompatible(a, b)).toBe(false);
});
it('is false schemas with different marks', () => {
const { schema: a } = renderEditor([new BoldExtension()]);
const { schema: b } = renderEditor([new ItalicExtension()]);
expect(areSchemasCompatible(a, b)).toBe(false);
});
it('is false schemas with different node lengths', () => {
const { schema: a } = renderEditor([new BlockquoteExtension()]);
const { schema: b } = renderEditor([]);
expect(areSchemasCompatible(a, b)).toBe(false);
});
it('is false schemas with different nodes', () => {
const { schema: a } = renderEditor([new BlockquoteExtension()]);
const { schema: b } = renderEditor([new HeadingExtension()]);
expect(areSchemasCompatible(a, b)).toBe(false);
});
});
describe('getInvalidContent', () => {
const validJSON = {
type: 'doc',
content: [
{
type: 'paragraph',
content: [
{ type: 'text', text: 'This is the content ' },
{
type: 'text',
marks: [{ type: 'em' }, { type: 'strong' }],
text: 'That is strong and italic',
},
],
},
],
};
const invalidJSONMarks = {
type: 'doc',
content: [
{
type: 'paragraph',
content: [
{ type: 'text', text: 'This is the content ' },
{
type: 'text',
marks: [
{ type: 'em', attrs: { href: '//test.com' } },
{ type: 'invalid' },
{ type: 'strong' },
{ type: 'asdf' },
],
text: 'That is strong and italic',
},
],
},
],
};
const invalidJSONNode = {
type: 'doc',
content: [
{
type: 'invalid',
content: [
{ type: 'text', text: 'This is the content ' },
{
type: 'text',
marks: [{ type: 'em' }, { type: 'strong' }],
text: 'That is strong and italic',
},
],
},
{
type: 'paragraph',
content: [
{
type: 'invalid',
content: [{ type: 'text', marks: [{ type: 'em' }], text: 'asdf' }],
},
],
},
],
};
it('returns a transformer which passes for valid json', () => {
expect(getInvalidContent({ json: validJSON, schema: testSchema }).invalidContent).toHaveLength(
0,
);
});
it('`transformers.remove` removes invalid nodes', () => {
const { invalidContent, transformers } = getInvalidContent({
json: invalidJSONNode,
schema: testSchema,
});
expect(transformers.remove(invalidJSONNode, invalidContent)).toEqual({
type: 'doc',
content: [{ type: 'paragraph', content: [] }],
});
});
it('`transformers.remove` removes invalid marks', () => {
const { invalidContent, transformers } = getInvalidContent({
json: invalidJSONMarks,
schema: testSchema,
});
expect(transformers.remove(invalidJSONMarks, invalidContent)).toEqual({
type: 'doc',
content: [
{
type: 'paragraph',
content: [
{ type: 'text', text: 'This is the content ' },
{
type: 'text',
marks: [{ type: 'em', attrs: { href: '//test.com' } }, { type: 'strong' }],
text: 'That is strong and italic',
},
],
},
],
});
});
}); | describe('isTextDOMNode', () => {
it('returns true for text domNodes', () => {
const node = document.createTextNode('Text node');
|
mod.rs | use crate::{
gateway::InterMessage,
model::{
id::{GuildId, UserId},
voice::VoiceState,
},
};
/// Interface for any compatible voice plugin.
///
/// This interface covers several serenity-specific hooks, as well as
/// packet handlers for voice-specific gateway messages.
#[async_trait]
pub trait VoiceGatewayManager: Send + Sync {
/// Performs initial setup at the start of a connection to Discord.
///
/// This will only occur once, and provides the bot's ID and shard count.
async fn initialise(&self, shard_count: u64, user_id: UserId);
/// Handler fired in response to a [`Ready`] event.
///
/// This provides the voice plugin with a channel to send gateway messages to Discord,
/// once per active shard.
///
/// [`Ready`]: crate::model::event::Event
async fn register_shard(&self, shard_id: u64, sender: Sender<InterMessage>);
/// Handler fired in response to a disconnect, reconnection, or rebalance.
///
/// This event invalidates the last sender associated with `shard_id`.
/// Unless the bot is fully disconnecting, this is often followed by a call
/// to [`Self::register_shard`]. Users may wish to buffer manually any gateway messages
/// sent between these calls.
async fn deregister_shard(&self, shard_id: u64);
/// Handler for VOICE_SERVER_UPDATE messages.
///
/// These contain the endpoint and token needed to form a voice connection session.
async fn server_update(&self, guild_id: GuildId, endpoint: &Option<String>, token: &str);
/// Handler for VOICE_STATE_UPDATE messages.
///
/// These contain the session ID needed to form a voice connection session.
async fn state_update(&self, guild_id: GuildId, voice_state: &VoiceState);
} | use async_trait::async_trait;
use futures::channel::mpsc::UnboundedSender as Sender;
|
|
test_integration.py | from __future__ import absolute_import
import responses
import six
from six.moves.urllib.parse import parse_qs, urlencode, urlparse
from sentry.integrations.slack import SlackIntegration
from sentry.models import Identity, IdentityProvider, IdentityStatus, Integration, OrganizationIntegration
from sentry.testutils import IntegrationTestCase
class SlackIntegrationTest(IntegrationTestCase):
provider = SlackIntegration
@responses.activate
def test_basic_flow(self):
resp = self.client.get(self.path)
assert resp.status_code == 302
redirect = urlparse(resp['Location'])
assert redirect.scheme == 'https'
assert redirect.netloc == 'slack.com'
assert redirect.path == '/oauth/authorize'
params = parse_qs(redirect.query)
assert params['scope'] == [' '.join(self.provider.identity_oauth_scopes)]
assert params['state']
assert params['redirect_uri'] == ['http://testserver/extensions/slack/setup/']
assert params['response_type'] == ['code']
assert params['client_id'] == ['slack-client-id']
# once we've asserted on it, switch to a singular values to make life
# easier
authorize_params = {k: v[0] for k, v in six.iteritems(params)}
responses.add(
responses.POST, 'https://slack.com/api/oauth.token',
json={
'ok': True,
'user_id': 'UXXXXXXX1',
'access_token': 'xoxp-xxxxxxxxx-xxxxxxxxxx-xxxxxxxxxxxx',
'team_id': 'TXXXXXXX1',
'team_name': 'Example', | responses.add(
responses.GET, 'https://slack.com/api/team.info',
json={
'ok': True,
'team': {
'domain': 'test-slack-workspace',
'icon': {'image_132': 'http://example.com/ws_icon.jpg'},
},
}
)
resp = self.client.get('{}?{}'.format(
self.path,
urlencode({
'code': 'oauth-code',
'state': authorize_params['state'],
})
))
mock_request = responses.calls[0].request
req_params = parse_qs(mock_request.body)
assert req_params['grant_type'] == ['authorization_code']
assert req_params['code'] == ['oauth-code']
assert req_params['redirect_uri'] == ['http://testserver/extensions/slack/setup/']
assert req_params['client_id'] == ['slack-client-id']
assert req_params['client_secret'] == ['slack-client-secret']
assert resp.status_code == 200
self.assertDialogSuccess(resp)
integration = Integration.objects.get(provider=self.provider.key)
assert integration.external_id == 'TXXXXXXX1'
assert integration.name == 'Example'
assert integration.metadata == {
'access_token': 'xoxp-xxxxxxxxx-xxxxxxxxxx-xxxxxxxxxxxx',
'scopes': sorted(self.provider.identity_oauth_scopes),
'icon': 'http://example.com/ws_icon.jpg',
'domain_name': 'test-slack-workspace.slack.com',
}
oi = OrganizationIntegration.objects.get(
integration=integration,
organization=self.organization,
)
assert oi.config == {}
idp = IdentityProvider.objects.get(
type='slack',
organization=self.organization,
)
identity = Identity.objects.get(
idp=idp,
user=self.user,
external_id='UXXXXXXX1',
)
assert identity.status == IdentityStatus.VALID | 'installer_user_id': 'UXXXXXXX1',
}
)
|
__init__.py | from flask_restful import *
import datetime
mongodb_url = "mongodb://Ranuga:[email protected]:27017,cluster0-shard-00-01.6n3dg.mongodb.net:27017,cluster0-shard-00-02.6n3dg.mongodb.net:27017/myFirstDatabase?ssl=true&replicaSet=atlas-uo9rgq-shard-0&authSource=admin&retryWrites=true&w=majority"
app = Flask(__name__)
app.debug = True
app.secret_key = "development"
cluster = MongoClient(mongodb_url)
from server.routes import * | from pymongo import *
from flask import * |
|
biastestctrl.rs | #[doc = "Reader of register BIASTESTCTRL"]
pub type R = crate::R<u32, super::BIASTESTCTRL>;
#[doc = "Writer for register BIASTESTCTRL"]
pub type W = crate::W<u32, super::BIASTESTCTRL>;
#[doc = "Register BIASTESTCTRL `reset()`'s with value 0"]
impl crate::ResetValue for super::BIASTESTCTRL {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `BIAS_RIP_RESET`"]
pub type BIAS_RIP_RESET_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BIAS_RIP_RESET`"]
pub struct BIAS_RIP_RESET_W<'a> {
w: &'a mut W,
}
impl<'a> BIAS_RIP_RESET_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)] | }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
impl R {
#[doc = "Bit 3 - Reset Bias Ripple Counter"]
#[inline(always)]
pub fn bias_rip_reset(&self) -> BIAS_RIP_RESET_R {
BIAS_RIP_RESET_R::new(((self.bits >> 3) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 3 - Reset Bias Ripple Counter"]
#[inline(always)]
pub fn bias_rip_reset(&mut self) -> BIAS_RIP_RESET_W {
BIAS_RIP_RESET_W { w: self }
}
} | pub fn set_bit(self) -> &'a mut W {
self.bit(true) |
with_id.rs | use super::{CurrentId, IntoIterator, Shiperator};
/// Shiperator yielding `EntityId` as well.
#[derive(Clone, Copy)]
pub struct WithId<I> {
iter: I,
}
impl<I> WithId<I> {
pub(super) fn new(iter: I) -> Self {
WithId { iter }
}
}
impl<I: CurrentId> Shiperator for WithId<I> {
type Item = (I::Id, I::Item);
fn first_pass(&mut self) -> Option<Self::Item> {
let item = self.iter.first_pass()?;
// SAFE first_pass is called before
Some((unsafe { self.iter.current_id() }, item))
}
fn post_process(&mut self) {
self.iter.post_process()
}
fn | (&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<I: CurrentId> CurrentId for WithId<I> {
type Id = I::Id;
unsafe fn current_id(&self) -> Self::Id {
self.iter.current_id()
}
}
impl<I: CurrentId> core::iter::IntoIterator for WithId<I> {
type IntoIter = IntoIterator<Self>;
type Item = <Self as Shiperator>::Item;
fn into_iter(self) -> Self::IntoIter {
IntoIterator(self)
}
}
| size_hint |
brokercell.go | /*
Copyright 2020 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package brokercell
import (
"context"
"fmt"
"github.com/google/knative-gcp/pkg/logging"
"github.com/kelseyhightower/envconfig"
"go.uber.org/zap"
hpav2beta2 "k8s.io/api/autoscaling/v2beta2"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/equality"
apierrs "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
appsv1listers "k8s.io/client-go/listers/apps/v1"
hpav2beta2listers "k8s.io/client-go/listers/autoscaling/v2beta2"
corev1listers "k8s.io/client-go/listers/core/v1"
"knative.dev/eventing/pkg/reconciler/names"
pkgreconciler "knative.dev/pkg/reconciler"
intv1alpha1 "github.com/google/knative-gcp/pkg/apis/intevents/v1alpha1"
bcreconciler "github.com/google/knative-gcp/pkg/client/injection/reconciler/intevents/v1alpha1/brokercell"
brokerlisters "github.com/google/knative-gcp/pkg/client/listers/broker/v1beta1"
"github.com/google/knative-gcp/pkg/reconciler"
"github.com/google/knative-gcp/pkg/reconciler/brokercell/resources"
reconcilerutils "github.com/google/knative-gcp/pkg/reconciler/utils"
)
type envConfig struct {
IngressImage string `envconfig:"INGRESS_IMAGE" required:"true"`
FanoutImage string `envconfig:"FANOUT_IMAGE" required:"true"`
RetryImage string `envconfig:"RETRY_IMAGE" required:"true"`
ServiceAccountName string `envconfig:"SERVICE_ACCOUNT" default:"broker"`
IngressPort int `envconfig:"INGRESS_PORT" default:"8080"`
MetricsPort int `envconfig:"METRICS_PORT" default:"9090"`
InternalMetricsEnabled bool `envconfig:"INTERNAL_METRICS_ENABLED" default:"false"`
}
type listers struct {
brokerLister brokerlisters.BrokerLister
hpaLister hpav2beta2listers.HorizontalPodAutoscalerLister
triggerLister brokerlisters.TriggerLister
configMapLister corev1listers.ConfigMapLister
serviceLister corev1listers.ServiceLister
endpointsLister corev1listers.EndpointsLister
deploymentLister appsv1listers.DeploymentLister
podLister corev1listers.PodLister
}
// NewReconciler creates a new BrokerCell reconciler.
func NewReconciler(base *reconciler.Base, ls listers) (*Reconciler, error) {
var env envConfig
if err := envconfig.Process("BROKER_CELL", &env); err != nil {
return nil, err
}
svcRec := &reconcilerutils.ServiceReconciler{
KubeClient: base.KubeClientSet,
ServiceLister: ls.serviceLister,
EndpointsLister: ls.endpointsLister,
Recorder: base.Recorder,
}
deploymentRec := &reconcilerutils.DeploymentReconciler{
KubeClient: base.KubeClientSet,
Lister: ls.deploymentLister,
Recorder: base.Recorder,
}
cmRec := &reconcilerutils.ConfigMapReconciler{
KubeClient: base.KubeClientSet,
Lister: ls.configMapLister,
Recorder: base.Recorder,
}
r := &Reconciler{
Base: base,
env: env,
listers: ls,
svcRec: svcRec,
deploymentRec: deploymentRec,
cmRec: cmRec,
}
return r, nil
}
// Reconciler implements controller.Reconciler for BrokerCell resources.
type Reconciler struct {
*reconciler.Base
listers
svcRec *reconcilerutils.ServiceReconciler
deploymentRec *reconcilerutils.DeploymentReconciler
cmRec *reconcilerutils.ConfigMapReconciler
env envConfig
}
// Check that our Reconciler implements Interface
var _ bcreconciler.Interface = (*Reconciler)(nil)
// ReconcileKind implements Interface.ReconcileKind.
func (r *Reconciler) ReconcileKind(ctx context.Context, bc *intv1alpha1.BrokerCell) pkgreconciler.Event {
// Why are we doing GC here instead of in the broker controller?
// 1. It's tricky to handle concurrency in broker controller. Suppose you are deleting all
// brokers at the same time, hard to tell if the brokercell should be gc'ed.
// 2. It's also more reliable. If for some reason we didn't delete the brokercell in the broker
// controller when we should (due to race, missing event, bug, etc), and if all the brokers are
// deleted, then we don't have a chance to retry.
// TODO(https://github.com/google/knative-gcp/issues/1196) It's cleaner to make this a separate controller.
if r.shouldGC(ctx, bc) {
logging.FromContext(ctx).Info("Garbage collecting brokercell", zap.String("brokercell", bc.Name), zap.String("Namespace", bc.Namespace))
return r.delete(ctx, bc)
}
bc.Status.InitializeConditions()
// Reconcile broker targets configmap first so that data plane pods are guaranteed to have the configmap volume
// mount available.
if err := r.reconcileConfig(ctx, bc); err != nil {
return err
}
// Reconcile ingress deployment, HPA and service.
ingressArgs := r.makeIngressArgs(bc)
ind, err := r.deploymentRec.ReconcileDeployment(ctx, bc, resources.MakeIngressDeployment(ingressArgs))
if err != nil {
logging.FromContext(ctx).Error("Failed to reconcile ingress deployment", zap.Any("namespace", bc.Namespace), zap.Any("name", bc.Name), zap.Error(err))
bc.Status.MarkIngressFailed("IngressDeploymentFailed", "Failed to reconcile ingress deployment: %v", err)
return err
}
ingressHPA := resources.MakeHorizontalPodAutoscaler(ind, r.makeIngressHPAArgs(bc))
if err := r.reconcileAutoscaling(ctx, bc, ingressHPA); err != nil {
logging.FromContext(ctx).Error("Failed to reconcile ingress HPA", zap.Any("namespace", bc.Namespace), zap.Any("name", bc.Name), zap.Error(err))
bc.Status.MarkIngressFailed("HorizontalPodAutoscalerFailed", "Failed to reconcile ingress HorizontalPodAutoscaler: %v", err)
return err
}
endpoints, err := r.svcRec.ReconcileService(ctx, bc, resources.MakeIngressService(ingressArgs))
if err != nil {
logging.FromContext(ctx).Error("Failed to reconcile ingress service", zap.Any("namespace", bc.Namespace), zap.Any("name", bc.Name), zap.Error(err))
bc.Status.MarkIngressFailed("IngressServiceFailed", "Failed to reconcile ingress service: %v", err)
return err
}
bc.Status.PropagateIngressAvailability(endpoints)
hostName := names.ServiceHostName(endpoints.GetName(), endpoints.GetNamespace())
bc.Status.IngressTemplate = fmt.Sprintf("http://%s/{namespace}/{name}", hostName)
// Reconcile fanout deployment and HPA.
fd, err := r.deploymentRec.ReconcileDeployment(ctx, bc, resources.MakeFanoutDeployment(r.makeFanoutArgs(bc)))
if err != nil {
logging.FromContext(ctx).Error("Failed to reconcile fanout deployment", zap.Any("namespace", bc.Namespace), zap.Any("name", bc.Name), zap.Error(err))
bc.Status.MarkFanoutFailed("FanoutDeploymentFailed", "Failed to reconcile fanout deployment: %v", err)
return err
}
fanoutHPA := resources.MakeHorizontalPodAutoscaler(fd, r.makeFanoutHPAArgs(bc))
if err := r.reconcileAutoscaling(ctx, bc, fanoutHPA); err != nil {
logging.FromContext(ctx).Error("Failed to reconcile fanout HPA", zap.Any("namespace", bc.Namespace), zap.Any("name", bc.Name), zap.Error(err))
bc.Status.MarkFanoutFailed("HorizontalPodAutoscalerFailed", "Failed to reconcile fanout HorizontalPodAutoscaler: %v", err)
return err
}
bc.Status.PropagateFanoutAvailability(fd)
// Reconcile retry deployment and HPA.
rd, err := r.deploymentRec.ReconcileDeployment(ctx, bc, resources.MakeRetryDeployment(r.makeRetryArgs(bc)))
if err != nil {
logging.FromContext(ctx).Error("Failed to reconcile retry deployment", zap.Any("namespace", bc.Namespace), zap.Any("name", bc.Name), zap.Error(err))
bc.Status.MarkRetryFailed("RetryDeploymentFailed", "Failed to reconcile retry deployment: %v", err)
return err
}
retryHPA := resources.MakeHorizontalPodAutoscaler(rd, r.makeRetryHPAArgs(bc))
if err := r.reconcileAutoscaling(ctx, bc, retryHPA); err != nil {
logging.FromContext(ctx).Error("Failed to reconcile retry HPA", zap.Any("namespace", bc.Namespace), zap.Any("name", bc.Name), zap.Error(err))
bc.Status.MarkRetryFailed("HorizontalPodAutoscalerFailed", "Failed to reconcile retry HorizontalPodAutoscaler: %v", err)
return err
}
bc.Status.PropagateRetryAvailability(rd)
bc.Status.ObservedGeneration = bc.Generation
return pkgreconciler.NewEvent(corev1.EventTypeNormal, "BrokerCellReconciled", "BrokerCell reconciled: \"%s/%s\"", bc.Namespace, bc.Name)
}
// shouldGC returns true if
// 1. the brokercell was automatically created by GCP broker controller (with annotation
// internal.events.cloud.google.com/creator: googlecloud), and
// 2. there is no brokers pointing to it
func (r *Reconciler) shouldGC(ctx context.Context, bc *intv1alpha1.BrokerCell) bool {
// TODO use the constants in #1132 once it's merged
// We only garbage collect brokercells that were automatically created by the GCP broker controller.
if bc.GetAnnotations()["internal.events.cloud.google.com/creator"] != "googlecloud" {
return false
}
// TODO(#866) Only select brokers that point to this brokercell by label selector once the
// webhook assigns the brokercell label, i.e.,
// r.brokerLister.List(labels.SelectorFromSet(map[string]string{"brokercell":bc.Name, "brokercellns":bc.Namespace}))
brokers, err := r.brokerLister.List(labels.Everything())
if err != nil {
logging.FromContext(ctx).Error("Failed to list brokers, skipping garbage collection logic", zap.String("brokercell", bc.Name), zap.String("Namespace", bc.Namespace))
return false
}
return len(brokers) == 0
}
func (r *Reconciler) delete(ctx context.Context, bc *intv1alpha1.BrokerCell) pkgreconciler.Event {
if err := r.RunClientSet.InternalV1alpha1().BrokerCells(bc.Namespace).Delete(ctx, bc.Name, metav1.DeleteOptions{}); err != nil {
return fmt.Errorf("failed to garbage collect brokercell: %w", err)
}
return pkgreconciler.NewEvent(corev1.EventTypeNormal, "BrokerCellGarbageCollected", "BrokerCell garbage collected: \"%s/%s\"", bc.Namespace, bc.Name)
}
func (r *Reconciler) makeIngressArgs(bc *intv1alpha1.BrokerCell) resources.IngressArgs {
return resources.IngressArgs{
Args: resources.Args{
ComponentName: resources.IngressName,
BrokerCell: bc,
Image: r.env.IngressImage,
ServiceAccountName: r.env.ServiceAccountName,
MetricsPort: r.env.MetricsPort,
AllowIstioSidecar: true,
CPURequest: bc.Spec.Components.Ingress.CPURequest,
CPULimit: bc.Spec.Components.Ingress.CPULimit,
MemoryRequest: bc.Spec.Components.Ingress.MemoryRequest,
MemoryLimit: bc.Spec.Components.Ingress.MemoryLimit,
},
Port: r.env.IngressPort,
}
}
func (r *Reconciler) makeIngressHPAArgs(bc *intv1alpha1.BrokerCell) resources.AutoscalingArgs {
return resources.AutoscalingArgs{
ComponentName: resources.IngressName,
BrokerCell: bc,
AvgCPUUtilization: bc.Spec.Components.Ingress.AvgCPUUtilization,
AvgMemoryUsage: bc.Spec.Components.Ingress.AvgMemoryUsage,
MaxReplicas: *bc.Spec.Components.Ingress.MaxReplicas,
MinReplicas: *bc.Spec.Components.Ingress.MinReplicas,
}
}
func (r *Reconciler) makeFanoutArgs(bc *intv1alpha1.BrokerCell) resources.FanoutArgs {
return resources.FanoutArgs{
Args: resources.Args{
ComponentName: resources.FanoutName,
BrokerCell: bc,
Image: r.env.FanoutImage,
ServiceAccountName: r.env.ServiceAccountName,
MetricsPort: r.env.MetricsPort,
AllowIstioSidecar: true,
CPURequest: bc.Spec.Components.Fanout.CPURequest,
CPULimit: bc.Spec.Components.Fanout.CPULimit,
MemoryRequest: bc.Spec.Components.Fanout.MemoryRequest,
MemoryLimit: bc.Spec.Components.Fanout.MemoryLimit,
},
}
}
func (r *Reconciler) makeFanoutHPAArgs(bc *intv1alpha1.BrokerCell) resources.AutoscalingArgs {
return resources.AutoscalingArgs{
ComponentName: resources.FanoutName,
BrokerCell: bc,
AvgCPUUtilization: bc.Spec.Components.Fanout.AvgCPUUtilization,
AvgMemoryUsage: bc.Spec.Components.Fanout.AvgMemoryUsage,
MaxReplicas: *bc.Spec.Components.Fanout.MaxReplicas,
MinReplicas: *bc.Spec.Components.Fanout.MinReplicas,
}
}
func (r *Reconciler) makeRetryArgs(bc *intv1alpha1.BrokerCell) resources.RetryArgs {
return resources.RetryArgs{
Args: resources.Args{
ComponentName: resources.RetryName,
BrokerCell: bc,
Image: r.env.RetryImage,
ServiceAccountName: r.env.ServiceAccountName,
MetricsPort: r.env.MetricsPort,
AllowIstioSidecar: true,
CPURequest: bc.Spec.Components.Retry.CPURequest,
CPULimit: bc.Spec.Components.Retry.CPULimit,
MemoryRequest: bc.Spec.Components.Retry.MemoryRequest,
MemoryLimit: bc.Spec.Components.Retry.MemoryLimit,
},
}
}
func (r *Reconciler) makeRetryHPAArgs(bc *intv1alpha1.BrokerCell) resources.AutoscalingArgs {
return resources.AutoscalingArgs{
ComponentName: resources.RetryName,
BrokerCell: bc,
AvgCPUUtilization: bc.Spec.Components.Retry.AvgCPUUtilization,
AvgMemoryUsage: bc.Spec.Components.Retry.AvgMemoryUsage,
MaxReplicas: *bc.Spec.Components.Retry.MaxReplicas,
MinReplicas: *bc.Spec.Components.Retry.MinReplicas,
}
}
func (r *Reconciler) reconcileAutoscaling(ctx context.Context, bc *intv1alpha1.BrokerCell, desired *hpav2beta2.HorizontalPodAutoscaler) error {
existing, err := r.hpaLister.HorizontalPodAutoscalers(desired.Namespace).Get(desired.Name)
if apierrs.IsNotFound(err) {
existing, err = r.KubeClientSet.AutoscalingV2beta2().HorizontalPodAutoscalers(desired.Namespace).Create(ctx, desired, metav1.CreateOptions{})
if apierrs.IsAlreadyExists(err) {
return nil
}
if err == nil {
r.Recorder.Eventf(bc, corev1.EventTypeNormal, "HorizontalPodAutoscalerCreated", "Created HPA %s/%s", desired.Namespace, desired.Name)
}
return err
}
if err != nil {
return err
}
if !equality.Semantic.DeepDerivative(desired.Spec, existing.Spec) {
// Don't modify the informers copy.
copy := existing.DeepCopy()
copy.Spec = desired.Spec
_, err := r.KubeClientSet.AutoscalingV2beta2().HorizontalPodAutoscalers(copy.Namespace).Update(ctx, copy, metav1.UpdateOptions{})
if err == nil {
r.Recorder.Eventf(bc, corev1.EventTypeNormal, "HorizontalPodAutoscalerUpdated", "Updated HPA %s/%s", desired.Namespace, desired.Name)
}
return err
}
return nil
} | |
util.py | '''
system_hotkey.util
general utilites..
'''
import _thread as thread
import threading
from queue import Queue
import queue
from functools import wraps
import time
def unique_int(values):
'''
returns the first lowest integer
that is not in the sequence passed in
if a list looks like 3,6
of the first call will return 1, and then 2
and then 4 etc
'''
last = 0
for num in values:
if last not in values:
break
else:
last += 1
return last
def __init__(self):
    # Exceptions raised inside wrapped functions are ferried back to the
    # caller's thread through this queue.
    self.queue = queue.Queue()

def catch_and_raise(self, func, timeout=0.5):
    '''
    Wait (up to ``timeout`` seconds) for ``func`` to finish, then re-raise
    any exception it recorded on the queue.'''
    self.wait_event(func, timeout)
    self._check_for_errors(func)

def mark_done(self, function):
    '''Wrap functions so that we can monitor when they are done.

    The wrapper clears the function's completion event on entry, records
    any raised exception on the queue, and sets the event on exit
    (whether the call succeeded or failed).
    '''
    self.init_wrap(function)
    @wraps(function)
    def decorator(*args, **kwargs):
        # Function has started running
        self.clear_event(function)
        try:
            results = function(*args, **kwargs)
        except Exception as err:
            # Stash the error; catch_and_raise() re-raises it for the caller.
            self.queue.put(err)
        else:
            return results
        finally:
            # Function has finished running
            self.set_event(function)
    return decorator

def put(self, x):
    # Manually record an object (normally an exception) for later re-raise.
    self.queue.put(x)

def init_wrap(self, func):
    # Create the per-function threading.Event used to signal completion.
    name = self._make_event_name(func)
    event = threading.Event()
    setattr(self, name, event)

def _check_for_errors(self, func):
    # Re-raise the oldest recorded exception, if any.
    # NOTE(review): ``func`` is unused — errors are not matched to the
    # function that raised them; confirm that is acceptable.
    try:
        error = self.queue.get(block=False)
    except queue.Empty:
        pass
    else:
        raise error

def _make_event_name(self, func):
    # Attribute name under which a function's completion event is stored.
    return '_event_' + func.__name__

def get_event(self, func):
    # Fetch the completion event created by init_wrap().
    return getattr(self, self._make_event_name(func))

def set_event(self, func):
    self.get_event(func).set()

def clear_event(self, func):
    self.get_event(func).clear()

def wait_event(self, func, *args):
    # Optional positional arg is forwarded as the Event.wait() timeout.
    self.get_event(func).wait(*args)
class CallSerializer():
    # Runs queued function calls one at a time on a dedicated worker
    # thread, so decorated functions never have overlapping invocations.
    def __init__(self):
        self.queue = Queue()
        # Worker thread that drains the call queue forever.
        thread.start_new_thread(self.call_functions, (),)
        self.bug_catcher = ExceptionSerializer()

    def call_functions(self):
        # Worker loop: execute queued calls strictly in FIFO order.
        while 1:
            func, args, kwargs = self.queue.get(block=True)
            func(*args, **kwargs)

    def serialize_call(self, timeout=0.5):
        '''
        a call to a function decorated will not have
        overlapping calls, i.e thread safe
        '''
        def state(function):
            @wraps(function)
            def decorator(*args, **kwargs):
                # Function will let us know when it is done running
                # This is done so we can catch exceptions raised
                # in functions that are run within threads
                mark_func = self.bug_catcher.mark_done(function)
                self.queue.put((mark_func, args, kwargs))
                # wait for the function to finish and raise errors
                self.bug_catcher.catch_and_raise(function, timeout)
            return decorator
return state | class ExceptionSerializer(): |
instance.go | package main
import (
"encoding/base64"
"fmt"
"io/ioutil"
"os"
"path"
"strings"
"github.com/docker/infrakit/pkg/spi/instance"
"github.com/docker/infrakit/pkg/types"
maas "github.com/juju/gomaasapi"
"net/url"
)
// NewMaasPlugin creates an instance plugin for MaaS.
func NewMaasPlugin(dir string, key string, url string, version string) instance.Plugin |
type maasPlugin struct {
MaasfilesDir string
MaasObj *maas.MAASObject
controller maas.Controller
}
// Validate performs local validation on a provision request.
// Currently a no-op: every request is accepted as-is.
func (m maasPlugin) Validate(req *types.Any) error {
	return nil
}
// convertSpecToMaasParam converts a decoded spec into MAAS API parameters.
// NOTE(review): the spec is currently ignored and an empty parameter set is
// returned — presumably a stub awaiting property mapping; confirm.
func (m maasPlugin) convertSpecToMaasParam(spec map[string]interface{}) url.Values {
	param := url.Values{}
	return param
}
// addTag creates a MAAS tag with the given name and comment via
// POST to the tags endpoint, returning the API response.
func (m maasPlugin) addTag(name string, comment string) (maas.JSONObject, error) {
	params := url.Values{"name": {name}, "comment": {comment}}
	return m.MaasObj.GetSubObject("tags").CallPost("", params)
}
// delTag deletes the named MAAS tag.
func (m maasPlugin) delTag(name string) error {
	return m.MaasObj.GetSubObject("tags").GetSubObject(name).Delete()
}
// addTagToNodes applies tagname to the given nodes, creating the tag
// first if it does not yet exist.
func (m maasPlugin) addTagToNodes(systemIDs []string, tagname string, comment string) error {
	tObj, err := m.MaasObj.GetSubObject("tags").GetSubObject(tagname).Get()
	if err != nil {
		// Tag lookup failed — assume it doesn't exist and create it.
		created, createErr := m.addTag(tagname, comment)
		if createErr != nil {
			return createErr
		}
		if tObj, createErr = created.GetMAASObject(); createErr != nil {
			return createErr
		}
	}
	_, err = tObj.CallPost("update_nodes", url.Values{"add": systemIDs})
	return err
}
// removeTagfromNodes detaches the given tag from the listed nodes.
func (m maasPlugin) removeTagfromNodes(systemIDs []string, tag string) error {
	_, err := m.MaasObj.GetSubObject("tags").GetSubObject(tag).CallPost(
		"update_nodes", url.Values{"remove": systemIDs})
	return err
}
// getTagsFromNode returns the tags attached to the machine identified by
// systemID; errors if the ID does not resolve to exactly one machine.
func (m maasPlugin) getTagsFromNode(systemID string) ([]string, error) {
	machines, err := m.controller.Machines(maas.MachinesArgs{SystemIDs: []string{systemID}})
	if err != nil {
		return nil, err
	}
	if len(machines) != 1 {
		return nil, fmt.Errorf("Invalid systemID %s", systemID)
	}
	return machines[0].Tags(), nil
}
// Provision creates a new instance.
// It allocates a MAAS machine (optionally pinned to the host owning the
// static IP given as LogicalID), boots it with spec.Init as user data,
// labels it, and records its system ID in a per-machine directory on disk.
func (m maasPlugin) Provision(spec instance.Spec) (*instance.ID, error) {
	// Decode opaque properties early so malformed specs fail fast.
	var properties map[string]interface{}
	if spec.Properties != nil {
		if err := spec.Properties.Decode(&properties); err != nil {
			return nil, fmt.Errorf("Invalid instance properties: %s", err)
		}
	}
	ama := maas.AllocateMachineArgs{}
	if spec.LogicalID != nil {
		// A LogicalID is interpreted as a static IP: find the machine that
		// owns that address and pin the allocation to its hostname.
		ms, err := m.controller.Machines(maas.MachinesArgs{})
		if err != nil {
			return nil, err
		}
		ipcont := func(reqip string, machines []maas.Machine) (bool, string) {
			for _, i := range machines {
				if arrayContains(i.IPAddresses(), reqip) {
					return true, i.Hostname()
				}
			}
			return false, ""
		}
		r, hn := ipcont(string(*spec.LogicalID), ms)
		if !r {
			return nil, fmt.Errorf("Invalid LogicalID (%s) you should set static IP", spec.LogicalID)
		}
		ama.Hostname = hn
	}
	am, _, err := m.controller.AllocateMachine(ama)
	if err != nil {
		return nil, err
	}
	// Boot with the init script base64-encoded as user data.
	if err := am.Start(maas.StartArgs{
		UserData: base64.StdEncoding.EncodeToString([]byte(spec.Init)),
	}); err != nil {
		return nil, err
	}
	systemID := am.SystemID()
	id := instance.ID(systemID)
	// Track the instance on disk under a per-machine temp directory.
	machineDir, err := ioutil.TempDir(m.MaasfilesDir, "infrakit-")
	if err != nil {
		return nil, err
	}
	if err := ioutil.WriteFile(path.Join(machineDir, "MachineID"), []byte(systemID), 0755); err != nil {
		return nil, err
	}
	err = m.Label(id, spec.Tags)
	if err != nil {
		return nil, err
	}
	if spec.LogicalID != nil {
		// Remember the static IP so Destroy/inspection can find it later.
		if err := ioutil.WriteFile(path.Join(machineDir, "ip"), []byte(*spec.LogicalID), 0666); err != nil {
			return nil, err
		}
	}
	return &id, nil
}
// Label labels the instance
// Existing tags on the node are removed first, then one MAAS tag per label
// is added, encoded as "<key with '.' -> '_'>_<value>".
func (m maasPlugin) Label(id instance.ID, labels map[string]string) error {
	tags, err := m.getTagsFromNode(string(id))
	if err != nil {
		return err
	}
	for _, t := range tags {
		// NOTE(review): removal errors are ignored (best effort) — confirm
		// this is intentional.
		m.removeTagfromNodes([]string{string(id)}, t)
	}
	for k, v := range labels {
		tag := strings.Replace(k, ".", "_", -1) + "_" + v
		err = m.addTagToNodes([]string{string(id)}, tag, v)
		if err != nil {
			return err
		}
	}
	return nil
}
// Destroy terminates an existing instance.
// It strips the node's labels, releases the machine back to MAAS, and
// removes the on-disk tracking directory created by Provision.
func (m maasPlugin) Destroy(id instance.ID, context instance.Context) error {
	err := m.Label(id, map[string]string{})
	if err != nil {
		return err
	}
	err = m.controller.ReleaseMachines(maas.ReleaseMachinesArgs{SystemIDs: []string{string(id)}})
	if err != nil {
		return err
	}
	// Find the tracking directory whose MachineID file matches this id.
	files, err := ioutil.ReadDir(m.MaasfilesDir)
	if err != nil {
		return err
	}
	for _, file := range files {
		if !file.IsDir() {
			continue
		}
		machineDir := path.Join(m.MaasfilesDir, file.Name())
		systemID, err := ioutil.ReadFile(path.Join(machineDir, "MachineID"))
		if err != nil {
			if os.IsNotExist(err) {
				// Directory without a MachineID file — not ours; skip it.
				continue
			}
			return err
		} else if id == instance.ID(systemID) {
			if err := os.RemoveAll(machineDir); err != nil {
				return err
			}
		}
	}
	return nil
}
// arrayContains reports whether str occurs in arr.
func arrayContains(arr []string, str string) bool {
	for i := range arr {
		if arr[i] == str {
			return true
		}
	}
	return false
}
// DescribeInstances returns descriptions of all instances matching all of the provided tags.
// Labels are matched against MAAS tags using the same encoding Label writes:
// "<key with '.' -> '_'>_<value>".
func (m maasPlugin) DescribeInstances(tags map[string]string, properties bool) ([]instance.Description, error) {
	machines, err := m.controller.Machines(maas.MachinesArgs{})
	if err != nil {
		return nil, err
	}
	// hasAllTags reports whether every requested label is present on the node.
	// (Fixed: the closure previously ignored its reqtags parameter and
	// iterated the captured outer map; also renamed the loop variable below,
	// which shadowed the method receiver m, and dropped a dead err check.)
	hasAllTags := func(reqtags map[string]string, nodetags []string) bool {
		for k, v := range reqtags {
			tag := strings.Replace(k, ".", "_", -1) + "_" + v
			if !arrayContains(nodetags, tag) {
				return false
			}
		}
		return true
	}
	var ret []instance.Description
	for _, machine := range machines {
		if !hasAllTags(tags, machine.Tags()) {
			continue
		}
		// NOTE(review): IPAddresses()[0] panics on a machine with no
		// addresses — presumably matched machines always have one; confirm.
		lid := instance.LogicalID(machine.IPAddresses()[0])
		ret = append(ret, instance.Description{
			ID:        instance.ID(machine.SystemID()),
			Tags:      tags,
			LogicalID: &lid,
		})
	}
	return ret, nil
}
| {
var err error
var authClient *maas.Client
url = url + "/MAAS"
verurl := maas.AddAPIVersionToURL(url, version)
if key != "" {
authClient, err = maas.NewAuthenticatedClient(verurl, key)
} else {
authClient, err = maas.NewAnonymousClient(url, version)
}
if err != nil {
return nil
}
ctl, err := maas.NewController(maas.ControllerArgs{
BaseURL: verurl,
APIKey: key,
})
if err != nil {
return nil
}
maasobj := maas.NewMAAS(*authClient)
return &maasPlugin{MaasfilesDir: dir, MaasObj: maasobj, controller: ctl}
} |
install.py | from .utils.defaults import default_depot_path, default_install_dir, default_symlink_dir
from .utils.filters import f_major_version, f_minor_version
from .utils import query_yes_no
from .utils import current_architecture, current_system, current_libc
from .utils import latest_version
from .utils import DmgMounter, TarMounter
from .utils import Version
from .utils import verify_upstream
from .utils import color, show_verbose
from .download import download_package
import os
import re
import shutil
import subprocess
def is_installed(version, check_symlinks=True):
    """
    check if the required version is already installed.
    """
    # Names whose resolved executables must all report `version`.
    candidates = ["julia"]
    if version == "latest":
        candidates.append("julia-latest")
    elif check_symlinks:
        candidates.extend([f"julia-{f_major_version(version)}",
                           f"julia-{f_minor_version(version)}"])
    return not any(
        Version(get_exec_version(shutil.which(name))) != Version(version)
        for name in candidates
    )
def get_exec_version(path):
    """Return the version string reported by ``<path> --version``.

    Parses output of the form ``julia version 1.4.0-rc1`` down to
    ``1.4.0-rc1``.

    Args:
        path: path to an executable; may be None or invalid.

    Returns:
        str: the parsed version, or "0.0.1" when the executable cannot be
        run (invalid target, .cmd shims, missing file, ...).
    """
    ver_cmd = [path, "--version"]
    try:
        # outputs: "julia version 1.4.0-rc1"
        version = subprocess.check_output(ver_cmd).decode("utf-8")
        version = version.lower().split("version")[-1].strip()
    except Exception:  # nopep8
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # propagate; still best-effort for any runtime failure.
        # in case it fails in any situation: invalid target or command(.cmd)
        # issue: https://github.com/abelsiqueira/jill/issues/25
        version = "0.0.1"
    return version
def check_installer(installer_path, ext):
    """Validate that the installer file has the expected extension.

    Args:
        installer_path: path to the downloaded installer.
        ext: required extension, e.g. ".tar.gz".

    Raises:
        ValueError: if the file name does not end with ``ext``.
    """
    filename = os.path.basename(installer_path)
    if not filename.endswith(ext):
        # Include the offending file name (previously the message printed a
        # hard-coded "(unknown)" and never used `filename`).
        msg = f"The installer ({filename}) should be {ext} file"
        raise ValueError(msg)
def last_julia_version(version=None):
    """Return the newest installed root-project version strictly older
    than ``version`` (e.g. "v1.3"), or None if there is none.

    Args:
        version: semantic-version string; when None, any version qualifies.
    """
    def sort_key(ver):
        # "v1.10" -> (1, 10). Comparing (major, minor) tuples of ints fixes
        # the old float key, under which "1.10" sorted as 1.1 (below "1.2").
        major, minor = ver.lstrip("v").split(".")
        return (int(major), int(minor))
    upper = sort_key("v" + f_minor_version(version)) if version else (999, 999)
    proj_versions = os.listdir(os.path.join(default_depot_path(),
                                            "environments"))
    # Only directories shaped like "v<major>.<minor>" are candidates.
    proj_versions = [x for x in proj_versions if re.fullmatch(r"v\d+\.\d+", x)]
    proj_versions = sorted(filter(lambda ver: sort_key(ver) < upper,
                                  proj_versions),
                           key=sort_key)
    if proj_versions:
        return proj_versions[-1]
    else:
        return None
def make_symlinks(src_bin, symlink_dir, version):
    """Create version-aliased launchers pointing at ``src_bin``.

    On POSIX this makes symlinks (julia, julia-1, julia-1.4, ...); on
    Windows it writes .cmd shim files instead. If ``symlink_dir`` is not
    on PATH, it is appended (setx on Windows, ~/.bashrc otherwise).

    Args:
        src_bin: path to the installed julia executable.
        symlink_dir: directory in which to place the links/shims.
        version: version string being installed ("latest" for nightly).

    Raises:
        ValueError: if ``src_bin`` does not exist.
    """
    if not os.path.isfile(src_bin):
        raise(ValueError(f"{src_bin} doesn't exist."))
    system = current_system()
    # Normalize PATH entries before comparing so trailing slashes etc.
    # don't cause a false "not on PATH".
    if symlink_dir not in map(os.path.normpath, os.environ["PATH"].split(os.pathsep)):
        print(f"add {symlink_dir} to PATH")
        if system == "winnt":
            # FIXME: this also copies system PATH to user PATH
            subprocess.run(["powershell.exe",
                            "setx", "PATH", f'"$env:PATH;{symlink_dir}"'])
        else:
            msg = "~/.bashrc will be modified"
            msg += "\nif you're not using BASH, then you'll need manually"
            msg += f" add {symlink_dir} to your PATH"
            print(msg)
            rc_file = os.path.expanduser("~/.bashrc")
            with open(rc_file, "a") as file:
                file.writelines("\n# added by jill\n")
                file.writelines(f"export PATH={symlink_dir}:$PATH\n")
        print(f"you need to restart your current shell to update PATH")
    os.makedirs(symlink_dir, exist_ok=True)
    new_ver = Version(get_exec_version(src_bin))
    if version == "latest":
        # issue 11: don't symlink to julia
        link_list = ["julia-latest"]
    elif len(Version(version).build) > 0:
        # Commit builds get their own alias, separate from julia-latest.
        link_list = ["julia-dev"]
    elif len(new_ver.prerelease) > 0:
        # issue #76
        # - it is usually unwanted to symlink unstable release to `julia` and `julia-x`
        # - still symlink to `julia-x.y` because otherwise there is no way to access the unstable
        #   release.
        link_list = [f"julia-{f_minor_version(version)}"]
    else:
        link_list = [f"julia-{f(version)}" for f in (f_major_version,
                                                     f_minor_version)]
        link_list.append("julia")
    for linkname in link_list:
        linkpath = os.path.join(symlink_dir, linkname)
        if current_system() == "winnt":
            linkpath += ".cmd"
        # symlink rules:
        # 1. always symlink latest
        # 2. only make new symlink if it's a newer version
        #    - julia --> latest stable X.Y.Z
        #    - julia-1 --> latest stable 1.Y.Z
        #    - julia-1.0 --> latest stable 1.0.Z
        #    - don't make symlink to patch level
        if os.path.exists(linkpath) or os.path.islink(linkpath):
            if (os.path.islink(linkpath) and
                    os.readlink(linkpath) == src_bin):
                # happens when installing a new patch version
                continue
            old_ver = Version(get_exec_version(linkpath))
            if show_verbose():
                print(f"old symlink version: {old_ver}")
                print(f"new installation version: {new_ver}")
            if old_ver > new_ver:
                # if two versions are the same, use the new one
                continue
            msg = f"{color.YELLOW}remove old symlink"
            msg += f" {linkname}{color.END}"
            print(msg)
            os.remove(linkpath)
        print(f"{color.GREEN}make new symlink {linkpath}{color.END}")
        if current_system() == "winnt":
            with open(linkpath, 'w') as f:
                # create a cmd file to mimic how we do symlinks in linux
                f.writelines(['@echo off\n', f'"{src_bin}" %*'])
        else:
            os.symlink(src_bin, linkpath)
def copy_root_project(version):
    """Copy the newest older root environment to ``v<minor(version)>``.

    Any existing destination environment is preserved as a ``.bak``
    backup (replacing an older backup if present) before the copy.

    Args:
        version: version being installed; its minor version names the
            destination environment directory.
    """
    mver = f_minor_version(version)
    old_ver = last_julia_version(version)
    if old_ver is None:
        print(
            f"Can't find available old root project for version {version}")
        return None
    env_path = os.path.join(default_depot_path(), "environments")
    src_path = os.path.join(env_path, old_ver)
    dest_path = os.path.join(env_path, f"v{mver}")
    if src_path == dest_path:
        # Re-installing the same minor version: nothing to migrate.
        return None
    if os.path.exists(dest_path):
        bak_path = os.path.join(env_path, f"v{mver}.bak")
        if os.path.exists(bak_path):
            print(f"{color.YELLOW}delete old backup {bak_path}{color.END}")
            shutil.rmtree(bak_path)
        shutil.move(dest_path, bak_path)
        print(f"{color.YELLOW}move {dest_path} to {bak_path}{color.END}")
    shutil.copytree(src_path, dest_path)
def install_julia_tarball(package_path,
                          install_dir,
                          symlink_dir,
                          version,
                          upgrade):
    """Install Julia from a .tar.gz archive.

    Extracts the tarball into ``install_dir/julia-<minor|dev>``, replacing
    any previous installation there, then creates symlinks and optionally
    migrates the root project from an older version.

    Returns:
        True on completion.

    Raises:
        ValueError: if ``package_path`` is not a .tar.gz file.
    """
    check_installer(package_path, ".tar.gz")
    # raw string: in a plain literal "\+" and "\w" are invalid escape
    # sequences (SyntaxWarning on modern Python).
    if re.match(r"(.*)\+(\w+)$", version):
        # We want a different folder name for commit builds so that we can have
        # julia-dev and julia-latest points to two different julia versions
        suffix = 'dev'
    else:
        suffix = f_minor_version(version)
    with TarMounter(package_path) as root:
        src_path = root
        dest_path = os.path.join(install_dir, f"julia-{suffix}")
        if os.path.exists(dest_path):
            shutil.rmtree(dest_path)
            msg = f"{color.YELLOW}remove previous Julia installation:"
            msg += f" {dest_path}{color.END}"
            print(msg)
        # preserve lib symlinks, otherwise it might cause troubles
        # see also: https://github.com/JuliaGPU/CUDA.jl/issues/249
        shutil.copytree(src_path, dest_path, symlinks=True)
        print(f"{color.GREEN}install Julia to {dest_path}{color.END}")
    os.chmod(dest_path, 0o755)  # issue 12
    bin_path = os.path.join(dest_path, "bin", "julia")
    if current_system() == 'winnt':
        bin_path += '.exe'
    make_symlinks(bin_path, symlink_dir, version)
    if upgrade:
        copy_root_project(version)
    return True
def install_julia_dmg(package_path,
                      install_dir,
                      symlink_dir,
                      version,
                      upgrade):
    """Install Julia from a macOS .dmg image.

    Mounts the image, copies the Julia-*.app bundle into ``install_dir``
    (replacing any existing one), creates symlinks, and optionally migrates
    the root project from an older version.

    Returns:
        True on completion.

    Raises:
        ValueError: if ``package_path`` is not a .dmg file.
    """
    check_installer(package_path, ".dmg")
    with DmgMounter(package_path) as root:
        # mounted image contents:
        #   ['.VolumeIcon.icns', 'Applications', 'Julia-1.3.app']
        appname = next(filter(lambda x: x.lower().startswith('julia'),
                              os.listdir(root)))
        src_path = os.path.join(root, appname)
        dest_path = os.path.join(install_dir, appname)
        if os.path.exists(dest_path):
            msg = f"{color.YELLOW}remove previous Julia installation:"
            msg += f" {dest_path}{color.END}"
            print(msg)
            shutil.rmtree(dest_path)
        # preserve lib symlinks, otherwise it might cause troubles
        # see also: https://github.com/JuliaGPU/CUDA.jl/issues/249
        shutil.copytree(src_path, dest_path, symlinks=True)
        print(f"{color.GREEN}install Julia to {dest_path}{color.END}")
    bin_path = os.path.join(dest_path,
                            "Contents", "Resources", "julia", "bin", "julia")
    make_symlinks(bin_path, symlink_dir, version)
    if upgrade:
        copy_root_project(version)
    return True
def install_julia_exe(package_path,
                      install_dir,
                      symlink_dir,
                      version,
                      upgrade):
    """Install Julia from a Windows .exe installer.

    Runs the installer silently into ``install_dir/julia-<minor>``
    (replacing any existing installation), creates .cmd shims, and
    optionally migrates the root project from an older version.

    Returns:
        True on completion.

    Raises:
        ValueError: if ``package_path`` is not a .exe file.
    """
    check_installer(package_path, ".exe")
    dest_path = os.path.join(install_dir,
                             f"julia-{f_minor_version(version)}")
    if os.path.exists(dest_path):
        shutil.rmtree(dest_path, ignore_errors=True)
        msg = f"{color.YELLOW}remove previous Julia installation:"
        msg += f" {dest_path}{color.END}"
        print(msg)
    # build system changes for windows after 1.4
    # https://github.com/JuliaLang/julia/blob/release-1.4/NEWS.md#build-system-changes
    if Version(version).next_patch() < Version("1.4.0"):
        # it's always false if version == "latest"
        # pre-1.4 NSIS installer flags
        subprocess.check_output([f'{package_path}',
                                 '/S', f'/D={dest_path}'])
    else:
        # Inno Setup flags used by 1.4+ installers
        subprocess.check_output([f'{package_path}',
                                 '/VERYSILENT',
                                 f'/DIR={dest_path}'])
    print(f"{color.GREEN}install Julia to {dest_path}{color.END}")
    bin_path = os.path.join(dest_path, "bin", "julia.exe")
    make_symlinks(bin_path, symlink_dir, version)
    if upgrade:
        copy_root_project(version)
    return True
def hello_msg():
    """Print the JILL banner."""
    banner = (f"{color.BOLD}JILL - Julia Installer 4 Linux"
              f" (MacOS, Windows and FreeBSD) -- Light{color.END}\n")
    print(banner)
def | (version=None, *,
install_dir=None,
symlink_dir=None,
upgrade=False,
upstream=None,
unstable=False,
keep_downloads=False,
confirm=False,
reinstall=False):
"""
Install the Julia programming language for your current system
`jill install [version]` would satisfy most of your use cases, try it first
and then read description of other arguments. `version` is optional, valid
version syntax for it is:
* `stable`: latest stable Julia release. This is the _default_ option.
* `1`: latest `1.y.z` Julia release.
* `1.0`: latest `1.0.z` Julia release.
* `1.4.0-rc1`: as it is.
* `latest`/`nightly`: the nightly builds from source code.
For Linux/FreeBSD systems, if you run this command with `root` account,
then it will install Julia system-widely.
To download from a private mirror, please check `jill download -h`.
Arguments:
version:
The Julia version you want to install.
upstream:
manually choose a download upstream. For example, set it to "Official"
if you want to download from JuliaComputing's s3 buckets.
upgrade:
add `--upgrade` flag also copy the root environment from an older
Julia version.
unstable:
add `--unstable` flag to allow installation of unstable releases for auto version
query. For example, `jill install --unstable` might give you unstable installation
like `1.7.0-beta1`. Note that if you explicitly pass the unstable version, e.g.,
`jill install 1.7.0-beta1`, it will still work.
keep_downloads:
add `--keep_downloads` flag to not remove downloaded releases.
confirm: add `--confirm` flag to skip interactive prompt.
reinstall:
jill will skip the installation if the required Julia version already exists,
add `--reinstall` flag to force the reinstallation.
install_dir:
where you want julia packages installed.
symlink_dir:
where you want symlinks(e.g., `julia`, `julia-1`) placed.
"""
install_dir = install_dir if install_dir else default_install_dir()
install_dir = os.path.abspath(install_dir)
symlink_dir = symlink_dir if symlink_dir else default_symlink_dir()
symlink_dir = os.path.normpath(os.path.abspath(symlink_dir))
system, arch = current_system(), current_architecture()
version = str(version) if (version or str(version) == "0") else ''
version = "latest" if version == "nightly" else version
version = "" if version == "stable" else version
upstream = upstream if upstream else os.environ.get("JILL_UPSTREAM", None)
if system == "linux" and current_libc() == "musl":
# currently Julia tags musl as a system, e.g.,
# https://julialang-s3.julialang.org/bin/musl/x64/1.5/julia-1.5.1-musl-x86_64.tar.gz
system = "musl"
hello_msg()
if system == "winnt":
install_dir = install_dir.replace("\\\\", "\\").strip('\'"')
if not confirm:
version_str = version if version else "latest stable release"
question = "jill will:\n"
question += f" 1) install Julia {version_str} for {system}-{arch}"
question += f" into {color.UNDERLINE}{install_dir}{color.END}\n"
question += f" 2) make symlinks in {color.UNDERLINE}{symlink_dir}{color.END}\n"
question += f"You may need to manually add {color.UNDERLINE}{symlink_dir}{color.END} to PATH\n"
question += "Continue installation?"
to_continue = query_yes_no(question)
if not to_continue:
return False
if upstream:
verify_upstream(upstream)
wrong_args = False
try:
version = latest_version(
version, system, arch, upstream=upstream, stable_only=not unstable)
except ValueError:
# hide the nested error stack :P
wrong_args = True
if wrong_args:
msg = f"wrong version(>= 0.6.0) argument: {version}\n"
msg += f"Example: `jill install 1`"
raise(ValueError(msg))
if not reinstall and is_installed(version):
print(f"julia {version} already installed.")
return True
overwrite = True if version == "latest" else False
print(f"{color.BOLD}----- Download Julia -----{color.END}")
package_path = download_package(version, system, arch,
upstream=upstream,
overwrite=overwrite)
if not package_path:
return False
if package_path.endswith(".dmg"):
installer = install_julia_dmg
elif package_path.endswith(".tar.gz"):
installer = install_julia_tarball
elif package_path.endswith(".exe"):
installer = install_julia_exe
else:
print(f"{color.RED}Unsupported file format for {package_path}{color.END}.")
print(f"{color.BOLD}----- Install Julia -----{color.END}")
installer(package_path, install_dir, symlink_dir, version, upgrade)
if not keep_downloads:
print(f"{color.BOLD}----- Post Installation -----{color.END}")
print("remove downloaded files...")
print(f"remove {package_path}")
os.remove(package_path)
gpg_signature_file = package_path + ".asc"
if os.path.exists(gpg_signature_file):
print(f"remove {gpg_signature_file}")
os.remove(gpg_signature_file)
print(f"{color.GREEN}Done!{color.END}")
| install_julia |
editor.test.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as assert from 'assert';
import { TPromise } from 'vs/base/common/winjs.base';
import { EditorInput, toResource } from 'vs/workbench/common/editor';
import { DiffEditorInput } from 'vs/workbench/common/editor/diffEditorInput';
import { IEditorModel } from 'vs/platform/editor/common/editor';
import { URI } from 'vs/base/common/uri';
import { IUntitledEditorService, UntitledEditorService } from 'vs/workbench/services/untitled/common/untitledEditorService';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { workbenchInstantiationService } from 'vs/workbench/test/workbenchTestServices';
import { Schemas } from 'vs/base/common/network';
class | {
constructor(@IUntitledEditorService public untitledEditorService: UntitledEditorService) {
}
}
// Minimal EditorInput stub backed by a fixed resource URI; resolves to a
// null model since these tests only inspect the resource.
class FileEditorInput extends EditorInput {
	constructor(private resource: URI) {
		super();
	}
	getTypeId(): string {
		return 'editorResourceFileTest';
	}
	getResource(): URI {
		return this.resource;
	}
	resolve(): TPromise<IEditorModel> {
		return TPromise.as(null);
	}
}
suite('Workbench editor', () => {
	let instantiationService: IInstantiationService;
	let accessor: ServiceAccessor;
	setup(() => {
		instantiationService = workbenchInstantiationService();
		accessor = instantiationService.createInstance(ServiceAccessor);
	});
	teardown(() => {
		// Drop all untitled editors so state doesn't leak across tests.
		accessor.untitledEditorService.revertAll();
		accessor.untitledEditorService.dispose();
	});
	test('toResource', function () {
		const service = accessor.untitledEditorService;
		assert.ok(!toResource(null));
		// Untitled inputs: resource is returned, and scheme filters apply.
		const untitled = service.createOrGet();
		assert.equal(toResource(untitled).toString(), untitled.getResource().toString());
		assert.equal(toResource(untitled, { supportSideBySide: true }).toString(), untitled.getResource().toString());
		assert.equal(toResource(untitled, { filter: Schemas.untitled }).toString(), untitled.getResource().toString());
		assert.equal(toResource(untitled, { filter: [Schemas.file, Schemas.untitled] }).toString(), untitled.getResource().toString());
		assert.ok(!toResource(untitled, { filter: Schemas.file }));
		// File inputs: same checks with the schemes reversed.
		const file = new FileEditorInput(URI.file('/some/path.txt'));
		assert.equal(toResource(file).toString(), file.getResource().toString());
		assert.equal(toResource(file, { supportSideBySide: true }).toString(), file.getResource().toString());
		assert.equal(toResource(file, { filter: Schemas.file }).toString(), file.getResource().toString());
		assert.equal(toResource(file, { filter: [Schemas.file, Schemas.untitled] }).toString(), file.getResource().toString());
		assert.ok(!toResource(file, { filter: Schemas.untitled }));
		// Diff inputs yield no resource unless side-by-side is supported.
		const diffEditorInput = new DiffEditorInput('name', 'description', untitled, file);
		assert.ok(!toResource(diffEditorInput));
		assert.ok(!toResource(diffEditorInput, { filter: Schemas.file }));
		assert.ok(!toResource(diffEditorInput, { supportSideBySide: false }));
		assert.equal(toResource(file, { supportSideBySide: true }).toString(), file.getResource().toString());
		assert.equal(toResource(file, { supportSideBySide: true, filter: Schemas.file }).toString(), file.getResource().toString());
		assert.equal(toResource(file, { supportSideBySide: true, filter: [Schemas.file, Schemas.untitled] }).toString(), file.getResource().toString());
	});
}); | ServiceAccessor |
match-beginning-vert.rs | // run-pass
enum | {
A,
B,
C,
D,
E,
}
use Foo::*;
fn main() {
    // Compiler run-pass test: exercises a leading `|` (vert) at the start of
    // match arms and or-patterns, including one combined with an `if` guard.
    // The shape of these arms is the point of the test — do not "clean up".
    for foo in &[A, B, C, D, E] {
        match *foo {
            | A => println!("A"),
            | B | C if 1 < 2 => println!("BC!"),
            | _ => {},
        }
    }
}
| Foo |
views.py | from users.models import Users
from .serializers import UsersSerializer
from rest_framework import viewsets
from django.contrib.auth.hashers import make_password
from rest_framework.response import Response
from rest_framework.decorators import api_view, action
from users.models import Users
from clients.models import Clients
from projects.models import Projects
from tasks.models import Tasks
from django.core.paginator import Paginator
from rest_framework import status
from django.shortcuts import get_object_or_404
from django.conf import settings
import shutil
import os
@api_view(['POST'])
def multiDelete(request):
    """Delete every user whose primary key is listed under 'userId'."""
    ids = request.data.get('userId')
    # Delete one row at a time so any per-instance delete() behaviour runs.
    for user in Users.objects.filter(pk__in=ids):
        if user:
            user.delete()
    return Response({
        "res": "success"
    })
@api_view(['GET'])
def adminDashboardData(request):
    """Return all clients, projects and tasks for the admin dashboard.

    Each list entry carries a 1-based running row number ("num") for
    display purposes.
    """
    # Clients: id + name.
    resClients = []
    clients = Clients.objects.all()
    clicnt = 1
    for client in clients:
        if client:
            resClients.append({
                "num": clicnt,
                "id": client.id,
                "name": client.name,
            })
            clicnt += 1
    # Projects: id, name and data type.
    prjcnt = 1
    resProjects = []
    projects = Projects.objects.all()
    for project in projects:
        if project:
            resProjects.append({
                "num": prjcnt,
                "id": project.id,
                "name": project.name,
                "data_type": project.data_type
            })
            prjcnt += 1
    # Tasks: flattened with fields pulled from the related project.
    taskcnt = 1
    resTasklist = []
    tasklist = Tasks.objects.all()
    for task in tasklist:
        if task:
            resTasklist.append({
                "num": taskcnt,
                "id": task.id,
                "name": task.task_id,
                "project_name": task.project.name,
                "datatype": task.project.data_type,
                "items": task.nitems,
                "manager": str(task.assigned_manager)
            })
            taskcnt += 1
    return Response({
        "clients": resClients,
        "projects": resProjects,
        "tasklist": resTasklist,
    })
@api_view(['POST'])
def updateUser(request):
    """Update an existing user's profile fields and password.

    Returns {"result": "success"} on update, or {"result": "failed"} when
    the posted id does not exist. (Previously ``Users.objects.get`` raised
    DoesNotExist for unknown ids, so the "failed" branch was unreachable
    and the view answered with a 500 instead.)
    """
    userid = request.data['id']
    user = Users.objects.filter(pk=userid).first()
    res = {"result": "failed"}
    if user:
        user.username = request.data['username']
        user.first_name = request.data['first_name']
        user.last_name = request.data['last_name']
        user.email = request.data['email']
        user.type_user = request.data['type_user']
        # Store only the hashed password, never the plaintext.
        user.password = make_password(request.data['password'])
        user.save()
        res = {"result": "success"}
    return Response(res)
@api_view(['GET'])
def getCurrentClientData(request):
    """Return one page of clients plus paging metadata.

    Query params:
        page: 1-based page number.
        limit: page size.
    """
    page = request.GET.get('page')
    page_limit = request.GET.get('limit')
    resClients = []
    client_list = Clients.objects.all()
    paginator = Paginator(client_list, page_limit)
    clients = paginator.get_page(page)
    # Row numbers restart at 1 on every page (unchanged behaviour).
    clicnt = 1
    for client in clients:
        if client:
            resClients.append({
                "num": clicnt,
                "id": client.id,
                "name": client.name,
            })
            clicnt += 1
    return Response({
        "clients": resClients,
        # paginator.count issues a COUNT(*) query; the previous
        # len(client_list.values()) materialised every row for the same value.
        "totalRecords": paginator.count,
        "pageLimit": int(page_limit),
    })
class UsersViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Users, hashing passwords on create and removing
    the user's on-disk data directory on delete."""
    serializer_class = UsersSerializer
    queryset = Users.objects.all()

    def create(self, request):
        # Hash the plaintext password before the serializer persists it.
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        user = serializer.validated_data
        user['password'] = make_password(user['password'])
        user = serializer.save()
        print(user)
        return Response({
            "user": UsersSerializer(user, context=self.get_serializer_context()).data
        })

    def destroy(self, request, pk=None):
        user_id = self.kwargs.get('pk')
        user = get_object_or_404(Users, pk=user_id)
        # NOTE(review): the data directory is keyed by user.user_id, while
        # lookup uses the pk — confirm these refer to the same identifier.
        user_data_path = settings.MEDIA_ROOT + '/user_data/' + str(user.user_id)
        if os.path.exists(user_data_path):
            shutil.rmtree(user_data_path)
        user.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
| adminDashboardData |
app.module.ts | import { Module } from "@nestjs/common";
import { AppController } from "./app.controller";
import { AppService } from "./app.service";
import { TasksModule } from "./tasks/tasks.module";
import { TypeOrmModule } from "@nestjs/typeorm";
import { typeormConfig } from "./config/typeorm.config";
import { AuthModule } from './auth/auth.module';
@Module({
imports: [TasksModule, TypeOrmModule.forRoot(typeormConfig), AuthModule], | })
export class AppModule {} | controllers: [AppController],
providers: [AppService] |
lib.rs | pub mod instruction;
pub mod processor;
pub mod state;
pub mod utils;
use solana_program::{
account_info::AccountInfo,
entrypoint,
entrypoint::ProgramResult,
pubkey::Pubkey,
};
entrypoint!(process_instruction);
solana_program::declare_id!("A2zNDj1tMdLscxaNzLetdUVRi6E6Jjr54iaQkk7axMcG");
pub fn process_instruction(
program_id: &Pubkey,
accounts: &[AccountInfo],
instruction_data: &[u8],
) -> ProgramResult { | crate::processor::process_instruction(program_id, accounts, instruction_data)
} |
|
read_xml_export.py | """ read_xml_export.py
REPOSITORY:
https://github.com/DavidJLambert/Two-Windows-Event-Log-Summarizers
SUMMARY:
Scans XML exports of the Windows Event Log and reports summary statistics.
AUTHOR:
David J. Lambert
VERSION:
0.1.1
DATE:
July 10, 2020
"""
# -------- IMPORTS.
from __future__ import print_function
import xml.etree.ElementTree
import win32security
from frozendict import frozendict
import glob
from zipfile import ZipFile
import os
# -------- CODE.
def handle_files() -> None:
    """ Driver program.  Find XML files in current directory.

    Unzips any *.xml.zip exports that have not yet been extracted, then
    parses each *.xml file under ./sample_data and hands its root node to
    analyze_one_file, printing start/end banners around each file.

    Args:
        none.
    Returns:
        none.
    Raises:
        none.
    """
    # Read and parse XML file(s). First try to unzip any zipped files.
    for xml_zip_file in glob.glob('./sample_data/*.xml.zip'):
        with ZipFile(xml_zip_file, "r") as f:
            unzipped_name = xml_zip_file.replace(".zip", "")
            if not os.path.isfile(unzipped_name):
                f.extractall("./sample_data")

    # Read and parse XML file(s).
    xml_files = glob.glob('./sample_data/*.xml')
    if not xml_files:
        print("### No XML files to process.")
        exit(1)

    output_start = "#" * 10 + " " * 2
    for xml_file in xml_files:
        print("\n{}STARTING FILE '{}'.".format(output_start, xml_file[2:]))
        tree = xml.etree.ElementTree.parse(xml_file)
        events_root = tree.getroot()
        analyze_one_file(events_root)
        print("\n{}END OF FILE '{}'.".format(output_start, xml_file[2:]))
        # Free the parsed tree between files (kept alive for the last one,
        # matching the original behaviour).
        if xml_file != xml_files[-1]:
            del events_root
            del tree
def analyze_one_file(events_root) -> None:
""" Main analysis. Go thru one file, compile statistics on contents.
Args:
events_root (object): root of the current XML node tree.
Returns:
none.
Raises:
none.
"""
# Get tag_root, the start of the tag for each node in this XML tree.
tag_root = events_root[0].tag.replace("Event", "")
# To count children of level 2 node "event_node" (tag = tag_root+"Event").
count_children = False
if count_children:
count_branch = {"Count": 0}
# Nodes from subtree of level 3 node "sys_root".
sys_nodes = {"EventID", "Version", "Level", "Task", "Opcode", "Keywords",
"Channel", "Computer"}
# Fields of "Provider" node in subtree of level 3 node "sys_root".
provider_fields = {"Name", "Guid", "EventSourceName"}
# Nodes from subtree of level 3 node "render_root".
render_nodes = {"Level", "Task", "Opcode", "Channel", "Provider"}
# Map names in "sys_nodes" and "render_nodes" to Event Viewer field names.
view_name = {"Provider": "Provider", "Channel": "Log Name"}
for node in sys_nodes:
if node != "Channel":
view_name[node] = node
# Map names in "provider_fields" to field names seen in Event Viewer.
view_name["Name"] = "Provider"
view_name["Guid"] = "Guid"
view_name["EventSourceName"] = "Source Name"
# Map names in "security_node" to field names seen in Event Viewer.
view_name["UserID"] = "User Name"
# Event summary. The keys are values of "view_name".
event_summary = dict.fromkeys(view_name.values())
# Where we compile event statistics.
event_stats = {}
# Iterate over all records in the exported XML file.
for event_node in events_root:
# Count children of level 2 node "event_node" (tag = tag_root+"Event").
if count_children:
count_branch["Count"] += 1
for child_node in event_node:
branch = child_node.tag
if branch not in count_branch:
count_branch[branch] = 1
else:
count_branch[branch] += 1
# The level 2 node "event_node" can have children with these tags:
# tag_root+"EventData", tag_root+"RenderingInfo", tag_root+"System",
# and tag_root+"UserData"
# Each event always has a child with tag = tag_root+"System".
# Each event always has a child with tag = tag_root+"EventData"
# or tag = tag_root+"UserData".
# The level 3 node "sys_root", with tag = tag_root+"System".
sys_root = event_node.find(tag_root + "System")
# Get info from child nodes of level 3 node "sys_root".
for node in sys_nodes:
event_summary[view_name[node]] = find_field(sys_root, node,
tag_root)
# Fields of the "Provider" node. | provider_node = sys_root.find(tag_root + "Provider")
for field in provider_fields:
event_summary[view_name[field]] = sanitize(provider_node.get(field))
# Fields of the "Security" node.
security_node = sys_root.find(tag_root + "Security")
event_summary["User Name"] = get_user_name(security_node.get("UserID"))
# Level 3 node "render_root" (tag=tag_root+"RenderingInfo").
render_root = event_node.find(tag_root + "RenderingInfo")
if render_root is not None:
# Get info from child nodes of level 3 node "render_root".
for node in render_nodes:
value = sanitize(find_field(render_root, node, tag_root))
if value != "None":
event_summary[view_name[node]] = value
# Fields of the "Keywords" node.
keywords_node = render_root.find(tag_root + "Keywords")
value = ""
if keywords_node is not None:
for keyword in keywords_node:
text = sanitize(keyword.text)
if text != "None":
if value == "":
value = text
else:
value += " " + text
if value != "":
event_summary["Keywords"] = value
# Translating int to str not done in "render_root", or no "render_root".
event_summary["Opcode"] = opcode_name(event_summary["Opcode"])
event_summary["Level"] = level_name(event_summary["Level"])
event_summary["Keywords"] = keywords_name(event_summary["Keywords"])
# print(event_summary)
if frozendict(event_summary) in event_stats.keys():
event_stats[frozendict(event_summary)] += 1
else:
event_stats[frozendict(event_summary)] = 1
# The count of the children of level 2 node "event_node".
if count_children:
print(count_branch)
# Print event stats
for event_summary, count in sorted(event_stats.items(), reverse=True,
key=lambda item: item[1]):
print("\n## {} occurrences of this event:".format(count))
for key, value in event_summary.items():
print(key + ": " + value)
# End of function analyze_one_file.
def find_field(child, field_name: str, tag_root: str) -> str:
""" Fetch specific fields of the child nodes of current the XML node.
Args:
child (object): child of node that may have field = field_name.
field_name (str): name of field of XML node.
tag_root (str): start of tag of each XML node in tree.
Returns:
text (str): text of the field with "field_name".
Raises:
none.
"""
field = child.find(tag_root + field_name)
if field is None:
return ""
else:
return sanitize(field.text)
# End of function find_field.
def sanitize(this) -> str:
""" Convert object to string.
Args:
this (object).
Returns:
str(this) (str)
Raises:
none.
"""
if this is None:
return "None"
else:
return str(this)
# End of function sanitize.
def get_user_name(sid) -> str:
""" Translate from User SID to User Name.
Args:
PySID (object): contains a user's SID
(See http://timgolden.me.uk/pywin32-docs/win32security.html).
Returns:
username (str): Windows user name with argument's SID.
Raises:
none.
"""
if sid is None:
return "None"
else:
py_sid = win32security.GetBinarySid(sid)
return win32security.LookupAccountSid(None, py_sid)[0]
# End of function get_user_name.
def level_name(level: str) -> str:
""" Translate 'Level' Event Log field from int to descriptive string.
Args:
level (str(int)): severity level of event.
Returns:
severity (str).
Raises:
none.
"""
name = {"0": "Information",
"1": "Critical",
"2": "Error",
"3": "Warning",
"4": "Information",
"5": "Verbose"}
if level in name.keys():
return name[level]
else:
return sanitize(level)
# End of function level_name.
def opcode_name(opcode: str) -> str:
""" Translate 'Opcode' Event Log field from int to descriptive string.
Args:
Opcode (str(int)): event operation code.
Returns:
operation description (string).
Raises:
none.
"""
""" Obtained by correlating values of 'Opcode' in the System and
RenderingInfo subtrees.
Made sure each value in System subtree always associated with same value
in RenderingInfo subtree (not true of 'Task' field!).
Sometimes two values in System subtree have same string in RenderingInfo
subtree, these repetitions are not typos.
"""
name = {"": "Info", "0": "Info",
"1": "Start", "2": "Stop",
"12": "Download", "13": "Installation",
"62": "ServiceStart", "63": "ServiceStop",
"68": "ServiceStart", "69": "ServiceStop",
"104": "ServiceStopWithRefCount", "129": "ServiceShutdown"}
if opcode in name.keys():
return name[opcode]
else:
return sanitize(opcode)
# End of function opcode_name.
def keywords_name(keywords: str) -> str:
""" Translate 'Keywords' Event Log field from hex to descriptive string.
Args:
keywords (str): hexidecimal string.
Returns:
keywords_name (str): keyword(s) corresponding to hexidecimal arg.
Raises:
none.
"""
""" Obtained by correlating values of 'Keywords' field in the System subtree
with the 'Keywords' subtree in the RenderingInfo subtrees
Made sure each value in System subtree always associated with same value
in RenderingInfo subtree (not true of 'Task' field!).
Sometimes two values in System subtree have same string in RenderingInfo
subtree, repetitions are not typos.
"""
name = {"0x80000000000000": "Classic",
"0x4000400000000001": "Core Events",
"0x4000400000000002": "Helper Class Events",
"0x8000000000000010": "Time",
"0x8000000000000018": "Installation Success",
"0x8000000000000028": "Installation Failure",
"0x8000000000002004": "Download Started",
"0x8000000000002008": "Installation Started",
"0x8001000000000001": "Performance, Response Time",
"0x8080000000000000": "Classic"}
if keywords in name.keys():
return name[keywords]
else:
return sanitize(keywords)
# End of function keywords_name.
def flatten(node, tag_root) -> None:
""" Flattens subtree of a node.
Args:
node (object): XML tree subtree.
tag_root: ?
Returns:
none.
Raises:
none.
"""
""" Demo of flattening the subtree of the given node. Alternative method
of walking node tree. Elegant, but not as efficient.
"""
for child in node.iter():
tag = child.tag.replace(tag_root, "").strip()
child_text = child.text
if child_text is not None:
print(tag + ": " + child_text.strip())
if len(child.attrib):
for key, value in child.attrib.items():
print(tag + "-" + key.strip() + ": " + value.strip())
# End of function flatten.
if __name__ == '__main__':
handle_files() | |
IoTHub_IoTHub_Lifecycle_Test_Suite_should_delete_the_iothub_successfully.nock.js | // This file has been autogenerated.
exports.setEnvironment = function() {
process.env['AZURE_SUBSCRIPTION_ID'] = 'e0b81f36-36ba-44f7-b550-7c9344a35893';
};
exports.scopes = [[function (nock) {
var result =
nock('http://management.azure.com:443')
.delete('/subscriptions/e0b81f36-36ba-44f7-b550-7c9344a35893/resourceGroups/nodetestrg/providers/Microsoft.Devices/IotHubs/nodeTestHub?api-version=2016-02-03')
.reply(202, "null", { 'cache-control': 'no-cache',
pragma: 'no-cache',
'content-length': '4',
'content-type': 'application/json; charset=utf-8',
expires: '-1',
location: 'https://management.azure.com/subscriptions/e0b81f36-36ba-44f7-b550-7c9344a35893/resourceGroups/nodetestrg/providers/Microsoft.Devices/IotHubs/nodeTestHub/operationResults/NTVhMjQ3NWYtNTJkNy00NzIxLWJlMDktYWY4N2NhZTVjZmNi?api-version=2016-02-03',
'retry-after': '15',
'azure-asyncoperation': 'https://management.azure.com/subscriptions/e0b81f36-36ba-44f7-b550-7c9344a35893/resourceGroups/nodetestrg/providers/Microsoft.Devices/IotHubs/nodeTestHub/operationResults/NTVhMjQ3NWYtNTJkNy00NzIxLWJlMDktYWY4N2NhZTVjZmNi?api-version=2016-02-03&asyncinfo',
server: 'Microsoft-HTTPAPI/2.0',
'x-ms-ratelimit-remaining-subscription-writes': '1199',
'x-ms-request-id': '485dfb2b-cd2a-431c-aaf1-739ebdc39efd',
'x-ms-correlation-request-id': '485dfb2b-cd2a-431c-aaf1-739ebdc39efd',
'x-ms-routing-request-id': 'CENTRALUS:20160920T221338Z:485dfb2b-cd2a-431c-aaf1-739ebdc39efd',
'strict-transport-security': 'max-age=31536000; includeSubDomains',
date: 'Tue, 20 Sep 2016 22:13:38 GMT',
connection: 'close' });
return result; },
function (nock) {
var result =
nock('https://management.azure.com:443')
.delete('/subscriptions/e0b81f36-36ba-44f7-b550-7c9344a35893/resourceGroups/nodetestrg/providers/Microsoft.Devices/IotHubs/nodeTestHub?api-version=2016-02-03')
.reply(202, "null", { 'cache-control': 'no-cache',
pragma: 'no-cache',
'content-length': '4',
'content-type': 'application/json; charset=utf-8',
expires: '-1',
location: 'https://management.azure.com/subscriptions/e0b81f36-36ba-44f7-b550-7c9344a35893/resourceGroups/nodetestrg/providers/Microsoft.Devices/IotHubs/nodeTestHub/operationResults/NTVhMjQ3NWYtNTJkNy00NzIxLWJlMDktYWY4N2NhZTVjZmNi?api-version=2016-02-03',
'retry-after': '15',
'azure-asyncoperation': 'https://management.azure.com/subscriptions/e0b81f36-36ba-44f7-b550-7c9344a35893/resourceGroups/nodetestrg/providers/Microsoft.Devices/IotHubs/nodeTestHub/operationResults/NTVhMjQ3NWYtNTJkNy00NzIxLWJlMDktYWY4N2NhZTVjZmNi?api-version=2016-02-03&asyncinfo',
server: 'Microsoft-HTTPAPI/2.0',
'x-ms-ratelimit-remaining-subscription-writes': '1199',
'x-ms-request-id': '485dfb2b-cd2a-431c-aaf1-739ebdc39efd',
'x-ms-correlation-request-id': '485dfb2b-cd2a-431c-aaf1-739ebdc39efd',
'x-ms-routing-request-id': 'CENTRALUS:20160920T221338Z:485dfb2b-cd2a-431c-aaf1-739ebdc39efd',
'strict-transport-security': 'max-age=31536000; includeSubDomains',
date: 'Tue, 20 Sep 2016 22:13:38 GMT',
connection: 'close' });
return result; },
function (nock) {
var result =
nock('http://management.azure.com:443')
.get('/subscriptions/e0b81f36-36ba-44f7-b550-7c9344a35893/resourceGroups/nodetestrg/providers/Microsoft.Devices/IotHubs/nodeTestHub/operationResults/NTVhMjQ3NWYtNTJkNy00NzIxLWJlMDktYWY4N2NhZTVjZmNi?api-version=2016-02-03&asyncinfo')
.reply(200, "{\"status\":\"Running\"}", { 'cache-control': 'no-cache',
pragma: 'no-cache',
'content-length': '20', | expires: '-1',
server: 'Microsoft-HTTPAPI/2.0',
'x-ms-ratelimit-remaining-subscription-reads': '14891',
'x-ms-request-id': 'ca79e1c7-235b-4c1d-b5fe-b12a69c8dbdb',
'x-ms-correlation-request-id': 'ca79e1c7-235b-4c1d-b5fe-b12a69c8dbdb',
'x-ms-routing-request-id': 'CENTRALUS:20160920T221409Z:ca79e1c7-235b-4c1d-b5fe-b12a69c8dbdb',
'strict-transport-security': 'max-age=31536000; includeSubDomains',
date: 'Tue, 20 Sep 2016 22:14:08 GMT',
connection: 'close' });
return result; },
function (nock) {
var result =
nock('https://management.azure.com:443')
.get('/subscriptions/e0b81f36-36ba-44f7-b550-7c9344a35893/resourceGroups/nodetestrg/providers/Microsoft.Devices/IotHubs/nodeTestHub/operationResults/NTVhMjQ3NWYtNTJkNy00NzIxLWJlMDktYWY4N2NhZTVjZmNi?api-version=2016-02-03&asyncinfo')
.reply(200, "{\"status\":\"Running\"}", { 'cache-control': 'no-cache',
pragma: 'no-cache',
'content-length': '20',
'content-type': 'application/json; charset=utf-8',
expires: '-1',
server: 'Microsoft-HTTPAPI/2.0',
'x-ms-ratelimit-remaining-subscription-reads': '14891',
'x-ms-request-id': 'ca79e1c7-235b-4c1d-b5fe-b12a69c8dbdb',
'x-ms-correlation-request-id': 'ca79e1c7-235b-4c1d-b5fe-b12a69c8dbdb',
'x-ms-routing-request-id': 'CENTRALUS:20160920T221409Z:ca79e1c7-235b-4c1d-b5fe-b12a69c8dbdb',
'strict-transport-security': 'max-age=31536000; includeSubDomains',
date: 'Tue, 20 Sep 2016 22:14:08 GMT',
connection: 'close' });
return result; },
function (nock) {
var result =
nock('http://management.azure.com:443')
.get('/subscriptions/e0b81f36-36ba-44f7-b550-7c9344a35893/resourceGroups/nodetestrg/providers/Microsoft.Devices/IotHubs/nodeTestHub/operationResults/NTVhMjQ3NWYtNTJkNy00NzIxLWJlMDktYWY4N2NhZTVjZmNi?api-version=2016-02-03&asyncinfo')
.reply(200, "{\"status\":\"Succeeded\"}", { 'cache-control': 'no-cache',
pragma: 'no-cache',
'content-length': '22',
'content-type': 'application/json; charset=utf-8',
expires: '-1',
server: 'Microsoft-HTTPAPI/2.0',
'x-ms-ratelimit-remaining-subscription-reads': '14987',
'x-ms-request-id': '9efabe0a-1f98-40d0-9449-c16628f61af7',
'x-ms-correlation-request-id': '9efabe0a-1f98-40d0-9449-c16628f61af7',
'x-ms-routing-request-id': 'CENTRALUS:20160920T221439Z:9efabe0a-1f98-40d0-9449-c16628f61af7',
'strict-transport-security': 'max-age=31536000; includeSubDomains',
date: 'Tue, 20 Sep 2016 22:14:39 GMT',
connection: 'close' });
return result; },
function (nock) {
var result =
nock('https://management.azure.com:443')
.get('/subscriptions/e0b81f36-36ba-44f7-b550-7c9344a35893/resourceGroups/nodetestrg/providers/Microsoft.Devices/IotHubs/nodeTestHub/operationResults/NTVhMjQ3NWYtNTJkNy00NzIxLWJlMDktYWY4N2NhZTVjZmNi?api-version=2016-02-03&asyncinfo')
.reply(200, "{\"status\":\"Succeeded\"}", { 'cache-control': 'no-cache',
pragma: 'no-cache',
'content-length': '22',
'content-type': 'application/json; charset=utf-8',
expires: '-1',
server: 'Microsoft-HTTPAPI/2.0',
'x-ms-ratelimit-remaining-subscription-reads': '14987',
'x-ms-request-id': '9efabe0a-1f98-40d0-9449-c16628f61af7',
'x-ms-correlation-request-id': '9efabe0a-1f98-40d0-9449-c16628f61af7',
'x-ms-routing-request-id': 'CENTRALUS:20160920T221439Z:9efabe0a-1f98-40d0-9449-c16628f61af7',
'strict-transport-security': 'max-age=31536000; includeSubDomains',
date: 'Tue, 20 Sep 2016 22:14:39 GMT',
connection: 'close' });
return result; }]]; | 'content-type': 'application/json; charset=utf-8', |
analysis.rs | // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use super::diagnostics::DenoDiagnostic;
use super::documents::Documents;
use super::language_server;
use super::tsc;
use crate::config_file::LintConfig;
use crate::tools::lint::create_linter;
use crate::tools::lint::get_configured_rules;
use deno_ast::SourceTextInfo;
use deno_core::anyhow::anyhow;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::serde::Deserialize;
use deno_core::serde_json::json;
use deno_core::ModuleSpecifier;
use lspower::lsp;
use lspower::lsp::Position;
use lspower::lsp::Range;
use once_cell::sync::Lazy;
use regex::Regex;
use std::cmp::Ordering;
use std::collections::HashMap;
/// Diagnostic error codes which actually are the same, and so when grouping
/// fixes we treat them the same.
static FIX_ALL_ERROR_CODES: Lazy<HashMap<&'static str, &'static str>> =
Lazy::new(|| {
(&[("2339", "2339"), ("2345", "2339")])
.iter()
.cloned()
.collect()
});
/// Fixes which help determine if there is a preferred fix when there are
/// multiple fixes available.
static PREFERRED_FIXES: Lazy<HashMap<&'static str, (u32, bool)>> =
Lazy::new(|| {
(&[
("annotateWithTypeFromJSDoc", (1, false)),
("constructorForDerivedNeedSuperCall", (1, false)),
("extendsInterfaceBecomesImplements", (1, false)),
("awaitInSyncFunction", (1, false)),
("classIncorrectlyImplementsInterface", (3, false)),
("classDoesntImplementInheritedAbstractMember", (3, false)),
("unreachableCode", (1, false)),
("unusedIdentifier", (1, false)),
("forgottenThisPropertyAccess", (1, false)),
("spelling", (2, false)),
("addMissingAwait", (1, false)),
("fixImport", (0, true)),
])
.iter()
.cloned()
.collect()
});
static IMPORT_SPECIFIER_RE: Lazy<Regex> =
Lazy::new(|| Regex::new(r#"\sfrom\s+["']([^"']*)["']"#).unwrap());
const SUPPORTED_EXTENSIONS: &[&str] = &[".ts", ".tsx", ".js", ".jsx", ".mjs"];
/// Category of self-generated diagnostic messages (those not coming from)
/// TypeScript.
#[derive(Debug, PartialEq, Eq)]
pub enum Category {
/// A lint diagnostic, where the first element is the message.
Lint {
message: String,
code: String,
hint: Option<String>,
},
}
/// A structure to hold a reference to a diagnostic message.
#[derive(Debug, PartialEq, Eq)]
pub struct Reference {
category: Category,
range: Range,
}
impl Reference {
pub fn to_diagnostic(&self) -> lsp::Diagnostic {
match &self.category {
Category::Lint {
message,
code,
hint,
} => lsp::Diagnostic {
range: self.range,
severity: Some(lsp::DiagnosticSeverity::WARNING),
code: Some(lsp::NumberOrString::String(code.to_string())),
code_description: None,
source: Some("deno-lint".to_string()),
message: {
let mut msg = message.to_string();
if let Some(hint) = hint {
msg.push('\n');
msg.push_str(hint);
}
msg
},
related_information: None,
tags: None, // we should tag unused code
data: None,
},
}
}
}
fn as_lsp_range(range: &deno_lint::diagnostic::Range) -> Range {
Range {
start: Position {
line: range.start.line_index as u32,
character: range.start.column_index as u32,
},
end: Position {
line: range.end.line_index as u32,
character: range.end.column_index as u32,
},
}
}
pub fn get_lint_references(
parsed_source: &deno_ast::ParsedSource,
maybe_lint_config: Option<&LintConfig>,
) -> Result<Vec<Reference>, AnyError> {
let lint_rules = get_configured_rules(maybe_lint_config, None, None, None)?;
let linter = create_linter(parsed_source.media_type(), lint_rules);
let lint_diagnostics = linter.lint_with_ast(parsed_source);
Ok(
lint_diagnostics
.into_iter()
.map(|d| Reference {
category: Category::Lint {
message: d.message,
code: d.code,
hint: d.hint,
},
range: as_lsp_range(&d.range),
})
.collect(),
)
}
fn code_as_string(code: &Option<lsp::NumberOrString>) -> String {
match code {
Some(lsp::NumberOrString::String(str)) => str.clone(),
Some(lsp::NumberOrString::Number(num)) => num.to_string(),
_ => "".to_string(),
}
}
/// Iterate over the supported extensions, concatenating the extension on the
/// specifier, returning the first specifier that is resolve-able, otherwise
/// None if none match.
fn check_specifier(
specifier: &str,
referrer: &ModuleSpecifier,
documents: &Documents,
) -> Option<String> {
for ext in SUPPORTED_EXTENSIONS {
let specifier_with_ext = format!("{}{}", specifier, ext);
if documents.contains_import(&specifier_with_ext, referrer) {
return Some(specifier_with_ext);
}
}
None
}
/// For a set of tsc changes, can them for any that contain something that looks
/// like an import and rewrite the import specifier to include the extension
pub fn fix_ts_import_changes(
referrer: &ModuleSpecifier,
changes: &[tsc::FileTextChanges],
documents: &Documents,
) -> Result<Vec<tsc::FileTextChanges>, AnyError> {
let mut r = Vec::new();
for change in changes {
let mut text_changes = Vec::new();
for text_change in &change.text_changes {
if let Some(captures) =
IMPORT_SPECIFIER_RE.captures(&text_change.new_text)
{
let specifier = captures
.get(1)
.ok_or_else(|| anyhow!("Missing capture."))?
.as_str();
if let Some(new_specifier) =
check_specifier(specifier, referrer, documents)
{
let new_text =
text_change.new_text.replace(specifier, &new_specifier);
text_changes.push(tsc::TextChange {
span: text_change.span.clone(),
new_text,
});
} else {
text_changes.push(text_change.clone());
}
} else {
text_changes.push(text_change.clone());
}
}
r.push(tsc::FileTextChanges {
file_name: change.file_name.clone(),
text_changes,
is_new_file: change.is_new_file,
});
}
Ok(r)
}
/// Fix tsc import code actions so that the module specifier is correct for
/// resolution by Deno (includes the extension).
fn fix_ts_import_action(
referrer: &ModuleSpecifier,
action: &tsc::CodeFixAction,
documents: &Documents,
) -> Result<tsc::CodeFixAction, AnyError> {
if action.fix_name == "import" {
let change = action
.changes
.get(0)
.ok_or_else(|| anyhow!("Unexpected action changes."))?;
let text_change = change
.text_changes
.get(0)
.ok_or_else(|| anyhow!("Missing text change."))?;
if let Some(captures) = IMPORT_SPECIFIER_RE.captures(&text_change.new_text)
{
let specifier = captures
.get(1)
.ok_or_else(|| anyhow!("Missing capture."))?
.as_str();
if let Some(new_specifier) =
check_specifier(specifier, referrer, documents)
{
let description = action.description.replace(specifier, &new_specifier);
let changes = action
.changes
.iter()
.map(|c| {
let text_changes = c
.text_changes
.iter()
.map(|tc| tsc::TextChange {
span: tc.span.clone(),
new_text: tc.new_text.replace(specifier, &new_specifier),
})
.collect();
tsc::FileTextChanges {
file_name: c.file_name.clone(),
text_changes,
is_new_file: c.is_new_file,
}
})
.collect();
return Ok(tsc::CodeFixAction {
description,
changes,
commands: None,
fix_name: action.fix_name.clone(),
fix_id: None,
fix_all_description: None,
});
}
}
}
Ok(action.clone())
}
/// Determines if two TypeScript diagnostic codes are effectively equivalent.
fn is_equivalent_code(
a: &Option<lsp::NumberOrString>,
b: &Option<lsp::NumberOrString>,
) -> bool {
let a_code = code_as_string(a);
let b_code = code_as_string(b);
FIX_ALL_ERROR_CODES.get(a_code.as_str())
== FIX_ALL_ERROR_CODES.get(b_code.as_str())
}
/// Return a boolean flag to indicate if the specified action is the preferred
/// action for a given set of actions.
fn is_preferred(
action: &tsc::CodeFixAction,
actions: &[CodeActionKind],
fix_priority: u32,
only_one: bool,
) -> bool {
actions.iter().all(|i| {
if let CodeActionKind::Tsc(_, a) = i {
if action == a {
return true;
}
if a.fix_id.is_some() {
return true;
}
if let Some((other_fix_priority, _)) =
PREFERRED_FIXES.get(a.fix_name.as_str())
{
match other_fix_priority.cmp(&fix_priority) {
Ordering::Less => return true,
Ordering::Greater => return false,
Ordering::Equal => (),
}
if only_one && action.fix_name == a.fix_name {
return false;
}
}
true
} else {
true
}
})
}
/// Convert changes returned from a TypeScript quick fix action into edits
/// for an LSP CodeAction.
pub async fn ts_changes_to_edit(
changes: &[tsc::FileTextChanges],
language_server: &language_server::Inner,
) -> Result<Option<lsp::WorkspaceEdit>, AnyError> {
let mut text_document_edits = Vec::new();
for change in changes {
let text_document_edit =
change.to_text_document_edit(language_server).await?;
text_document_edits.push(text_document_edit);
}
Ok(Some(lsp::WorkspaceEdit {
changes: None,
document_changes: Some(lsp::DocumentChanges::Edits(text_document_edits)),
change_annotations: None,
}))
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CodeActionData {
pub specifier: ModuleSpecifier,
pub fix_id: String,
}
#[derive(Debug, Clone)]
enum CodeActionKind {
Deno(lsp::CodeAction),
DenoLint(lsp::CodeAction),
Tsc(lsp::CodeAction, tsc::CodeFixAction),
}
#[derive(Debug, Hash, PartialEq, Eq)]
enum FixAllKind {
Tsc(String),
}
#[derive(Debug, Default)]
pub struct CodeActionCollection {
actions: Vec<CodeActionKind>,
fix_all_actions: HashMap<FixAllKind, CodeActionKind>,
}
impl CodeActionCollection {
pub fn add_deno_fix_action(
&mut self,
specifier: &ModuleSpecifier,
diagnostic: &lsp::Diagnostic,
) -> Result<(), AnyError> {
let code_action = DenoDiagnostic::get_code_action(specifier, diagnostic)?;
self.actions.push(CodeActionKind::Deno(code_action));
Ok(())
}
pub fn add_deno_lint_ignore_action(
&mut self,
specifier: &ModuleSpecifier,
diagnostic: &lsp::Diagnostic,
maybe_text_info: Option<SourceTextInfo>,
maybe_parsed_source: Option<deno_ast::ParsedSource>,
) -> Result<(), AnyError> {
let code = diagnostic
.code
.as_ref()
.map(|v| match v {
lsp::NumberOrString::String(v) => v.to_owned(),
_ => "".to_string(),
})
.unwrap();
let line_content = maybe_text_info.map(|ti| {
ti.line_text(diagnostic.range.start.line as usize)
.to_string()
});
let mut changes = HashMap::new();
changes.insert(
specifier.clone(),
vec![lsp::TextEdit {
new_text: prepend_whitespace(
format!("// deno-lint-ignore {}\n", code),
line_content,
),
range: lsp::Range {
start: lsp::Position {
line: diagnostic.range.start.line,
character: 0,
},
end: lsp::Position {
line: diagnostic.range.start.line,
character: 0,
},
},
}],
);
let ignore_error_action = lsp::CodeAction {
title: format!("Disable {} for this line", code),
kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: Some(vec![diagnostic.clone()]),
command: None,
is_preferred: None,
disabled: None,
data: None,
edit: Some(lsp::WorkspaceEdit {
changes: Some(changes),
change_annotations: None,
document_changes: None,
}),
};
self
.actions
.push(CodeActionKind::DenoLint(ignore_error_action));
// Disable a lint error for the entire file.
let maybe_ignore_comment = maybe_parsed_source.clone().and_then(|ps| {
// Note: we can use ps.get_leading_comments() but it doesn't
// work when shebang is present at the top of the file.
ps.comments().get_vec().iter().find_map(|c| {
let comment_text = c.text.trim();
comment_text.split_whitespace().next().and_then(|prefix| {
if prefix == "deno-lint-ignore-file" {
Some(c.clone())
} else {
None
}
})
})
});
let mut new_text = format!("// deno-lint-ignore-file {}\n", code);
let mut range = lsp::Range {
start: lsp::Position {
line: 0,
character: 0,
},
end: lsp::Position {
line: 0,
character: 0,
},
};
// If ignore file comment already exists, append the lint code
// to the existing comment.
if let Some(ignore_comment) = maybe_ignore_comment {
new_text = format!(" {}", code);
// Get the end position of the comment.
let line = maybe_parsed_source
.unwrap()
.source()
.line_and_column_index(ignore_comment.span.hi());
let position = lsp::Position {
line: line.line_index as u32,
character: line.column_index as u32,
};
// Set the edit range to the end of the comment.
range.start = position;
range.end = position;
}
let mut changes = HashMap::new();
changes.insert(specifier.clone(), vec![lsp::TextEdit { new_text, range }]);
let ignore_file_action = lsp::CodeAction {
title: format!("Disable {} for the entire file", code),
kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: Some(vec![diagnostic.clone()]),
command: None,
is_preferred: None,
disabled: None,
data: None,
edit: Some(lsp::WorkspaceEdit {
changes: Some(changes),
change_annotations: None,
document_changes: None,
}),
};
self
.actions
.push(CodeActionKind::DenoLint(ignore_file_action));
let mut changes = HashMap::new();
changes.insert(
specifier.clone(),
vec![lsp::TextEdit {
new_text: "// deno-lint-ignore-file\n".to_string(),
range: lsp::Range {
start: lsp::Position {
line: 0,
character: 0,
},
end: lsp::Position {
line: 0,
character: 0,
},
},
}],
);
let ignore_file_action = lsp::CodeAction {
title: "Ignore lint errors for the entire file".to_string(),
kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: Some(vec![diagnostic.clone()]),
command: None,
is_preferred: None,
disabled: None,
data: None,
edit: Some(lsp::WorkspaceEdit {
changes: Some(changes),
change_annotations: None,
document_changes: None,
}),
};
self
.actions
.push(CodeActionKind::DenoLint(ignore_file_action));
Ok(())
}
/// Add a TypeScript code fix action to the code actions collection.
pub async fn add_ts_fix_action(
&mut self,
specifier: &ModuleSpecifier,
action: &tsc::CodeFixAction,
diagnostic: &lsp::Diagnostic,
language_server: &language_server::Inner,
) -> Result<(), AnyError> {
if action.commands.is_some() {
// In theory, tsc can return actions that require "commands" to be applied
// back into TypeScript. Currently there is only one command, `install
// package` but Deno doesn't support that. The problem is that the
// `.applyCodeActionCommand()` returns a promise, and with the current way
// we wrap tsc, we can't handle the asynchronous response, so it is
// actually easier to return errors if we ever encounter one of these,
// which we really wouldn't expect from the Deno lsp.
return Err(custom_error(
"UnsupportedFix",
"The action returned from TypeScript is unsupported.",
));
}
let action =
fix_ts_import_action(specifier, action, &language_server.documents)?;
let edit = ts_changes_to_edit(&action.changes, language_server).await?;
let code_action = lsp::CodeAction {
title: action.description.clone(),
kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: Some(vec![diagnostic.clone()]),
edit,
command: None,
is_preferred: None,
disabled: None,
data: None,
};
self.actions.retain(|i| match i {
CodeActionKind::Tsc(c, a) => {
!(action.fix_name == a.fix_name && code_action.edit == c.edit)
}
_ => true,
});
self
.actions
.push(CodeActionKind::Tsc(code_action, action.clone()));
if let Some(fix_id) = &action.fix_id {
if let Some(CodeActionKind::Tsc(existing_fix_all, existing_action)) =
self.fix_all_actions.get(&FixAllKind::Tsc(fix_id.clone()))
{
self.actions.retain(|i| match i {
CodeActionKind::Tsc(c, _) => c != existing_fix_all,
_ => true,
});
self.actions.push(CodeActionKind::Tsc(
existing_fix_all.clone(),
existing_action.clone(),
));
}
}
Ok(())
}
/// Add a TypeScript action to the actions as a "fix all" action, where it
/// will fix all occurrences of the diagnostic in the file.
pub fn add_ts_fix_all_action(
&mut self,
action: &tsc::CodeFixAction,
specifier: &ModuleSpecifier,
diagnostic: &lsp::Diagnostic,
) {
let data = Some(json!({
"specifier": specifier,
"fixId": action.fix_id,
}));
let title = if let Some(description) = &action.fix_all_description {
description.clone()
} else {
format!("{} (Fix all in file)", action.description)
};
let code_action = lsp::CodeAction {
title,
kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: Some(vec![diagnostic.clone()]),
edit: None,
command: None,
is_preferred: None,
disabled: None,
data,
};
if let Some(CodeActionKind::Tsc(existing, _)) = self
.fix_all_actions
.get(&FixAllKind::Tsc(action.fix_id.clone().unwrap()))
{
self.actions.retain(|i| match i {
CodeActionKind::Tsc(c, _) => c != existing,
_ => true,
});
}
self
.actions
.push(CodeActionKind::Tsc(code_action.clone(), action.clone()));
self.fix_all_actions.insert(
FixAllKind::Tsc(action.fix_id.clone().unwrap()),
CodeActionKind::Tsc(code_action, action.clone()),
);
}
/// Move out the code actions and return them as a `CodeActionResponse`.
pub fn get_response(self) -> lsp::CodeActionResponse {
self
.actions
.into_iter()
.map(|i| match i {
CodeActionKind::Tsc(c, _) => lsp::CodeActionOrCommand::CodeAction(c),
CodeActionKind::Deno(c) => lsp::CodeActionOrCommand::CodeAction(c),
CodeActionKind::DenoLint(c) => lsp::CodeActionOrCommand::CodeAction(c),
})
.collect()
}
/// Determine if a action can be converted into a "fix all" action.
pub fn is_fix_all_action(
&self,
action: &tsc::CodeFixAction,
diagnostic: &lsp::Diagnostic,
file_diagnostics: &[lsp::Diagnostic],
) -> bool {
// If the action does not have a fix id (indicating it can be "bundled up")
// or if the collection already contains a "bundled" action return false
if action.fix_id.is_none()
|| self
.fix_all_actions
.contains_key(&FixAllKind::Tsc(action.fix_id.clone().unwrap()))
{
false
} else {
// else iterate over the diagnostic in the file and see if there are any
// other diagnostics that could be bundled together in a "fix all" code
// action
file_diagnostics.iter().any(|d| {
if d == diagnostic || d.code.is_none() || diagnostic.code.is_none() {
false
} else {
d.code == diagnostic.code
|| is_equivalent_code(&d.code, &diagnostic.code)
}
})
}
}
  /// Set the `.is_preferred` flag on code actions, this should be only executed
  /// when all actions are added to the collection.
  pub fn set_preferred_fixes(&mut self) {
    // Snapshot the collection so `is_preferred` can compare each action
    // against all the others while we mutate the originals in place.
    let actions = self.actions.clone();
    for entry in self.actions.iter_mut() {
      if let CodeActionKind::Tsc(code_action, action) = entry {
        // "Fix all" style actions (those carrying a fix id) never get the
        // preferred flag.
        if action.fix_id.is_some() {
          continue;
        }
        // Only fix names present in the PREFERRED_FIXES table are eligible.
        if let Some((fix_priority, only_one)) =
          PREFERRED_FIXES.get(action.fix_name.as_str())
        {
          code_action.is_preferred =
            Some(is_preferred(action, &actions, *fix_priority, *only_one));
        }
      }
    }
  }
}
/// Prepend the whitespace characters found at the start of line_content to content.
///
/// Fixed: the previous implementation used `chars().position()` (a *char*
/// count) as a *byte* index into the string, which slices at the wrong place
/// — or panics on a non-boundary — whenever the leading whitespace contains
/// multi-byte characters (e.g. U+3000).
fn prepend_whitespace(content: String, line_content: Option<String>) -> String {
  match line_content {
    Some(line) => {
      // Collect the leading whitespace run directly; this is Unicode-safe.
      let leading: String =
        line.chars().take_while(|c| c.is_whitespace()).collect();
      format!("{}{}", leading, content)
    }
    None => content,
  }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn | () {
let range = Range {
start: Position {
line: 1,
character: 1,
},
end: Position {
line: 2,
character: 2,
},
};
let test_cases = [
(
Reference {
category: Category::Lint {
message: "message1".to_string(),
code: "code1".to_string(),
hint: None,
},
range,
},
lsp::Diagnostic {
range,
severity: Some(lsp::DiagnosticSeverity::WARNING),
code: Some(lsp::NumberOrString::String("code1".to_string())),
source: Some("deno-lint".to_string()),
message: "message1".to_string(),
..Default::default()
},
),
(
Reference {
category: Category::Lint {
message: "message2".to_string(),
code: "code2".to_string(),
hint: Some("hint2".to_string()),
},
range,
},
lsp::Diagnostic {
range,
severity: Some(lsp::DiagnosticSeverity::WARNING),
code: Some(lsp::NumberOrString::String("code2".to_string())),
source: Some("deno-lint".to_string()),
message: "message2\nhint2".to_string(),
..Default::default()
},
),
];
for (input, expected) in test_cases.iter() {
let actual = input.to_diagnostic();
assert_eq!(&actual, expected);
}
}
#[test]
fn test_as_lsp_range() {
let fixture = deno_lint::diagnostic::Range {
start: deno_lint::diagnostic::Position {
line_index: 0,
column_index: 2,
byte_pos: 23,
},
end: deno_lint::diagnostic::Position {
line_index: 1,
column_index: 0,
byte_pos: 33,
},
};
let actual = as_lsp_range(&fixture);
assert_eq!(
actual,
lsp::Range {
start: lsp::Position {
line: 0,
character: 2,
},
end: lsp::Position {
line: 1,
character: 0,
},
}
);
}
}
| test_reference_to_diagnostic |
try:
    import logo as logo_print
except ModuleNotFoundError:
    # The logo module is optional; ask the user whether to continue without it.
    missingfile = str(input("The program is missing a file. Continue anyways? "))
    # Fixed: the original `== "yes" or "y" or "yea"` was always truthy, and
    # `os.exit(0)` does not exist (and `os` is not even imported yet).
    if missingfile.lower() not in ("yes", "y", "yea"):
        raise SystemExit(0)
try:
    from bs4 import BeautifulSoup
    import requests, time, re, os, random
    from termcolor import colored
    from colorama import init
except (ModuleNotFoundError, ImportError):
    # Fixed: `except ModuleNotFoundError and ImportError` evaluated to just
    # ModuleNotFoundError; a tuple is required to catch either exception.
    print("The program is missing essential libraries. Read the Github's tutorial how to install all the libraries.")
    # Fixed: `os.exit(0)` does not exist (and os may not be imported here).
    raise SystemExit(0)
os.system("mode con cols=150 lines=75")
decision = ''
init()
colors = ['red', 'green', 'yellow', 'blue', 'magenta', 'cyan']
# DEFS BELOW
def print_status():
    """Fetch robloxforum.com and print the current activity snapshot.

    In TRACK mode (``decision`` != 'sample') this waits ``a`` seconds first
    and, when the user opted in via ``b``/``c``, appends the snapshot to
    log.txt.  In SAMPLE mode it prints once and never logs.
    """
    obj = time.localtime()
    currentime = time.asctime(obj)
    if decision.lower() == 'sample':
        pass
    else:
        print(decision)
        time.sleep(a)
    source = requests.get('https://www.robloxforum.com').text
    soup = BeautifulSoup(source, 'lxml')
    stringy = soup.find('span', class_='block-footer-counter').text
    # Fixed: a missing comma after 'username--style10' silently concatenated
    # it with 'username--style11' into one bogus class name, so style-11
    # users were never matched.
    usernames = soup.find_all('span', class_=[
        'username--style1', 'username--style2', 'username--style3',
        'username--style4', 'username--style5', 'username--style6',
        'username--style7', 'username--style8', 'username--style9',
        'username--style10', 'username--style11'])
    whitespace_remove = stringy.replace(' Robots', "Robots")
    print(currentime)
    print(whitespace_remove)
    for span in usernames:
        attr = span.attrs['class']
        # The trailing digits of the username--styleN class encode the role.
        numbas = re.findall(r'\d+', str(attr))
        if numbas[0] == "2":
            print(span.text)
        elif numbas[0] == "3":
            print(colored(span.text, 'red', attrs=['bold']))
        elif numbas[0] == "4":
            print(colored(span.text, 'blue', attrs=['bold']))
        elif numbas[0] == "6":
            print(colored(span.text, 'green', attrs=['bold']))
        elif numbas[0] == "7":
            print(colored(span.text, 'green'))
        elif numbas[0] == "8":
            print(colored(span.text, 'blue'))
        elif numbas[0] == "9":
            print(colored(span.text, 'yellow'))
        elif numbas[0] == "10":
            # Banned users are rendered struck through.
            def strike(text):
                return ''.join([u'\u0336{}'.format(c) for c in text])
            black = (colored(span.text, 'yellow'))
            print(strike(black))
        elif numbas[0] == "11":
            print(colored(span.text, 'blue', attrs=['bold']))
    print('\n')
    # Fixed: `decision == 'SAMPLE' or 'sample'` was always truthy, so the
    # logging branch below was unreachable.
    if decision.lower() == 'sample':
        print()
    else:
        # Fixed: `b.lower() == "y" or "yes" or "yea"` was always truthy.
        if b.lower() in ("y", "yes", "yea"):
            with open("log.txt", "a") as o:
                encoded_string = stringy.encode("ascii", "ignore")
                decode_string = encoded_string.decode()
                whitespace_remove = decode_string.replace(' Robots', "Robots")
                o.write(whitespace_remove)
                if c.lower() in ("y", "yes", "yea"):
                    o.write(currentime + '\n')
                for span in usernames:
                    attr = span.attrs['class']
                    numbas = re.findall(r'\d+', str(attr))
                    sp = span.text
                    obj = time.localtime()
                    currentime = time.asctime(obj)
                    if c.lower() in ("y", "yes", "yea"):
                        if numbas[0] == "2":
                            o.write(sp + " | normal user")
                            o.write('\n')
                        elif numbas[0] == "3":
                            o.write(sp + " | administrator")
                            o.write('\n')
                        elif numbas[0] == "4":
                            o.write(sp + " | moderator")
                            o.write('\n')
                        elif numbas[0] == "6":
                            o.write(sp + " | verified")
                            o.write('\n')
                        elif numbas[0] == "7":
                            o.write(sp + " | vip")
                            o.write('\n')
                        elif numbas[0] == "8":
                            o.write(sp + " | pro")
                            o.write('\n')
                        elif numbas[0] == "9":
                            o.write(sp + " | ultra")
                            o.write('\n')
                        elif numbas[0] == "10":
                            o.write(sp + " | banned")
                            o.write('\n')
                o.write('\n')
def run():
    """Print site-status updates forever (until interrupted)."""
    while True:
        print_status()
# DEFS ABOVE
try:
    print(colored(logo_print.final_str, random.choice(colors)))
except (ModuleNotFoundError, NameError):
    # Fixed: when the optional `logo` module failed to import at startup,
    # `logo_print` is unbound and a NameError (not ModuleNotFoundError)
    # is raised here.
    pass
print(colored("RF trackbot - credits to MATIEO33", 'blue'))
print(colored("RF: https://robloxforum.com/members/matieo33.8832/", 'red'))
print(colored("Github: https://github.com/matieo33", 'green'))
print("Available options: TRACK SAMPLE HELP \n")
if __name__ == '__main__':
    in_menu = 1
    while in_menu == 1:
        decision = str(input())
        if decision.lower() == 'help':
            print("I made this bot purely for the purpose of entertainment, and if ever happens - maybe also will come in handy for somebody.")
            print("Wanna help this bot grow? DM me.")
            print('Important: CTRL + C will stop the program entirely! Make sure to answer with "Y" if you wish to save the data to a TXT file.')
            print(
                "TRACK: Prints the activity of the site per amount of seconds you select.")
            print(
                "SAMPLE: Prints the activity of the site one time as an example of the program's work.")
        elif decision.lower() == 'sample':
            print('')
            print_status()
        elif decision.lower() == 'track':
            print('')
            in_menu = 0
        else:
            print("ERROR: unknown command " + "'" + decision + "'")
    a = int(input(
        "Every how much seconds do you wish to recieve updates on the site activity? "))
    b = str(input("Do you wish the data to be saved to a TXT file? "))
    # Fixed: `b.lower() == "y" or "yes" or "yea"` was always truthy, so the
    # follow-up question was asked even when the user declined logging.
    # Both branches then looped on print_status(), so a single loop suffices.
    if b.lower() in ("y", "yes", "yea"):
        c = str(input('Do you wish to include the list of all online users? '))
    while 1:
        print_status()
fileauth.go | // Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package core
import (
"encoding/json"
"fmt"
"github.com/hyperledger/fabric/core/chaincode/shim"
pb "github.com/hyperledger/fabric/protos/peer"
"github.com/PaddlePaddle/PaddleDTX/xdb/blockchain"
"github.com/PaddlePaddle/PaddleDTX/xdb/errorx"
)
// PublishFileAuthApplication add applier's file authorization application into chain
// In order to facilitate the applier or authorizer to query the list of applications,
// the authorization application will be written under the index_fileauth_list of applier and authorizer
func (x *Xdata) PublishFileAuthApplication(stub shim.ChaincodeStubInterface, args []string) pb.Response {
	// get PublishFileAuthApplication
	if len(args) < 1 {
		return shim.Error("invalid arguments. expecting PublishFileAuthOptions")
	}
	// unmarshal opt
	var opt blockchain.PublishFileAuthOptions
	if err := json.Unmarshal([]byte(args[0]), &opt); err != nil {
		return shim.Error(errorx.NewCode(err, errorx.ErrCodeInternal,
			"failed to unmarshal PublishFileAuthOptions").Error())
	}
	fa := opt.FileAuthApplication
	// the signature is checked over the application exactly as submitted,
	// i.e. before the Status field is stamped onto it below
	s, err := json.Marshal(fa)
	if err != nil {
		return shim.Error(errorx.NewCode(err, errorx.ErrCodeInternal, "failed to marshal FileAuthApplication").Error())
	}
	// verify signature by applier's public key
	err = x.checkSign(opt.Signature, fa.Applier, s)
	if err != nil {
		return shim.Error(err.Error())
	}
	// every new application starts out unapproved
	fa.Status = blockchain.FileAuthUnapproved
	// marshal fileAuthApplication
	s, err = json.Marshal(fa)
	if err != nil {
		return shim.Error(errorx.NewCode(err, errorx.ErrCodeInternal,
			"fail to marshal fileAuthApplication").Error())
	}
	// judge if fileAuthIndex exists
	fileAuthIndex := packFileAuthIndex(fa.ID)
	if resp := x.getValue(stub, []string{fileAuthIndex}); len(resp.Payload) != 0 {
		return shim.Error(errorx.New(errorx.ErrCodeAlreadyExists, "duplicated file authID").Error())
	}
	// put index_fileauth into chain
	if resp := x.setValue(stub, []string{fileAuthIndex, string(s)}); resp.Status == shim.ERROR {
		return shim.Error(errorx.New(errorx.ErrCodeWriteBlockchain,
			"failed to set index_fileauth on chain: %s", resp.Message).Error())
	}
	// put index_fileauth_list_applier into chain
	applierListIndex := packFileAuthApplierIndex(fa.Applier, fa.ID, fa.CreateTime)
	if resp := x.setValue(stub, []string{applierListIndex, fa.ID}); resp.Status == shim.ERROR {
		return shim.Error(errorx.New(errorx.ErrCodeWriteBlockchain,
			"failed to set index_fileauth_list_applier on chain: %s", resp.Message).Error())
	}
	// put index_fileauth_list_authorizer into chain
	authorizerListIndex := packFileAuthAuthorizerIndex(fa.Authorizer, fa.ID, fa.CreateTime)
	if resp := x.setValue(stub, []string{authorizerListIndex, fa.ID}); resp.Status == shim.ERROR {
		return shim.Error(errorx.New(errorx.ErrCodeWriteBlockchain,
			"failed to set index_fileauth_list_authorizer on chain: %s", resp.Message).Error())
	}
	// put index_fileauth_list_applier_authorizer into chain
	authListIndex := packApplierAndAuthorizerIndex(fa.Applier, fa.Authorizer, fa.ID, fa.CreateTime)
	if resp := x.setValue(stub, []string{authListIndex, fa.ID}); resp.Status == shim.ERROR {
		return shim.Error(errorx.New(errorx.ErrCodeWriteBlockchain,
			"failed to set index_fileauth_list_applier_authorizer on chain: %s", resp.Message).Error())
	}
	return shim.Success([]byte("OK"))
}
// ConfirmFileAuthApplication is called when the dataOwner node confirms file's authorization
// (thin wrapper over setFileAuthConfirmStatus with isConfirm=true).
func (x *Xdata) ConfirmFileAuthApplication(stub shim.ChaincodeStubInterface, args []string) pb.Response {
	return x.setFileAuthConfirmStatus(stub, args, true)
}
// RejectFileAuthApplication is called when the dataOwner node rejects file's authorization
// (thin wrapper over setFileAuthConfirmStatus with isConfirm=false).
func (x *Xdata) RejectFileAuthApplication(stub shim.ChaincodeStubInterface, args []string) pb.Response {
	return x.setFileAuthConfirmStatus(stub, args, false)
}
// setFileAuthConfirmStatus set file's authorization application status as Approved or Rejected
func (x *Xdata) setFileAuthConfirmStatus(stub shim.ChaincodeStubInterface, args []string, isConfirm bool) pb.Response {
	// get opt
	if len(args) < 1 {
		return shim.Error("invalid arguments. expecting ConfirmFileAuthOptions")
	}
	// unmarshal opt
	var opt blockchain.ConfirmFileAuthOptions
	if err := json.Unmarshal([]byte(args[0]), &opt); err != nil {
		return shim.Error(errorx.NewCode(err, errorx.ErrCodeInternal,
			"failed to unmarshal ConfirmFileAuthOptions").Error())
	}
	// query authorization application detail by authID
	fa, err := x.getFileAuthByID(stub, opt.ID)
	if err != nil {
		return shim.Error(err.Error())
	}
	// verify signature by authorizer's public key; the signed message layout
	// differs between approval (authKey + expire time) and rejection (reason)
	m := fmt.Sprintf("%s,%d,", opt.ID, opt.CurrentTime)
	if isConfirm {
		m += fmt.Sprintf("%x,%d", opt.AuthKey, opt.ExpireTime)
	} else {
		m += opt.RejectReason
	}
	if err := x.checkSign(opt.Signature, fa.Authorizer, []byte(m)); err != nil {
		return shim.Error(err.Error())
	}
	// check status: only a still-unapproved application may be decided
	if fa.Status != blockchain.FileAuthUnapproved {
		return shim.Error(errorx.New(errorx.ErrCodeParam,
			"confirm file auth error, fileAuthStatus is not Unapproved, authID: %s, fileAuthStatus: %s", fa.ID, fa.Status).Error())
	}
	// update authorization status
	fa.ApprovalTime = opt.CurrentTime
	if isConfirm {
		fa.Status = blockchain.FileAuthApproved
		fa.ExpireTime = opt.ExpireTime
		fa.AuthKey = opt.AuthKey
	} else {
		fa.Status = blockchain.FileAuthRejected
		fa.RejectReason = opt.RejectReason
	}
	s, err := json.Marshal(fa)
	if err != nil {
		return shim.Error(errorx.NewCode(err, errorx.ErrCodeInternal, "fail to marshal FileAuthApplication").Error())
	}
	// update index_fileauth on chain
	index := packFileAuthIndex(fa.ID)
	if resp := x.setValue(stub, []string{index, string(s)}); resp.Status == shim.ERROR {
		return shim.Error(errorx.New(errorx.ErrCodeWriteBlockchain,
			"failed to confirm index_fileauth on chain: %s", resp.Message).Error())
	}
	return shim.Success([]byte("OK"))
}
// getFileAuthByID query file's authorization application by authID
func (x *Xdata) getFileAuthByID(stub shim.ChaincodeStubInterface, authID string) (fa blockchain.FileAuthApplication, err error) {
	// Look the application up under its index_fileauth key.
	resp := x.getValue(stub, []string{packFileAuthIndex(authID)})
	if len(resp.Payload) == 0 {
		err = errorx.New(errorx.ErrCodeNotFound, "fileAuthApplication[%s] not found: %s", authID, resp.Message)
		return
	}
	if err = json.Unmarshal(resp.Payload, &fa); err != nil {
		err = errorx.NewCode(err, errorx.ErrCodeInternal,
			"fail to unmarshal FileAuthApplication")
	}
	return
}
// ListFileAuthApplications list the authorization applications of files
// Support query by time range and fileID
func (x *Xdata) ListFileAuthApplications(stub shim.ChaincodeStubInterface, args []string) pb.Response {
if len(args) < 1 {
return shim.Error("invalid arguments. expecting ListFileAuthOptions")
}
// unmarshal opt
var opt blockchain.ListFileAuthOptions
if err := json.Unmarshal([]byte(args[0]), &opt); err != nil {
return shim.Error(errorx.NewCode(err, errorx.ErrCodeInternal,
"failed to unmarshal ListFileAuthOptions").Error())
}
prefix, attr := packFileAuthFilter(opt.Applier, opt.Authorizer)
// get iter by prefix
iterator, err := stub.GetStateByPartialCompositeKey(prefix, attr)
if err != nil {
return shim.Error(err.Error())
}
defer iterator.Close()
// iterate iter
var fas blockchain.FileAuthApplications
for iterator.HasNext() {
if opt.Limit > 0 && int64(len(fas)) >= opt.Limit {
break
}
queryResponse, err := iterator.Next()
if err != nil {
return shim.Error(err.Error())
}
fa, err := x.getFileAuthByID(stub, string(queryResponse.Value))
if err != nil |
if fa.CreateTime < opt.TimeStart || fa.CreateTime > opt.TimeEnd {
continue
}
// If the fileID is not empty, query this fileID's authorization applications
if opt.FileID != "" && opt.FileID != fa.FileID {
continue
}
if opt.Status != "" && opt.Status != fa.Status {
continue
}
fas = append(fas, &fa)
}
s, err := json.Marshal(fas)
if err != nil {
return shim.Error(errorx.NewCode(err, errorx.ErrCodeInternal,
"failed to marshal FileAuthApplications").Error())
}
return shim.Success(s)
}
// GetAuthApplicationByID query authorization application detail by authID
func (x *Xdata) GetAuthApplicationByID(stub shim.ChaincodeStubInterface, args []string) pb.Response {
	if len(args) < 1 {
		return shim.Error("invalid arguments. missing param: fileAuthID")
	}
	// get authorization application detail by index_fileauth
	index := packFileAuthIndex(string(args[0]))
	resp := x.getValue(stub, []string{index})
	if len(resp.Payload) == 0 {
		return shim.Error(errorx.New(errorx.ErrCodeNotFound, "fileAuthApplication not found: %s", resp.Message).Error())
	}
	// the stored payload is already the marshaled application; return as-is
	return shim.Success(resp.Payload)
}
| {
return shim.Error(err.Error())
} |
from django.conf import settings
from django.core.exceptions import ValidationError
from django.utils.translation import gettext as _

import requests
from wagtail.core import blocks
from webstories import Story, StoryPage

from .markup import AMPText
class AMPCleanHTMLBlock(blocks.RawHTMLBlock):
    """RawHTMLBlock whose native value type is AMPText rather than str.

    On clean, the fragment is optionally sanitised via StoryPage's HTML
    cleaner, controlled by the WAGTAIL_WEBSTORIES_CLEAN_HTML setting
    (default: enabled).
    """
    def clean(self, value):
        # Sanitise the AMP fragment unless cleaning is disabled in settings.
        if isinstance(value, AMPText) and getattr(settings, 'WAGTAIL_WEBSTORIES_CLEAN_HTML', True):
            return AMPText(StoryPage.clean_html_fragment(value.source))
        else:
            return value
    def get_default(self):
        # The default may be declared either as AMPText or a plain string.
        if isinstance(self.meta.default, AMPText):
            return self.meta.default
        else:
            return AMPText(self.meta.default)
    def to_python(self, value):
        # The stored representation is a string; wrap it back into AMPText.
        if isinstance(value, AMPText):
            return value
        else:
            return AMPText(value)
    def get_prep_value(self, value):
        # The serialisable value is the raw AMP source string.
        if isinstance(value, AMPText):
            return value.source
        else:
            return value
    def value_for_form(self, value):
        # The form field edits the raw source string.
        if isinstance(value, AMPText):
            return value.source
        else:
            return value
    def value_from_form(self, value):
        return AMPText(value)
class PageBlock(blocks.StructBlock):
    """A single web-story page: its DOM id plus its raw AMP markup."""
    id = blocks.CharBlock()
    html = AMPCleanHTMLBlock()
class StoryChooserBlock(blocks.PageChooserBlock):
    """PageChooserBlock restricted to web-story page models by default,
    rendered as a poster link."""
    def __init__(self, **kwargs):
        has_specified_page_type = kwargs.get('page_type') or kwargs.get('target_model')
        if not has_specified_page_type:
            # allow selecting any page model that inherits from BaseWebStoryPage
            from .models import get_story_page_models
            kwargs['target_model'] = get_story_page_models()
        super().__init__(**kwargs)
    def get_context(self, value, parent_context=None):
        context = super().get_context(value, parent_context=parent_context)
        # Expose the specific (subclassed) page instance to the template.
        context['page'] = value.specific
        return context
    class Meta:
        template = 'wagtail_webstories/blocks/story_poster_link.html'
class StoryEmbedBlock(StoryChooserBlock):
    """StoryChooserBlock rendered as an embedded story rather than a poster link."""
    class Meta:
        template = 'wagtail_webstories/blocks/story_embed_block.html'
class ExternalStoryBlock(blocks.URLBlock):
    """URLBlock whose native value is an ExternalStory fetched from the URL.

    The JSON/DB representation is the plain URL string; conversion to an
    ExternalStory happens in to_python / clean.
    """
    def get_default(self):
        from .models import ExternalStory
        # Allow specifying the default as either an ExternalStory or a URL string (or None).
        if not self.meta.default:
            return None
        elif isinstance(self.meta.default, ExternalStory):
            return self.meta.default
        else:
            # assume default has been passed as a string
            return ExternalStory.get_for_url(self.meta.default)
    def to_python(self, value):
        from .models import ExternalStory
        # The JSON representation of an ExternalStoryBlock value is a URL string;
        # this should be converted to an ExternalStory instance (or None).
        if not value:
            return None
        else:
            return ExternalStory.get_for_url(value)
    def get_prep_value(self, value):
        # serialisable value should be a URL string
        if value is None:
            return ''
        elif isinstance(value, str):
            return value
        else:
            return value.url
    def value_for_form(self, value):
        # the value to be handled by the URLField is a plain URL string (or the empty string)
        if value is None:
            return ''
        elif isinstance(value, str):
            return value
        else:
            return value.url
    def value_from_form(self, value):
        # Keep value as a string, and convert to an ExternalStory during clean
        return value or None
    def clean(self, value):
        from .models import ExternalStory
        value = super().clean(value)
        if value is not None:
            try:
                value = ExternalStory.get_for_url(value)
            except requests.exceptions.RequestException:
                raise ValidationError(_("Could not fetch URL."))
            except Story.InvalidStoryException:
                raise ValidationError(_("URL is not a valid web story."))
        return value
    def get_context(self, value, parent_context=None):
        context = super().get_context(value, parent_context=parent_context)
        # Expose the fetched story to the template.
        context['story'] = value
        return context
    class Meta:
        template = 'wagtail_webstories/blocks/external_story_poster_link.html'
class ExternalStoryEmbedBlock(ExternalStoryBlock):
    """ExternalStoryBlock rendered as an embedded story rather than a poster link."""
    class Meta:
        # Fixed: a stray `from webstories import ...` fragment was fused onto
        # this assignment, making the module unparseable; the import belongs
        # at the top of the file.
        template = 'wagtail_webstories/blocks/external_story_embed_block.html'
|
pam.rs | //! PAMv3 related types.
use super::object::Object;
use bitflags::bitflags;
use std::collections::HashMap;
use std::fmt::Debug;
bitflags! {
    /// Permissions bitmask. Values can be combined with a bitwise OR operation.
    ///
    /// |Name    |Value (binary)|Value (hex)|Value (dec)|Description                                      |
    /// |--------|--------------|-----------|-----------|-------------------------------------------------|
    /// |`READ`  |`0b0000_0001` |`0x01`     |`1`        |Applies to Subscribe, History, Presence, Objects |
    /// |`WRITE` |`0b0000_0010` |`0x02`     |`2`        |Applies to Publish, Objects                      |
    /// |`MANAGE`|`0b0000_0100` |`0x04`     |`4`        |Applies to Channel-Groups, Objects               |
    /// |`DELETE`|`0b0000_1000` |`0x08`     |`8`        |Applies to History                               |
    /// |`CREATE`|`0b0001_0000` |`0x10`     |`16`       |Applies to Objects                               |
    ///
    /// ## Permissions matrix:
    ///
    /// |Resource type|Permission|API                    |Allowances                                         |
    /// |-------------|----------|-----------------------|---------------------------------------------------|
    /// |`channels`   |`READ`    |Subscribe              |Receiving messages on a channel                    |
    /// |`channels`   |`READ`    |Presence Here Now      |Listing UUIDs subscribed to a channel              |
    /// |`channels`   |`READ`    |Presence User State    |Set/get state on a channel                         |
    /// |`channels`   |`READ`    |Push; Add Device       |Adding a device to a channel for push notifications|
    /// |`channels`   |`READ`    |History                |Receiving historical messages on a channel         |
    /// |`channels`   |`DELETE`  |History; Delete        |Deleting historical messages on a channel          |
    /// |`channels`   |`WRITE`   |Publish                |Sending messages on a channel                      |
    /// |`channels`   |`WRITE`   |Signal                 |Sending signals on a channel                       |
    /// |`groups`     |`READ`    |Subscribe              |Receiving messages on a channel-group              |
    /// |`groups`     |`READ`    |Presence Here Now      |Listing UUIDs subscribed to a channel-group        |
    /// |`groups`     |`READ`    |Presence User State    |Set/get state on a channel-group                   |
    /// |`groups`     |`READ`    |Groups; List           |Listing all channels in a channel-group            |
    /// |`groups`     |`MANAGE`  |Groups; Add Channels   |Adding channels to a channel-group                 |
    /// |`groups`     |`MANAGE`  |Groups; Remove Channels|Removing channels from a channel-group             |
    /// |`groups`     |`MANAGE`  |Delete Group           |Deleting a channel-group                           |
    /// |`users`      |`CREATE`  |User; Create           |Creating a user by `UserID`                        |
    /// |`users`      |`DELETE`  |User; Delete           |Deleting a user and all of its space memberships   |
    /// |`users`      |`MANAGE`  |User; Add membership   |Adding space membership for a user                 |
    /// |`users`      |`READ`    |User; Read             |Reading a user's information and space memberships |
    /// |`users`      |`WRITE`   |User; Update           |Updating a user's information                      |
    /// |`spaces`     |`CREATE`  |Space; Create          |Creating a space by `SpaceID`                      |
    /// |`spaces`     |`DELETE`  |Space; Delete          |Deleting a space and all of its members            |
    /// |`spaces`     |`MANAGE`  |Space; Add members     |Adding members to a space                          |
    /// |`spaces`     |`READ`    |Space; Read            |Reading a space's information and member users     |
    /// |`spaces`     |`WRITE`   |Space; Update          |Updating a space's information                     |
    /// |`spaces`     |`MANAGE`  |Space; User Memberships|Adding and removing members from a space           |
    ///
    /// **⚠️ Use of undocumented bitmask values or combinations with resource
    /// types is considered undefined behavior; Using undefined behavior in
    /// grant requests or within tokens passed to any PubNub REST API are
    /// allowed to break in unexpected ways, including spawning
    /// ["nasal demons"](http://www.catb.org/jargon/html/N/nasal-demons.html).**
    // Each constant below is a single distinct bit so values compose with `|`.
    pub struct BitMask: u64 {
        /// Applies to Subscribe, History, Presence, Objects
        const READ = 0b0000_0001;
        /// Applies to Publish, Objects
        const WRITE = 0b0000_0010;
        /// Applies to Channel-Groups, Objects
        const MANAGE = 0b0000_0100;
        /// Applies to History
        const DELETE = 0b0000_1000;
        /// Applies to Objects
        const CREATE = 0b0001_0000;
    }
}
/// The PAMv3 grant request body.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct GrantBody {
    /// The total duration (in minutes) that the token will remain valid
    /// The minimum ttl allowed is 1 minute. The maximum ttl allowed is 43,200
    /// minutes (equivalent to 30 days).
    // TODO: use a constrained type here.
    pub ttl: u32,
    /// Permissions object schema.
    pub permissions: Permissions,
}
/// Grant permissions.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Permissions {
    /// A mapping of resource types to resource IDs.
    pub resources: Resources,
    /// A mapping of resource types to regular expressions.
    pub patterns: Patterns,
    /// The meta mapping is available for arbitrary key-value pairs, to use
    /// as your application sees fit. Beware that the `meta` object is copied
    /// into the token verbatim; potentially being a significant source of
    /// "token bloat".
    ///
    /// This mapping may be used for identity/authentication purposes,
    /// restricting token use (in the "public key use" sense as defined by JWK),
    /// or exclusions/exceptions.
    ///
    /// PubNub reserves all keys beginning with the three-character prefix `pn-`
    /// for future purposes.
    ///
    /// Use of undocumented reserved meta fields is considered undefined
    /// behavior
    pub meta: Object,
}
/// A mapping of resource types to permissions.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct Resources {
    /// A shallow mapping of channel names to permissions.
    pub channels: HashMap<String, BitMask>,
    /// A shallow mapping of channel groups to permissions.
    pub groups: HashMap<String, BitMask>,
    /// A shallow mapping of user IDs to permissions.
    pub users: HashMap<String, BitMask>,
    /// A shallow mapping of space IDs to permissions.
    pub spaces: HashMap<String, BitMask>,
}
type PatternRegex = String;
/// A mapping of resource types as regular expressions to permissions.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct Patterns {
/// A shallow mapping of channel regular expressions to permissions.
pub channels: HashMap<PatternRegex, BitMask>,
/// A shallow mapping of channel-group regular expressions to permissions.
pub groups: HashMap<PatternRegex, BitMask>,
/// A shallow mapping of user ID regular expressions to permissions.
pub users: HashMap<PatternRegex, BitMask>,
/// A shallow mapping of space ID regular expressions to permissions.
pub spaces: HashMap<PatternRegex, BitMask>,
} | /// A shallow mapping of space IDs to permissions. |
_decorators.py | from typing import TypeVar, Callable
import unittest
from ._types import TestMethod
_F = TypeVar("_F", bound=TestMethod)
def test(method: _F) -> _F:
    """Decorator that flags a method as a test method."""
    setattr(method, "_dectest_test", True)
    return method
def before(method: _F) -> _F:
    """Decorator that flags a method as fixture setup.

    Fixture setup methods from base classes are guaranteed to be executed
    before setup methods from derived classes.
    """
    setattr(method, "_dectest_before", True)
    return method
def after(method: _F) -> _F:
    """Decorator that flags a method as fixture teardown.

    Fixture teardown methods from base classes are guaranteed to be executed
    after teardown methods from derived classes.
    """
    setattr(method, "_dectest_after", True)
    return method
def skip(reason: str) -> Callable[[_F], _F]:
    """Unconditionally skip the decorated test.

    This is equivalent to @unittest.skip, but also marks the decorated
    function as a test.
    """
    if not isinstance(reason, str):
        # Guard against the common mistake of writing @skip without a reason.
        raise TypeError("first argument to @skip must be a reason string")

    def decorate(method: _F) -> _F:
        marked = test(method)
        return unittest.skip(reason)(marked)

    return decorate
def skip_if(condition: bool, reason: str) -> Callable[[_F], _F]:
    """Skip the decorated test if condition is true.

    This is equivalent to @unittest.skipIf, but also marks the decorated
    function as a test.
    """

    def decorate(method: _F) -> _F:
        marked = test(method)
        return unittest.skipIf(condition, reason)(marked)

    return decorate
def skip_unless(condition: bool, reason: str) -> Callable[[_F], _F]:
    """Skip the decorated test unless condition is true.

    This is equivalent to @unittest.skipUnless, but also marks the decorated
    function as a test.
    """

    def decorate(method: _F) -> _F:
        marked = test(method)
        return unittest.skipUnless(condition, reason)(marked)

    return decorate
|
parse_llvm_coverage.py | #!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Parse an LLVM coverage report to generate useable results."""
import argparse
import json
import os
import re
import subprocess
import sys
def _fix_filename(filename):
"""Return a filename which we can use to identify the file.
The file paths printed by llvm-cov take the form:
/path/to/repo/out/dir/../../src/filename.cpp
And then they're truncated to 22 characters with leading ellipses:
...../../src/filename.cpp
This makes it really tough to determine whether the file actually belongs in
the Skia repo. This function strips out the leading junk so that, if the file
exists in the repo, the returned string matches the end of some relative path
in the repo. This doesn't guarantee correctness, but it's about as close as
we can get.
"""
return filename.split('..')[-1].lstrip('./')
def _file_in_repo(filename, all_files):
  """Return the name of the checked-in file matching the given filename.

  Use suffix matching to determine which checked-in files the given filename
  matches. If there are no matches or multiple matches, return None.
  """
  new_file = _fix_filename(filename)
  matched = []
  for f in all_files:
    if f.endswith(new_file):
      matched.append(f)
  if len(matched) == 1:
    # Restored: the unique match must be returned; this statement was
    # missing, so the function always fell through and returned None.
    return matched[0]
  elif len(matched) > 1:
    # sys.stderr.write is valid under both Python 2 and 3, unlike the
    # original `print >> sys.stderr` statement (a syntax error in Python 3).
    sys.stderr.write('WARNING: multiple matches for %s; skipping:\n\t%s\n'
                     % (new_file, '\n\t'.join(matched)))
  return None
def _get_per_file_per_line_coverage(report):
  """Return a dict whose keys are file names and values are coverage data.

  Values are lists which take the form (lineno, coverage, code).
  """
  all_files = subprocess.check_output(['git', 'ls-files']).splitlines()
  lines = report.splitlines()
  current_file = None
  file_lines = []
  files = {}
  not_checked_in = '%'  # Use this as the file name for not-checked-in files.
  for line in lines:
    # A line like "path/to/file.cpp:" starts a new per-file section.
    m = re.match('([a-zA-Z0-9\./_-]+):', line)
    if m:
      if current_file and current_file != not_checked_in:
        files[current_file] = file_lines
      match_filename = _file_in_repo(m.groups()[0], all_files)
      current_file = match_filename or not_checked_in
      file_lines = []
    else:
      if current_file != not_checked_in:
        # Skip llvm-cov separator/continuation lines.
        skip = re.match('^\s{2}-+$|^\s{2}\|.+$', line)
        if line and not skip:
          cov, linenum, code = line.split('|', 2)
          cov = cov.strip()
          if cov:
            cov = int(cov)
          else:
            cov = None  # We don't care about coverage for this line.
          linenum = int(linenum.strip())
          assert linenum == len(file_lines) + 1
          # NOTE(review): `code.decode(...)` assumes Python-2 byte strings;
          # under Python 3 `report` would already be text — confirm runtime.
          file_lines.append((linenum, cov, code.decode('utf-8', 'replace')))
  # Fixed: the last file's accumulated lines were never stored, because the
  # flush above only ran when a *new* file header was encountered.
  if current_file and current_file != not_checked_in:
    files[current_file] = file_lines
  return files
def _testname(filename):
"""Transform the file name into an ingestible test name."""
return re.sub(r'[^a-zA-Z0-9]', '_', filename)
def _nanobench_json(results, properties, key):
  """Return the results in JSON format like that produced by nanobench."""
  # Copy the properties first and assign 'key'/'results' afterwards, so a
  # caller-supplied properties dict cannot clobber those two entries.
  rv = dict(properties)
  rv['key'] = key
  coverage_entries = {}
  for percent, not_covered_lines, f in results:
    coverage_entries[_testname(f)] = {
        'coverage': {
            'percent': percent,
            'lines_not_covered': not_covered_lines,
            'options': {
                'fullname': f,
                'dir': os.path.dirname(f),
                'source_type': 'coverage',
            },
        },
    }
  rv['results'] = coverage_entries
  return rv
def _parse_key_value(kv_list):
"""Return a dict whose key/value pairs are derived from the given list.
For example:
['k1', 'v1', 'k2', 'v2']
becomes:
{'k1': 'v1',
'k2': 'v2'}
"""
if len(kv_list) % 2 != 0:
raise Exception('Invalid key/value pairs: %s' % kv_list)
rv = {}
for i in xrange(len(kv_list) / 2):
rv[kv_list[i*2]] = kv_list[i*2+1]
return rv
def _get_per_file_summaries(line_by_line):
"""Summarize the full line-by-line coverage report by file."""
per_file = []
for filepath, lines in line_by_line.iteritems():
total_lines = 0
covered_lines = 0
for _, cov, _ in lines:
if cov is not None:
total_lines += 1
if cov > 0:
covered_lines += 1
if total_lines > 0:
per_file.append((float(covered_lines)/float(total_lines)*100.0,
total_lines - covered_lines,
filepath))
return per_file
def main():
  """Generate useful data from a coverage report.

  Reads an llvm coverage report, optionally dumping line-by-line JSON
  and/or a nanobench-format summary, depending on the flags given.
  """
  # Parse args.
  parser = argparse.ArgumentParser()
  parser.add_argument('--report', help='input file; an llvm coverage report.',
                      required=True)
  parser.add_argument('--nanobench', help='output file for nanobench data.')
  parser.add_argument(
      '--key', metavar='key_or_value', nargs='+',
      help='key/value pairs identifying this bot.')
  parser.add_argument(
      '--properties', metavar='key_or_value', nargs='+',
      help='key/value pairs representing properties of this build.')
  parser.add_argument('--linebyline',
                      help='output file for line-by-line JSON data.')
  args = parser.parse_args()
  # The nanobench output embeds both the bot key and the build properties,
  # so those flags must accompany --nanobench.
  if args.nanobench and not (args.key and args.properties):
    raise Exception('--key and --properties are required with --nanobench')
  with open(args.report) as f:
    report = f.read()
  line_by_line = _get_per_file_per_line_coverage(report)
  if args.linebyline:
    with open(args.linebyline, 'w') as f:
      json.dump(line_by_line, f)
  if args.nanobench:
    # Parse the key and properties for use in the nanobench JSON output.
    key = _parse_key_value(args.key)
    properties = _parse_key_value(args.properties)
    # Get per-file summaries.
    per_file = _get_per_file_summaries(line_by_line)
    # Write results.
    format_results = _nanobench_json(per_file, properties, key)
    with open(args.nanobench, 'w') as f:
      json.dump(format_results, f)
| return matched[0] |
metadata.go | package saml
import (
"encoding/xml"
"time"
"github.com/beevik/etree"
)
// HTTPPostBinding is the official URN for the HTTP-POST binding (transport)
const HTTPPostBinding = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
// HTTPRedirectBinding is the official URN for the HTTP-Redirect binding (transport)
const HTTPRedirectBinding = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
// HTTPArtifactBinding is the official URN for the HTTP-Artifact binding (transport)
const HTTPArtifactBinding = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact"
// SOAPBinding is the official URN for the SOAP binding (transport)
const SOAPBinding = "urn:oasis:names:tc:SAML:2.0:bindings:SOAP"
// EntitiesDescriptor represents the SAML object of the same name.
//
// It is a container that may nest further EntitiesDescriptor elements
// and/or leaf EntityDescriptor elements.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.3.1
type EntitiesDescriptor struct {
	XMLName       xml.Name       `xml:"urn:oasis:names:tc:SAML:2.0:metadata EntitiesDescriptor"`
	ID            *string        `xml:",attr,omitempty"`
	ValidUntil    *time.Time     `xml:"validUntil,attr,omitempty"`
	CacheDuration *time.Duration `xml:"cacheDuration,attr,omitempty"`
	Name          *string        `xml:",attr,omitempty"`
	// Signature holds the raw ds:Signature element, if present.
	Signature           *etree.Element
	EntitiesDescriptors []EntitiesDescriptor `xml:"urn:oasis:names:tc:SAML:2.0:metadata EntitiesDescriptor"`
	EntityDescriptors   []EntityDescriptor   `xml:"urn:oasis:names:tc:SAML:2.0:metadata EntityDescriptor"`
}
// Metadata has been renamed to EntityDescriptor.
//
// This change was made to be consistent with the rest of the API which uses names
// from the SAML specification for types.
//
// This is a tombstone to help you discover this fact. You should update references
// to saml.Metadata to be saml.EntityDescriptor.
//
// Deprecated: use EntityDescriptor instead.
var Metadata = struct{}{}
// EntityDescriptor represents the SAML EntityDescriptor object.
//
// ValidUntil and CacheDuration are serialized through the RelaxedTime and
// Duration wrapper types by the custom MarshalXML/UnmarshalXML methods
// below.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.3.2
type EntityDescriptor struct {
	XMLName       xml.Name      `xml:"urn:oasis:names:tc:SAML:2.0:metadata EntityDescriptor"`
	EntityID      string        `xml:"entityID,attr"`
	ID            string        `xml:",attr,omitempty"`
	ValidUntil    time.Time     `xml:"validUntil,attr,omitempty"`
	CacheDuration time.Duration `xml:"cacheDuration,attr,omitempty"`
	// Signature holds the raw ds:Signature element, if present.
	Signature                     *etree.Element
	RoleDescriptors               []RoleDescriptor               `xml:"RoleDescriptor"`
	IDPSSODescriptors             []IDPSSODescriptor             `xml:"IDPSSODescriptor"`
	SPSSODescriptors              []SPSSODescriptor              `xml:"SPSSODescriptor"`
	AuthnAuthorityDescriptors     []AuthnAuthorityDescriptor     `xml:"AuthnAuthorityDescriptor"`
	AttributeAuthorityDescriptors []AttributeAuthorityDescriptor `xml:"AttributeAuthorityDescriptor"`
	PDPDescriptors                []PDPDescriptor                `xml:"PDPDescriptor"`
	AffiliationDescriptor         *AffiliationDescriptor
	Organization                  *Organization
	ContactPerson                 *ContactPerson
	AdditionalMetadataLocations   []string `xml:"AdditionalMetadataLocation"`
}
// MarshalXML implements xml.Marshaler.
//
// It routes validUntil/cacheDuration through the RelaxedTime and Duration
// wrapper types so those attributes serialize in the expected SAML string
// formats, while the remaining fields marshal via a method-less alias type
// (which avoids infinite recursion back into this method).
func (m EntityDescriptor) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
	type plain EntityDescriptor
	wrapper := struct {
		ValidUntil    RelaxedTime `xml:"validUntil,attr,omitempty"`
		CacheDuration Duration    `xml:"cacheDuration,attr,omitempty"`
		*plain
	}{
		ValidUntil:    RelaxedTime(m.ValidUntil),
		CacheDuration: Duration(m.CacheDuration),
		plain:         (*plain)(&m),
	}
	return e.Encode(&wrapper)
}
// UnmarshalXML implements xml.Unmarshaler
func (m *EntityDescriptor) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
type Alias EntityDescriptor | }{
Alias: (*Alias)(m),
}
if err := d.DecodeElement(aux, &start); err != nil {
return err
}
m.ValidUntil = time.Time(aux.ValidUntil)
m.CacheDuration = time.Duration(aux.CacheDuration)
return nil
}
// Organization represents the SAML Organization object.
//
// All three lists are localized: each entry carries an xml:lang tag.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.3.2.1
type Organization struct {
	OrganizationNames        []LocalizedName `xml:"OrganizationName"`
	OrganizationDisplayNames []LocalizedName `xml:"OrganizationDisplayName"`
	OrganizationURLs         []LocalizedURI  `xml:"OrganizationURL"`
}

// LocalizedName represents the SAML type localizedNameType: a string value
// tagged with an xml:lang language code.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.2.4
type LocalizedName struct {
	Lang  string `xml:"http://www.w3.org/XML/1998/namespace lang,attr"`
	Value string `xml:",chardata"`
}

// LocalizedURI represents the SAML type localizedURIType: a URI value
// tagged with an xml:lang language code.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.2.5
type LocalizedURI struct {
	Lang  string `xml:"http://www.w3.org/XML/1998/namespace lang,attr"`
	Value string `xml:",chardata"`
}
// ContactPerson represents the SAML element ContactPerson.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.3.2.2
type ContactPerson struct {
	ContactType      string `xml:"contactType,attr"`
	Company          string
	GivenName        string
	SurName          string
	EmailAddresses   []string `xml:"EmailAddress"`
	TelephoneNumbers []string `xml:"TelephoneNumber"`
}

// RoleDescriptor represents the SAML element RoleDescriptor. It is the
// common base embedded by the concrete *Descriptor role types below.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.4.1
type RoleDescriptor struct {
	ID                         string         `xml:",attr,omitempty"`
	ValidUntil                 *time.Time     `xml:"validUntil,attr,omitempty"`
	CacheDuration              time.Duration  `xml:"cacheDuration,attr,omitempty"`
	ProtocolSupportEnumeration string         `xml:"protocolSupportEnumeration,attr"`
	ErrorURL                   string         `xml:"errorURL,attr,omitempty"`
	Signature                  *etree.Element
	KeyDescriptors             []KeyDescriptor `xml:"KeyDescriptor,omitempty"`
	Organization               *Organization   `xml:"Organization,omitempty"`
	ContactPeople              []ContactPerson `xml:"ContactPerson,omitempty"`
}
// KeyDescriptor represents the XMLSEC object of the same name.
//
// Use is typically "signing" or "encryption" per the SAML metadata schema.
type KeyDescriptor struct {
	Use               string             `xml:"use,attr"`
	KeyInfo           KeyInfo            `xml:"http://www.w3.org/2000/09/xmldsig# KeyInfo"`
	EncryptionMethods []EncryptionMethod `xml:"EncryptionMethod"`
}

// EncryptionMethod represents the XMLSEC object of the same name.
type EncryptionMethod struct {
	Algorithm string `xml:"Algorithm,attr"`
}

// KeyInfo represents the XMLSEC object of the same name.
type KeyInfo struct {
	XMLName  xml.Name `xml:"http://www.w3.org/2000/09/xmldsig# KeyInfo"`
	X509Data X509Data `xml:"X509Data"`
}

// X509Data represents the XMLSEC object of the same name.
type X509Data struct {
	XMLName          xml.Name          `xml:"http://www.w3.org/2000/09/xmldsig# X509Data"`
	X509Certificates []X509Certificate `xml:"X509Certificate"`
}

// X509Certificate represents the XMLSEC object of the same name.
// Data holds the base64-encoded certificate as character data.
type X509Certificate struct {
	XMLName xml.Name `xml:"http://www.w3.org/2000/09/xmldsig# X509Certificate"`
	Data    string   `xml:",chardata"`
}
// Endpoint represents the SAML EndpointType object: a protocol binding
// (see the *Binding constants above) plus the location(s) it serves.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.2.2
type Endpoint struct {
	Binding          string `xml:"Binding,attr"`
	Location         string `xml:"Location,attr"`
	ResponseLocation string `xml:"ResponseLocation,attr,omitempty"`
}

// IndexedEndpoint represents the SAML IndexedEndpointType object: an
// Endpoint extended with an index and an optional default marker, used
// where multiple endpoints of the same kind may be declared.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.2.3
type IndexedEndpoint struct {
	Binding          string  `xml:"Binding,attr"`
	Location         string  `xml:"Location,attr"`
	ResponseLocation *string `xml:"ResponseLocation,attr,omitempty"`
	Index            int     `xml:"index,attr"`
	IsDefault        *bool   `xml:"isDefault,attr"`
}
// SSODescriptor represents the SAML complex type SSODescriptor. It extends
// RoleDescriptor with the endpoints common to both IdP and SP roles.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.4.2
type SSODescriptor struct {
	RoleDescriptor
	ArtifactResolutionServices []IndexedEndpoint `xml:"ArtifactResolutionService"`
	SingleLogoutServices       []Endpoint        `xml:"SingleLogoutService"`
	ManageNameIDServices       []Endpoint        `xml:"ManageNameIDService"`
	NameIDFormats              []NameIDFormat    `xml:"NameIDFormat"`
}

// IDPSSODescriptor represents the SAML IDPSSODescriptorType object: the
// metadata role published by an identity provider.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.4.3
type IDPSSODescriptor struct {
	XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:metadata IDPSSODescriptor"`
	SSODescriptor
	WantAuthnRequestsSigned *bool `xml:",attr"`

	SingleSignOnServices       []Endpoint  `xml:"SingleSignOnService"`
	ArtifactResolutionServices []Endpoint  `xml:"ArtifactResolutionService"`
	NameIDMappingServices      []Endpoint  `xml:"NameIDMappingService"`
	AssertionIDRequestServices []Endpoint  `xml:"AssertionIDRequestService"`
	AttributeProfiles          []string    `xml:"AttributeProfile"`
	Attributes                 []Attribute `xml:"Attribute"`
}
// SPSSODescriptor represents the SAML SPSSODescriptorType object: the
// metadata role published by a service provider.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.4.2
type SPSSODescriptor struct {
	XMLName xml.Name `xml:"urn:oasis:names:tc:SAML:2.0:metadata SPSSODescriptor"`
	SSODescriptor
	AuthnRequestsSigned        *bool                       `xml:",attr"`
	WantAssertionsSigned       *bool                       `xml:",attr"`
	AssertionConsumerServices  []IndexedEndpoint           `xml:"AssertionConsumerService"`
	AttributeConsumingServices []AttributeConsumingService `xml:"AttributeConsumingService"`
}

// AttributeConsumingService represents the SAML AttributeConsumingService object.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.4.4.1
type AttributeConsumingService struct {
	Index               int                  `xml:"index,attr"`
	IsDefault           *bool                `xml:"isDefault,attr"`
	ServiceNames        []LocalizedName      `xml:"ServiceName"`
	ServiceDescriptions []LocalizedName      `xml:"ServiceDescription"`
	RequestedAttributes []RequestedAttribute `xml:"RequestedAttribute"`
}

// RequestedAttribute represents the SAML RequestedAttribute object: an
// Attribute extended with an optional isRequired flag.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.4.4.2
type RequestedAttribute struct {
	Attribute
	IsRequired *bool `xml:"isRequired,attr"`
}
// AuthnAuthorityDescriptor represents the SAML AuthnAuthorityDescriptor object.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.4.5
type AuthnAuthorityDescriptor struct {
	RoleDescriptor
	AuthnQueryServices         []Endpoint     `xml:"AuthnQueryService"`
	AssertionIDRequestServices []Endpoint     `xml:"AssertionIDRequestService"`
	NameIDFormats              []NameIDFormat `xml:"NameIDFormat"`
}

// PDPDescriptor represents the SAML PDPDescriptor object (policy decision
// point role).
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.4.6
type PDPDescriptor struct {
	RoleDescriptor
	AuthzServices              []Endpoint     `xml:"AuthzService"`
	AssertionIDRequestServices []Endpoint     `xml:"AssertionIDRequestService"`
	NameIDFormats              []NameIDFormat `xml:"NameIDFormat"`
}

// AttributeAuthorityDescriptor represents the SAML AttributeAuthorityDescriptor object.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.4.7
type AttributeAuthorityDescriptor struct {
	RoleDescriptor
	AttributeServices          []Endpoint     `xml:"AttributeService"`
	AssertionIDRequestServices []Endpoint     `xml:"AssertionIDRequestService"`
	NameIDFormats              []NameIDFormat `xml:"NameIDFormat"`
	AttributeProfiles          []string       `xml:"AttributeProfile"`
	Attributes                 []Attribute    `xml:"Attribute"`
}
// AffiliationDescriptor represents the SAML AffiliationDescriptor object:
// a group of entities affiliated under a single owner.
//
// See http://docs.oasis-open.org/security/saml/v2.0/saml-metadata-2.0-os.pdf §2.5
type AffiliationDescriptor struct {
	AffiliationOwnerID string        `xml:"affiliationOwnerID,attr"`
	ID                 string        `xml:",attr"`
	ValidUntil         time.Time     `xml:"validUntil,attr,omitempty"`
	CacheDuration      time.Duration `xml:"cacheDuration,attr"`
	// Signature holds the raw ds:Signature element, if present.
	Signature        *etree.Element
	AffiliateMembers []string        `xml:"AffiliateMember"`
	KeyDescriptors   []KeyDescriptor `xml:"KeyDescriptor"`
}
} | aux := &struct {
ValidUntil RelaxedTime `xml:"validUntil,attr,omitempty"`
CacheDuration Duration `xml:"cacheDuration,attr,omitempty"`
*Alias |
exec.go | package exec
import (
"bytes"
"context"
"encoding/json"
"reflect"
"sync"
"github.com/graph-gophers/graphql-go/errors"
"github.com/graph-gophers/graphql-go/internal/common"
"github.com/graph-gophers/graphql-go/internal/exec/resolvable"
"github.com/graph-gophers/graphql-go/internal/exec/selected"
"github.com/graph-gophers/graphql-go/internal/query"
"github.com/graph-gophers/graphql-go/internal/schema"
"github.com/graph-gophers/graphql-go/log"
"github.com/graph-gophers/graphql-go/trace"
)
// Request carries the per-request execution state: the selected query,
// a semaphore channel bounding concurrent resolver goroutines (Limiter),
// plus the tracer and logger used during field execution.
type Request struct {
	selected.Request

	Limiter chan struct{}
	Tracer  trace.Tracer
	Logger  log.Logger
}
// handlePanic is meant to be deferred in resolver goroutines: it recovers
// from a panic, logs it, and records it as a query error instead of
// crashing the process.
func (r *Request) handlePanic(ctx context.Context) {
	if value := recover(); value != nil {
		r.Logger.LogPanic(ctx, value)
		r.AddError(makePanicError(value))
	}
}
func makePanicError(value interface{}) *errors.QueryError |
// Execute runs the given operation against the schema and returns the
// serialized JSON response plus any accumulated query errors. Mutations
// (op.Type == query.Mutation) execute their top-level fields serially.
// If the context was cancelled, the partial output is discarded and a
// single context error is returned instead.
func (r *Request) Execute(ctx context.Context, s *resolvable.Schema, op *query.Operation) ([]byte, []*errors.QueryError) {
	var out bytes.Buffer
	func() {
		// Recover from panics in the top-level selection pass.
		defer r.handlePanic(ctx)
		sels := selected.ApplyOperation(&r.Request, s, op)
		r.execSelections(ctx, sels, nil, s.Resolver, &out, op.Type == query.Mutation)
	}()

	if err := ctx.Err(); err != nil {
		return nil, []*errors.QueryError{errors.Errorf("%s", err)}
	}

	return out.Bytes(), r.Errs
}
// fieldToExec pairs a schema field with its merged sub-selections, the
// resolver value it is invoked on, and the buffer its JSON result is
// written to (a private buffer in async mode, the shared output otherwise).
type fieldToExec struct {
	field    *selected.SchemaField
	sels     []selected.Selection
	resolver reflect.Value
	out      *bytes.Buffer
}
// execSelections resolves one level of selections against resolver and
// writes a JSON object to out. When serially is false and any selection is
// async, fields are resolved concurrently into per-field buffers and then
// stitched together in order, keeping the output deterministic; otherwise
// (e.g. mutations) fields resolve one at a time into the shared buffer.
func (r *Request) execSelections(ctx context.Context, sels []selected.Selection, path *pathSegment, resolver reflect.Value, out *bytes.Buffer, serially bool) {
	async := !serially && selected.HasAsyncSel(sels)

	var fields []*fieldToExec
	collectFieldsToResolve(sels, resolver, &fields, make(map[string]*fieldToExec))

	if async {
		// Resolve every field in parallel, each into its own buffer.
		var wg sync.WaitGroup
		wg.Add(len(fields))
		for _, f := range fields {
			go func(f *fieldToExec) {
				defer wg.Done()
				defer r.handlePanic(ctx)
				f.out = new(bytes.Buffer)
				execFieldSelection(ctx, r, f, &pathSegment{path, f.field.Alias}, true)
			}(f)
		}
		wg.Wait()
	}

	out.WriteByte('{')
	for i, f := range fields {
		if i > 0 {
			out.WriteByte(',')
		}
		out.WriteByte('"')
		out.WriteString(f.field.Alias)
		out.WriteByte('"')
		out.WriteByte(':')
		if async {
			// Already resolved above; just copy the buffered JSON.
			out.Write(f.out.Bytes())
			continue
		}
		// Synchronous path: resolve directly into the shared buffer.
		f.out = out
		execFieldSelection(ctx, r, f, &pathSegment{path, f.field.Alias}, false)
	}
	out.WriteByte('}')
}
// collectFieldsToResolve flattens a selection set into the ordered list of
// fields to execute, merging selections that share an alias (fieldByAlias
// provides the dedup map across recursive calls). __typename selections
// become fixed-result fields, and type assertions (fragments on concrete
// types) recurse with the asserted resolver value when the assertion holds.
func collectFieldsToResolve(sels []selected.Selection, resolver reflect.Value, fields *[]*fieldToExec, fieldByAlias map[string]*fieldToExec) {
	for _, sel := range sels {
		switch sel := sel.(type) {
		case *selected.SchemaField:
			field, ok := fieldByAlias[sel.Alias]
			if !ok { // validation already checked for conflict (TODO)
				field = &fieldToExec{field: sel, resolver: resolver}
				fieldByAlias[sel.Alias] = field
				*fields = append(*fields, field)
			}
			// Merge sub-selections of same-alias fields.
			field.sels = append(field.sels, sel.Sels...)

		case *selected.TypenameField:
			sf := &selected.SchemaField{
				Field:       resolvable.MetaFieldTypename,
				Alias:       sel.Alias,
				FixedResult: reflect.ValueOf(typeOf(sel, resolver)),
			}
			*fields = append(*fields, &fieldToExec{field: sf, resolver: resolver})

		case *selected.TypeAssertion:
			out := resolver.Method(sel.MethodIndex).Call(nil)
			if !out[1].Bool() {
				// Assertion failed; skip this fragment's selections.
				continue
			}
			collectFieldsToResolve(sel.Sels, out[0], fields, fieldByAlias)

		default:
			panic("unreachable")
		}
	}
}
// typeOf resolves the concrete __typename for a resolver value. A plain
// object type (no type assertions) simply reports its static name; for
// interface/union types each type-assertion method is invoked until one
// matches. Returns "" when no assertion succeeds.
func typeOf(tf *selected.TypenameField, resolver reflect.Value) string {
	assertions := tf.TypeAssertions
	if len(assertions) == 0 {
		return tf.Name
	}
	for typeName, assertion := range assertions {
		result := resolver.Method(assertion.MethodIndex).Call(nil)
		if matched := result[1].Bool(); matched {
			return typeName
		}
	}
	return ""
}
// execFieldSelection resolves a single field and writes its JSON value to
// f.out. applyLimiter bounds concurrent resolver executions via the
// request's semaphore channel. Resolver errors and panics are converted
// into query errors tagged with the field's path, and the field then
// serializes as null.
func execFieldSelection(ctx context.Context, r *Request, f *fieldToExec, path *pathSegment, applyLimiter bool) {
	if applyLimiter {
		r.Limiter <- struct{}{}
	}

	var result reflect.Value
	var err *errors.QueryError

	// finish must observe the final err value, hence the closure.
	traceCtx, finish := r.Tracer.TraceField(ctx, f.field.TraceLabel, f.field.TypeName, f.field.Name, !f.field.Async, f.field.Args)
	defer func() {
		finish(err)
	}()

	err = func() (err *errors.QueryError) {
		defer func() {
			if panicValue := recover(); panicValue != nil {
				r.Logger.LogPanic(ctx, panicValue)
				err = makePanicError(panicValue)
				err.Path = path.toSlice()
			}
		}()

		// Precomputed values (e.g. __typename) bypass the resolver call.
		if f.field.FixedResult.IsValid() {
			result = f.field.FixedResult
			return nil
		}

		if err := traceCtx.Err(); err != nil {
			return errors.Errorf("%s", err) // don't execute any more resolvers if context got cancelled
		}

		// Build the resolver argument list: optional context, then
		// optional packed arguments.
		var in []reflect.Value
		if f.field.HasContext {
			in = append(in, reflect.ValueOf(traceCtx))
		}
		if f.field.ArgsPacker != nil {
			in = append(in, f.field.PackedArgs)
		}
		callOut := f.resolver.Method(f.field.MethodIndex).Call(in)
		result = callOut[0]
		if f.field.HasError && !callOut[1].IsNil() {
			resolverErr := callOut[1].Interface().(error)
			err := errors.Errorf("%s", resolverErr)
			err.Path = path.toSlice()
			err.ResolverError = resolverErr
			return err
		}
		return nil
	}()

	if applyLimiter {
		<-r.Limiter
	}
	if err != nil {
		r.AddError(err)
		f.out.WriteString("null") // TODO handle non-nil
		return
	}
	r.execSelectionSet(traceCtx, f.sels, f.field.Type, path, result, f.out)
}
// execSelectionSet serializes a resolved value of type typ to out,
// recursing into object selections, list elements, and leaf scalars/enums.
// Nil values serialize as null for nullable types; a nil value for a
// non-null object type panics (caught by a handlePanic further up).
func (r *Request) execSelectionSet(ctx context.Context, sels []selected.Selection, typ common.Type, path *pathSegment, resolver reflect.Value, out *bytes.Buffer) {
	t, nonNull := unwrapNonNull(typ)
	switch t := t.(type) {
	case *schema.Object, *schema.Interface, *schema.Union:
		if resolver.Kind() == reflect.Ptr && resolver.IsNil() {
			if nonNull {
				panic(errors.Errorf("got nil for non-null %q", t))
			}
			out.WriteString("null")
			return
		}

		r.execSelections(ctx, sels, path, resolver, out, false)
		return
	}

	// Nullable leaf/list values arrive as pointers; unwrap them.
	if !nonNull {
		if resolver.IsNil() {
			out.WriteString("null")
			return
		}
		resolver = resolver.Elem()
	}

	switch t := t.(type) {
	case *common.List:
		l := resolver.Len()

		if selected.HasAsyncSel(sels) {
			// Serialize each element concurrently into its own buffer,
			// then concatenate in order.
			var wg sync.WaitGroup
			wg.Add(l)
			entryouts := make([]bytes.Buffer, l)
			for i := 0; i < l; i++ {
				go func(i int) {
					defer wg.Done()
					defer r.handlePanic(ctx)
					r.execSelectionSet(ctx, sels, t.OfType, &pathSegment{path, i}, resolver.Index(i), &entryouts[i])
				}(i)
			}
			wg.Wait()

			out.WriteByte('[')
			for i, entryout := range entryouts {
				if i > 0 {
					out.WriteByte(',')
				}
				out.Write(entryout.Bytes())
			}
			out.WriteByte(']')
			return
		}

		out.WriteByte('[')
		for i := 0; i < l; i++ {
			if i > 0 {
				out.WriteByte(',')
			}
			r.execSelectionSet(ctx, sels, t.OfType, &pathSegment{path, i}, resolver.Index(i), out)
		}
		out.WriteByte(']')

	case *schema.Scalar:
		v := resolver.Interface()
		data, err := json.Marshal(v)
		if err != nil {
			panic(errors.Errorf("could not marshal %v", v))
		}
		out.Write(data)

	case *schema.Enum:
		out.WriteByte('"')
		out.WriteString(resolver.String())
		out.WriteByte('"')

	default:
		panic("unreachable")
	}
}
// unwrapNonNull strips a NonNull wrapper from t, reporting whether one
// was present.
func unwrapNonNull(t common.Type) (common.Type, bool) {
	nonNull, ok := t.(*common.NonNull)
	if !ok {
		return t, false
	}
	return nonNull.OfType, true
}
// pathSegment is a node in a singly linked list recording the response
// path (field aliases and list indices) from the root down to the field
// currently being resolved; used to annotate query errors.
type pathSegment struct {
	parent *pathSegment
	value  interface{}
}
// toSlice flattens the linked path into a root-first slice of segment
// values. A nil receiver yields nil, matching the recursive original.
func (p *pathSegment) toSlice() []interface{} {
	if p == nil {
		return nil
	}
	// Collect leaf-to-root, then reverse into root-first order.
	var reversed []interface{}
	for seg := p; seg != nil; seg = seg.parent {
		reversed = append(reversed, seg.value)
	}
	out := make([]interface{}, len(reversed))
	for i, v := range reversed {
		out[len(reversed)-1-i] = v
	}
	return out
}
| {
return errors.Errorf("graphql: panic occurred: %v", value)
} |
scaleset_vmsclient.go | package azure
import (
"context"
"fmt"
"github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2018-06-01/compute"
"github.com/Azure/go-autorest/autorest"
)
// scaleSetVMsClient wraps the Azure VirtualMachineScaleSetVMs client,
// pinning it to a single scale set within a single resource group.
type scaleSetVMsClient struct {
	scaleSetName      string
	resourceGroupName string
	client            *compute.VirtualMachineScaleSetVMsClient
}
func | (
config Config,
baseURI string,
authorizer autorest.Authorizer,
) vmsClient {
vmsClient := compute.NewVirtualMachineScaleSetVMsClientWithBaseURI(baseURI, config.SubscriptionID)
vmsClient.Authorizer = authorizer
vmsClient.PollingDelay = clientPollingDelay
vmsClient.AddToUserAgent(config.UserAgent)
return &scaleSetVMsClient{
scaleSetName: config.ScaleSetName,
resourceGroupName: config.ResourceGroupName,
client: &vmsClient,
}
}
// name returns the VM resource name for an instance, formed by joining
// the scale set name and the instance ID with an underscore.
func (s *scaleSetVMsClient) name(instanceID string) string {
	return fmt.Sprintf("%s_%s", s.scaleSetName, instanceID)
}
// describe returns the scale set VM for the given instance ID as an
// opaque interface{}, satisfying the vmsClient interface.
func (s *scaleSetVMsClient) describe(
	instanceID string,
) (interface{}, error) {
	return s.describeInstance(instanceID)
}
// getDataDisks returns the data disks attached to the given scale set
// instance. It fails when the VM carries no storage profile or disk list.
func (s *scaleSetVMsClient) getDataDisks(
	instanceID string,
) ([]compute.DataDisk, error) {
	instance, err := s.describeInstance(instanceID)
	if err != nil {
		return nil, err
	}
	profile := instance.StorageProfile
	if profile == nil || profile.DataDisks == nil {
		return nil, fmt.Errorf("vm storage profile is invalid")
	}
	return *profile.DataDisks, nil
}
// updateDataDisks replaces the instance's data disk list with dataDisks
// and blocks until the Azure update operation completes.
func (s *scaleSetVMsClient) updateDataDisks(
	instanceID string,
	dataDisks []compute.DataDisk,
) error {
	// Fetch the current VM model so the update carries the full object
	// with only the disk list swapped out.
	vm, err := s.describeInstance(instanceID)
	if err != nil {
		return err
	}

	vm.StorageProfile.DataDisks = &dataDisks

	ctx := context.Background()
	future, err := s.client.Update(
		ctx,
		s.resourceGroupName,
		s.scaleSetName,
		instanceID,
		vm,
	)
	if err != nil {
		return err
	}

	// Block until the long-running operation finishes.
	err = future.WaitForCompletionRef(ctx, s.client.Client)
	if err != nil {
		return err
	}

	return nil
}
// describeInstance fetches the scale set VM model for the given instance
// ID from the Azure API.
func (s *scaleSetVMsClient) describeInstance(
	instanceID string,
) (compute.VirtualMachineScaleSetVM, error) {
	return s.client.Get(
		context.Background(),
		s.resourceGroupName,
		s.scaleSetName,
		instanceID,
	)
}
| newScaleSetVMsClient |
function.go | /*
* Copyright (c) 2017, MegaEase
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package function
import (
"fmt"
"github.com/megaease/easegress/pkg/context"
"github.com/megaease/easegress/pkg/filter/proxy"
"github.com/megaease/easegress/pkg/filter/requestadaptor"
"github.com/megaease/easegress/pkg/logger"
"github.com/megaease/easegress/pkg/object/httppipeline"
"github.com/megaease/easegress/pkg/supervisor"
"github.com/megaease/easegress/pkg/util/httpheader"
"github.com/megaease/easegress/pkg/util/httpstat"
"github.com/megaease/easegress/pkg/util/pathadaptor"
"github.com/megaease/easegress/pkg/v"
cron "github.com/robfig/cron/v3"
yaml "gopkg.in/yaml.v2"
)
const (
	// Category is the category of Function.
	Category = supervisor.CategoryPipeline

	// Kind is the kind of Function.
	Kind = "Function"

	// withoutSecondOpt is the standard cron format of unix.
	withoutSecondOpt = cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow | cron.Descriptor
	// withSecondOpt additionally accepts a leading seconds field.
	withSecondOpt = cron.Second | withoutSecondOpt
	// optionalSecondOpt is not used for now.
	optionalSecondOpt = cron.SecondOptional | withSecondOpt
)
func init() |
type (
	// Function is Object Function: an HTTP-triggered (and optionally
	// cron-triggered) proxy to a single backend URL, with an optional
	// request adaptor applied before proxying.
	Function struct {
		super     *supervisor.Supervisor
		superSpec *supervisor.Spec
		spec      *Spec

		proxy          *proxy.Proxy
		cron           *Cron
		requestAdaptor *requestadaptor.RequestAdaptor
	}

	// Spec describes the Function.
	Spec struct {
		// URL is the backend address requests are proxied to.
		URL string `yaml:"url" jsonschema:"required"`

		// Cron optionally schedules periodic invocations of URL.
		Cron *CronSpec `yaml:"cron" jsonschema:"omitempty"`

		// RequestAdaptor optionally rewrites requests before proxying.
		RequestAdaptor *RequestAdapotorSpec `yaml:"requestAdaptor" jsonschema:"omitempty"`
	}

	// RequestAdapotorSpec describes the RequestAdaptor.
	RequestAdapotorSpec struct {
		Method string                `yaml:"method" jsonschema:"omitempty,format=httpmethod"`
		Path   *pathadaptor.Spec     `yaml:"path,omitempty" jsonschema:"omitempty"`
		Header *httpheader.AdaptSpec `yaml:"header,omitempty" jsonschema:"omitempty"`
	}

	// Status is the status of Function.
	Status struct {
		Health string `yaml:"health"`

		HTTP *httpstat.Status `yaml:"http"`
		Cron *CronStatus      `yaml:"cron"`
	}
)
// Validate validates Spec by building the derived proxy filter spec and
// running it through the shared schema validator.
func (spec Spec) Validate() error {
	pipeSpec := spec.proxyPipeSpec()
	buff, err := yaml.Marshal(pipeSpec)
	if err != nil {
		err = fmt.Errorf("BUG: marshal %#v to yaml failed: %v",
			pipeSpec, err)
		logger.Errorf(err.Error())
		return err
	}

	vr := v.Validate(pipeSpec, buff)
	if !vr.Valid() {
		return fmt.Errorf("%s", vr.Error())
	}

	return nil
}
// proxyPipeSpec derives the proxy filter spec from this Function spec: a
// single-server round-robin pool pointing at spec.URL.
func (spec Spec) proxyPipeSpec() *httppipeline.FilterSpec {
	meta := &httppipeline.FilterMetaSpec{
		Kind: proxy.Kind,
		Name: "proxy",
	}
	filterSpec := &proxy.Spec{
		MainPool: &proxy.PoolSpec{
			Servers: []*proxy.Server{
				{
					URL: spec.URL,
				},
			},
			LoadBalance: &proxy.LoadBalance{Policy: proxy.PolicyRoundRobin},
		},
	}

	pipeSpec, err := httppipeline.NewFilterSpec(meta, filterSpec)
	if err != nil {
		// Spec construction failing here indicates a programming error.
		panic(err)
	}

	return pipeSpec
}
// requestAdaptorPipeSpec derives the request-adaptor filter spec from this
// Function spec. Callers must ensure spec.RequestAdaptor is non-nil.
func (spec Spec) requestAdaptorPipeSpec() *httppipeline.FilterSpec {
	meta := &httppipeline.FilterMetaSpec{
		Kind: requestadaptor.Kind,
		// NOTE(review): the filter is named "urlratelimiter", which looks
		// like a copy/paste from another pipeline — confirm the intended
		// name before changing it, since it may appear in status output.
		Name: "urlratelimiter",
	}
	filterSpec := &requestadaptor.Spec{
		Method: spec.RequestAdaptor.Method,
		Path:   spec.RequestAdaptor.Path,
		Header: spec.RequestAdaptor.Header,
	}

	pipeSpec, err := httppipeline.NewFilterSpec(meta, filterSpec)
	if err != nil {
		// Spec construction failing here indicates a programming error.
		panic(err)
	}

	return pipeSpec
}
// Category returns the category of Function (a pipeline-category object).
func (f *Function) Category() supervisor.ObjectCategory {
	return Category
}

// Kind returns the kind of Function.
func (f *Function) Kind() string {
	return Kind
}

// DefaultSpec returns the default spec of Function (all fields zero;
// URL is required and must be supplied by the user).
func (f *Function) DefaultSpec() interface{} {
	return &Spec{}
}
// Init initializes Function from its spec and builds all sub-components.
func (f *Function) Init(superSpec *supervisor.Spec, super *supervisor.Supervisor) {
	f.superSpec, f.spec, f.super = superSpec, superSpec.ObjectSpec().(*Spec), super
	f.reload()
}

// Inherit inherits previous generation of Function. The previous
// generation is closed first; no state is carried over — the new
// generation is built from scratch via Init.
func (f *Function) Inherit(superSpec *supervisor.Spec,
	previousGeneration supervisor.Object, super *supervisor.Supervisor) {

	previousGeneration.Close()
	f.Init(superSpec, super)
}
// reload (re)builds the proxy and the optional request adaptor and cron
// trigger from the current spec.
func (f *Function) reload() {
	f.proxy = &proxy.Proxy{}
	f.proxy.Init(f.spec.proxyPipeSpec(), f.super)

	if f.spec.RequestAdaptor != nil {
		f.requestAdaptor = &requestadaptor.RequestAdaptor{}
		f.requestAdaptor.Init(f.spec.requestAdaptorPipeSpec(), f.super)
	}

	if f.spec.Cron != nil {
		f.cron = NewCron(f.spec.URL, f.spec.Cron)
	}
}
// Handle handles all HTTP incoming traffic: the optional request adaptor
// runs first, then the request is proxied to the backend URL.
func (f *Function) Handle(ctx context.HTTPContext) {
	if f.requestAdaptor != nil {
		f.requestAdaptor.Handle(ctx)
	}

	f.proxy.Handle(ctx)
}
// Status returns Status generated by Runtime: the proxy's main pool HTTP
// stats plus cron status when a cron trigger is configured.
//
// NOTE(review): the Health field of Status is never populated here —
// confirm whether it is set elsewhere or is simply unused.
func (f *Function) Status() *supervisor.Status {
	s := &Status{
		HTTP: f.proxy.Status().(*proxy.Status).MainPool.Stat,
	}

	if f.cron != nil {
		s.Cron = f.cron.Status()
	}

	return &supervisor.Status{
		ObjectStatus: s,
	}
}
// Close closes Function, shutting down the optional request adaptor and
// cron trigger before the proxy itself.
func (f *Function) Close() {
	if f.requestAdaptor != nil {
		f.requestAdaptor.Close()
	}

	if f.cron != nil {
		f.cron.Close()
	}

	f.proxy.Close()
}
| {
supervisor.Register(&Function{})
} |
config.entity.ts | import { Entity, Column, PrimaryColumn } from 'typeorm'
@Entity()
export class | {
/**
* 配置项键名
* - 该键为主键且值为唯一值
*/
@PrimaryColumn({
type: 'varchar',
length: 32,
unique: true,
})
key: string
/**
* 配置项值
*/
@Column({
type: 'varchar',
length: 512,
nullable: true,
})
value: string
/**
* 配置项描述
*/
@Column({
type: 'varchar',
length: 128,
nullable: true,
})
description: string
}
| Config |
friends.go | package vkapi
// ============
// FriendsAdd
// ============

// FriendsAddParams are the parameters for the FriendsAdd method.
type FriendsAddParams struct {
	UserID uint
	Text   string
	Follow bool
}

// FriendsAdd approves or creates a friend request. If the selected user's
// ID is present in the list of incoming friend requests (obtained via
// friends.getRequests), the request is approved and the user is added to
// the current user's friends. Otherwise, an outgoing friend request from
// the current user to the selected user is created.
func (api *API) FriendsAdd(p FriendsAddParams) (int, error) {
	resp, err := api.Request("friends.add", p, new(int))
	if err != nil {
		return 0, err
	}
	return resp.(int), nil
}
// ================
// FriendsAddList
// ================

// FriendsAddListParams are the parameters for the FriendsAddList method.
type FriendsAddListParams struct {
	Name    string
	UserIDS []int
}

// FriendsAddListResp is the structure returned by the FriendsAddList method.
type FriendsAddListResp struct {
	ListID int `json:"list_id"`
}

// FriendsAddList creates a new friend list for the current user.
func (api *API) FriendsAddList(p FriendsAddListParams) (*FriendsAddListResp, error) {
	resp, err := api.Request("friends.addList", p, new(FriendsAddListResp))
	if err != nil {
		return nil, err
	}
	return resp.(*FriendsAddListResp), nil
}
// ===================
// FriendsAreFriends
// ===================

// FriendsAreFriendsParams are the parameters for the FriendsAreFriends method.
type FriendsAreFriendsParams struct {
	UserIDS  []int
	NeedSign bool
}

// FriendsAreFriendsResp is the structure returned by the FriendsAreFriends method.
type FriendsAreFriendsResp []struct {
	UserID         int    `json:"user_id"`
	FriendStatus   int    `json:"friend_status"`
	RequestMessage string `json:"request_message"`
	ReadState      int    `json:"read_state"`
	Sign           string `json:"sign"`
}

// FriendsAreFriends reports whether the current user is in the friend
// lists of the given users, including information about any pending
// outgoing or incoming friend requests (subscriptions).
func (api *API) FriendsAreFriends(p FriendsAreFriendsParams) (*FriendsAreFriendsResp, error) {
	resp, err := api.Request("friends.areFriends", p, new(FriendsAreFriendsResp))
	if err != nil {
		return nil, err
	}
	return resp.(*FriendsAreFriendsResp), nil
}
// ===============
// FriendsDelete
// ===============

// FriendsDeleteParams are the parameters for the FriendsDelete method.
type FriendsDeleteParams struct {
	UserID uint
}

// FriendsDeleteResp is the structure returned by the FriendsDelete method.
type FriendsDeleteResp struct {
	Success           int `json:"success"`
	FriendDeleted     int `json:"friend_deleted"`
	OutRequestDeleted int `json:"out_request_deleted"`
	InRequestDeleted  int `json:"in_request_deleted"`
	SuggestionDeleted int `json:"suggestion_deleted"`
}

// FriendsDelete removes a user from the friend list or declines a friend
// request. If the selected user's ID is present in the incoming friend
// requests (friends.getRequests), the request is declined; otherwise the
// user is removed from the current user's friend list (friends.get).
func (api *API) FriendsDelete(p FriendsDeleteParams) (*FriendsDeleteResp, error) {
	resp, err := api.Request("friends.delete", p, new(FriendsDeleteResp))
	if err != nil {
		return nil, err
	}
	return resp.(*FriendsDeleteResp), nil
}
// ==========================
// FriendsDeleteAllRequests
// ==========================

// FriendsDeleteAllRequests marks all incoming friend requests as viewed.
func (api *API) FriendsDeleteAllRequests() (bool, error) {
	// This method takes no parameters, hence the empty struct.
	resp, err := api.Request("friends.deleteAllRequests", struct{}{}, new(int))
	if err != nil {
		return false, err
	}
	return toBool(resp.(int)), nil
}
// ===================
// FriendsDeleteList
// ===================
// FriendsDeleteListParams are the parameters of FriendsDeleteList.
type FriendsDeleteListParams struct {
	ListID uint
}

// FriendsDeleteList deletes an existing friend list of the current user.
func (api *API) FriendsDeleteList(p FriendsDeleteListParams) (bool, error) {
	result, err := api.Request("friends.deleteList", p, new(int))
	if err == nil {
		return toBool(result.(int)), nil
	}
	return false, err
}
// =============
// FriendsEdit
// =============
// FriendsEditParams are the parameters of FriendsEdit.
type FriendsEditParams struct {
	UserID  uint
	ListIDS []int
}

// FriendsEdit edits the friend-list assignments of the selected friend.
func (api *API) FriendsEdit(p FriendsEditParams) (bool, error) {
	result, err := api.Request("friends.edit", p, new(int))
	if err == nil {
		return toBool(result.(int)), nil
	}
	return false, err
}
// =================
// FriendsEditList
// =================
// FriendsEditListParams are the parameters of FriendsEditList.
type FriendsEditListParams struct {
	Name          string
	ListID        uint
	UserIDS       []int
	AddUserIDS    []int
	DeleteUserIDS []int
}

// FriendsEditList edits an existing friend list of the current user.
func (api *API) FriendsEditList(p FriendsEditListParams) (bool, error) {
	result, err := api.Request("friends.editList", p, new(int))
	if err == nil {
		return toBool(result.(int)), nil
	}
	return false, err
}
// ============
// FriendsGet
// ============
// FriendsGetParams are the parameters of FriendsGet.
type FriendsGetParams struct {
	UserID   int
	Order    string
	ListID   uint
	Count    uint
	Offset   uint
	Fields   string
	NameCase string
	Ref      string
}

// FriendsGetIDSResp is returned by FriendsGet when no profile fields are
// requested; it contains friend ids only.
type FriendsGetIDSResp struct {
	Count int   `json:"count"`
	Items []int `json:"items"`
}

// FriendsGetUsersResp is returned by FriendsGet when profile fields are
// requested; it contains profile objects.
type FriendsGetUsersResp struct {
	Count int    `json:"count"`
	Items []User `json:"items"`
}

// FriendsGet returns either the ids of the user's friends or extended
// profile information about them (when the Fields parameter is used).
// It returns *FriendsGetUsersResp when p.Fields is non-empty and
// *FriendsGetIDSResp otherwise. If you use the VK social graph in your
// application, see item 4.4 of the platform rules: https://vk.com/dev/rules
func (api *API) FriendsGet(p FriendsGetParams) (interface{}, error) {
	// Plain if/else replaces the original `switch len(p.Fields) > 0`
	// construct, which is not idiomatic Go for a boolean choice.
	var holder interface{}
	if len(p.Fields) > 0 {
		holder = new(FriendsGetUsersResp)
	} else {
		holder = new(FriendsGetIDSResp)
	}
	resp, err := api.Request("friends.get", p, holder)
	if err != nil {
		return nil, err
	}
	return resp, nil
}
// ====================
// FriendsGetAppUsers
// ====================
// FriendsGetAppUsers returns the ids of the current user's friends who
// installed this application.
func (api *API) FriendsGetAppUsers() ([]int, error) {
	result, err := api.Request("friends.getAppUsers", struct{}{}, new([]int))
	if err == nil {
		return result.([]int), nil
	}
	return nil, err
}
// ====================
// FriendsGetByPhones
// ====================
// FriendsGetByPhonesParams are the parameters of FriendsGetByPhones.
type FriendsGetByPhonesParams struct {
	Phones string
	Fields string
}

// FriendsGetByPhones returns the user's friends whose validated or
// profile phone numbers are contained in the given list. The current
// user must have a validated mobile number (check with users.get,
// fields=has_mobile), and the application must be trusted.
func (api *API) FriendsGetByPhones(p FriendsGetByPhonesParams) ([]User, error) {
	result, err := api.Request("friends.getByPhones", p, new([]User))
	if err == nil {
		return result.([]User), nil
	}
	return nil, err
}
// =================
// FriendsGetLists
// =================
// FriendsGetListsParams are the parameters of FriendsGetLists.
type FriendsGetListsParams struct {
	UserID       uint
	ReturnSystem bool
}

// FriendsGetListsResp is the structure returned by FriendsGetLists.
type FriendsGetListsResp struct {
	Count int `json:"count"`
	Items []struct {
		ID   int    `json:"id"`
		Name string `json:"name"`
	} `json:"items"`
}

// FriendsGetLists returns the user's friend tags (lists).
func (api *API) FriendsGetLists(p FriendsGetListsParams) (*FriendsGetListsResp, error) {
	result, err := api.Request("friends.getLists", p, new(FriendsGetListsResp))
	if err == nil {
		return result.(*FriendsGetListsResp), nil
	}
	return nil, err
}
// ==================
// FriendsGetMutual
// ==================
// FriendsGetMutualParams are the parameters of FriendsGetMutual.
type FriendsGetMutualParams struct {
	SourceUID  uint
	TargetUID  uint
	TargetUIDS []int
	Order      string
	Count      uint
	Offset     uint
}

// FriendsGetMutual returns the ids of the mutual friends between a pair
// of users.
func (api *API) FriendsGetMutual(p FriendsGetMutualParams) ([]int, error) {
	result, err := api.Request("friends.getMutual", p, new([]int))
	if err == nil {
		return result.([]int), nil
	}
	return nil, err
}
// ==================
// FriendsGetOnline
// ==================
// FriendsGetOnlineParams are the parameters of FriendsGetOnline.
type FriendsGetOnlineParams struct {
	UserID       uint
	ListID       uint
	OnlineMobile bool
	Order        string
	Count        uint
	Offset       uint
}

// FriendsGetOnlineResp is returned by FriendsGetOnline when
// OnlineMobile is true.
type FriendsGetOnlineResp struct {
	Online       []int `json:"online,omitempty"`
	OnlineMobile []int `json:"online_mobile,omitempty"`
}

// FriendsGetOnline returns the ids of the user's friends who are
// currently online. It returns []int when p.OnlineMobile is false and
// *FriendsGetOnlineResp when p.OnlineMobile is true.
func (api *API) FriendsGetOnline(p FriendsGetOnlineParams) (interface{}, error) {
	// Plain if/else replaces the original `switch p.OnlineMobile`
	// construct, which is not idiomatic Go for a boolean choice.
	var holder interface{}
	if p.OnlineMobile {
		holder = new(FriendsGetOnlineResp)
	} else {
		holder = new([]int)
	}
	resp, err := api.Request("friends.getOnline", p, holder)
	if err != nil {
		return nil, err
	}
	return resp, nil
}
// ==================
// FriendsGetRecent
// ==================
// FriendsGetRecentParams are the parameters of FriendsGetRecent.
type FriendsGetRecentParams struct {
	Count uint
}

// FriendsGetRecent returns the ids of the current user's recently added
// friends.
func (api *API) FriendsGetRecent(p FriendsGetRecentParams) ([]int, error) {
	result, err := api.Request("friends.getRecent", p, new([]int))
	if err == nil {
		return result.([]int), nil
	}
	return nil, err
}
// ====================
// FriendsGetRequests
// ====================
// FriendsGetRequestsParams are the parameters of FriendsGetRequests.
type FriendsGetRequestsParams struct {
	Offset     uint
	Count      uint
	Extended   bool
	NeedMutual bool
	Out        bool
	Sort       uint
	NeedViewed bool
	Suggested  bool
	Ref        string
	Fields     string
}

// FriendsGetRequestsResp is the structure returned by FriendsGetRequests.
type FriendsGetRequestsResp struct {
	Count int `json:"count"`
	Items []struct {
		UserID          int    `json:"user_id"`
		FirstName       string `json:"first_name"`
		LastName        string `json:"last_name"`
		IsClosed        bool   `json:"is_closed"`
		CanAccessClosed bool   `json:"can_access_closed"`
		Mutual          *struct {
			Count int   `json:"count"`
			Users []int `json:"users"`
		} `json:"mutual"`
		TrackCode string `json:"track_code"`
	} `json:"items"`
}

// FriendsGetRequests returns information about the current user's
// incoming or outgoing friend requests.
func (api *API) FriendsGetRequests(p FriendsGetRequestsParams) (*FriendsGetRequestsResp, error) {
	result, err := api.Request("friends.getRequests", p, new(FriendsGetRequestsResp))
	if err == nil {
		return result.(*FriendsGetRequestsResp), nil
	}
	return nil, err
}
// =======================
// FriendsGetSuggestions
// =======================
// FriendsGetSuggestionsParams параметры метода FriendsGetSuggestions.
type FriendsGetSuggestionsParams struct {
Filter string
Count uint
Offset uint
Fields string
NameCase string
}
// FriendsGetSuggestionsResp структура, возвращаемая методом FriendsGetSuggestions.
type FriendsGetSuggestionsResp struct {
Count int `json:"count"`
Items []User `json:"items"`
}
// FriendsGetSuggestions возвращает список профилей пользователей, которые могут быть друзьями теку | c (api *API) FriendsGetSuggestions(p FriendsGetSuggestionsParams) (*FriendsGetSuggestionsResp, error) {
resp, err := api.Request("friends.getSuggestions", p, new(FriendsGetSuggestionsResp))
if err != nil {
return nil, err
}
return resp.(*FriendsGetSuggestionsResp), nil
}
// ===============
// FriendsSearch
// ===============
// FriendsSearchParams are the parameters of FriendsSearch.
type FriendsSearchParams struct {
	UserID   uint
	Q        string
	Fields   string
	NameCase string
	Offset   uint
	Count    uint
}

// FriendsSearchResp is the structure returned by FriendsSearch.
type FriendsSearchResp struct {
	Count int    `json:"count"`
	Items []User `json:"items"`
}

// FriendsSearch searches within a user's friend list. For extended
// search over friends, use users.search with from_list=friends.
func (api *API) FriendsSearch(p FriendsSearchParams) (*FriendsSearchResp, error) {
	result, err := api.Request("friends.search", p, new(FriendsSearchResp))
	if err == nil {
		return result.(*FriendsSearchResp), nil
	}
	return nil, err
}
| щего пользователя.
fun |
seqgras.py | #!/usr/bin/env python
"""MIT - CSAIL - Gifford Lab - seqgra
seqgra complete pipeline:
1. generate data based on data definition (once), see run_simulator.py
2. train model on data (once), see run_learner.py
3. evaluate model performance with SIS, see run_sis.py
@author: Konstantin Krismer
"""
import argparse
import logging
import os
from typing import List, Optional
import seqgra
import seqgra.constants as c
from seqgra import MiscHelper
from seqgra.comparator import Comparator
from seqgra.idresolver import IdResolver
def get_all_grammar_ids(output_dir: str) -> List[str]:
    """Return the IDs (sub-folder names) of all grammars under
    ``<output_dir>/evaluation``.

    Args:
        output_dir: root output directory, with or without a trailing
            separator

    Returns:
        list of grammar IDs, one per sub-directory of the evaluation
        folder (plain files are ignored)
    """
    # os.path.join tolerates both "out" and "out/" inputs, unlike the
    # plain string concatenation used previously.
    folder = os.path.join(output_dir, "evaluation")
    return [entry for entry in os.listdir(folder)
            if os.path.isdir(os.path.join(folder, entry))]
def get_all_model_ids(output_dir: str, grammar_ids: List[str]) -> List[str]:
    """Return the unique model IDs found under
    ``<output_dir>/evaluation/<grammar_id>`` for the given grammars.

    Args:
        output_dir: root output directory, with or without a trailing
            separator
        grammar_ids: grammar folders to scan

    Returns:
        sorted list of unique model IDs (sorting makes the previously
        set-dependent ordering deterministic)
    """
    unique_ids = set()
    for grammar_id in grammar_ids:
        folder = os.path.join(output_dir, "evaluation", grammar_id)
        unique_ids.update(
            entry for entry in os.listdir(folder)
            if os.path.isdir(os.path.join(folder, entry)))
    return sorted(unique_ids)
def run_seqgra_summary(analysis_id: str,
                       comparator_ids: List[str],
                       output_dir: str,
                       grammar_ids: Optional[List[str]] = None,
                       model_ids: Optional[List[str]] = None,
                       set_names: Optional[List[str]] = None,
                       model_labels: Optional[List[str]] = None) -> None:
    """Run the selected comparators across grammars and models.

    Args:
        analysis_id: analysis identifier; sanitized and used to label
            the comparator output
        comparator_ids: IDs of the comparators to run; when empty,
            nothing is executed
        output_dir: root output directory (stripped and normalized by
            MiscHelper.format_output_dir)
        grammar_ids: grammar IDs to include; defaults to every grammar
            folder found under <output_dir>/evaluation
        model_ids: model IDs to include; defaults to every model folder
            found for the selected grammars
        set_names: set names (e.g. test) forwarded to compare_models
        model_labels: display labels for the models, forwarded to the
            comparator factory
    """
    analysis_id = MiscHelper.sanitize_id(analysis_id)
    output_dir = MiscHelper.format_output_dir(output_dir.strip())
    if comparator_ids:
        for comparator_id in comparator_ids:
            comparator: Comparator = IdResolver.get_comparator(analysis_id,
                                                               comparator_id,
                                                               output_dir,
                                                               model_labels)
            # defaults are resolved lazily on the first iteration (only
            # if comparators actually run) and reused afterwards
            if not grammar_ids:
                grammar_ids = get_all_grammar_ids(output_dir)
            if not model_ids:
                model_ids = get_all_model_ids(output_dir, grammar_ids)
            comparator.compare_models(grammar_ids, model_ids, set_names)
def create_parser():
    """Assemble the command-line argument parser for the seqgras tool."""
    parser = argparse.ArgumentParser(
        prog="seqgras",
        description="seqgra summary: Gather metrics across grammars, models, "
        "evaluators")
    parser.add_argument(
        "-v",
        "--version",
        action="version",
        version="%(prog)s " + seqgra.__version__)
    # (flags, keyword arguments) for every value-taking option; the help
    # strings are kept verbatim
    option_specs = [
        (("-a", "--analysis-id"),
         dict(type=str, required=True,
              help="analysis id (folder name for output)")),
        (("-c", "--comparators"),
         dict(type=str, required=True, nargs="+",
              help="comparator ID or IDs: IDs of "
                   "comparators include " +
                   ", ".join(sorted(c.ComparatorID.ALL_COMPARATOR_IDS)))),
        (("-o", "--output-dir"),
         dict(type=str, required=True,
              help="output directory, subdirectories are created for "
                   "generated data, trained model, and model evaluation")),
        (("-g", "--grammar-ids"),
         dict(type=str, default=None, nargs="+",
              help="one or more grammar IDs; defaults to all grammar IDs "
                   "in output dir")),
        (("-m", "--model-ids"),
         dict(type=str, default=None, nargs="+",
              help="one or more model IDs; defaults to all model IDs for "
                   "specified grammars in output dir")),
        (("-s", "--sets"),
         dict(type=str, default=["test"], nargs="+",
              help="one or more of the following: training, validation, "
                   "or test")),
        (("-l", "--model-labels"),
         dict(type=str, default=None, nargs="+",
              help="labels for models, must be same length as model_ids")),
    ]
    for flags, kwargs in option_specs:
        parser.add_argument(*flags, **kwargs)
    return parser
def main():
logging.basicConfig(level=logging.INFO)
parser = create_parser()
args = parser.parse_args()
for comparator in args.comparators:
if comparator not in c.ComparatorID.ALL_COMPARATOR_IDS:
|
run_seqgra_summary(args.analysis_id,
args.comparators,
args.output_dir,
args.grammar_ids,
args.model_ids,
args.sets,
args.model_labels)
if __name__ == "__main__":
main()
| raise ValueError(
"invalid comparator ID {s!r}".format(s=comparator)) |
route_validation_test.go | /*
Copyright 2018 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1alpha1
import (
"strings"
"testing"
"github.com/google/go-cmp/cmp"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"github.com/knative/pkg/apis"
)
// TestRouteValidation exercises Route.Validate end to end: valid routes,
// a traffic target that names neither a revision nor a configuration,
// and object names that violate the DNS-1035 label rules. Each case pins
// the exact FieldError (or nil) Validate must return.
func TestRouteValidation(t *testing.T) {
	tests := []struct {
		name string           // descriptive test-case name
		r    *Route           // route under validation
		want *apis.FieldError // expected validation error; nil when valid
	}{{
		name: "valid",
		r: &Route{
			ObjectMeta: metav1.ObjectMeta{
				Name: "valid",
			},
			Spec: RouteSpec{
				Traffic: []TrafficTarget{{
					RevisionName: "foo",
					Percent:      100,
				}},
			},
		},
		want: nil,
	}, {
		name: "valid split",
		r: &Route{
			ObjectMeta: metav1.ObjectMeta{
				Name: "valid",
			},
			Spec: RouteSpec{
				Traffic: []TrafficTarget{{
					Name:         "prod",
					RevisionName: "foo",
					Percent:      90,
				}, {
					Name:              "experiment",
					ConfigurationName: "bar",
					Percent:           10,
				}},
			},
		},
		want: nil,
	}, {
		name: "invalid traffic entry",
		r: &Route{
			ObjectMeta: metav1.ObjectMeta{
				Name: "valid",
			},
			Spec: RouteSpec{
				Traffic: []TrafficTarget{{
					Name:    "foo",
					Percent: 100,
				}},
			},
		},
		want: &apis.FieldError{
			Message: "expected exactly one, got neither",
			Paths: []string{
				"spec.traffic[0].configurationName",
				"spec.traffic[0].revisionName",
			},
		},
	}, {
		name: "invalid name - dots",
		r: &Route{
			ObjectMeta: metav1.ObjectMeta{
				Name: "do.not.use.dots",
			},
			Spec: RouteSpec{
				Traffic: []TrafficTarget{{
					RevisionName: "foo",
					Percent:      100,
				}},
			},
		},
		want: &apis.FieldError{
			Message: "not a DNS 1035 label: [a DNS-1035 label must consist of lower case alphanumeric characters or '-', start with an alphabetic character, and end with an alphanumeric character (e.g. 'my-name', or 'abc-123', regex used for validation is '[a-z]([-a-z0-9]*[a-z0-9])?')]",
			Paths:   []string{"metadata.name"},
		},
	}, {
		// both the name error and the percent-sum error are expected,
		// combined via FieldError.Also
		name: "invalid name - dots and spec percent is not 100",
		r: &Route{
			ObjectMeta: metav1.ObjectMeta{
				Name: "do.not.use.dots",
			},
			Spec: RouteSpec{
				Traffic: []TrafficTarget{{
					RevisionName: "foo",
					Percent:      90,
				}},
			},
		},
		want: (&apis.FieldError{
			Message: "not a DNS 1035 label: [a DNS-1035 label must consist of lower case alphanumeric characters or '-', start with an alphabetic character, and end with an alphanumeric character (e.g. 'my-name', or 'abc-123', regex used for validation is '[a-z]([-a-z0-9]*[a-z0-9])?')]",
			Paths:   []string{"metadata.name"},
		}).Also(&apis.FieldError{
			Message: "Traffic targets sum to 90, want 100",
			Paths:   []string{"spec.traffic"},
		}),
	}, {
		name: "invalid name - too long",
		r: &Route{
			ObjectMeta: metav1.ObjectMeta{
				// 64 characters exceeds the 63-character label limit
				Name: strings.Repeat("a", 64),
			},
			Spec: RouteSpec{
				Traffic: []TrafficTarget{{
					RevisionName: "foo",
					Percent:      100,
				}},
			},
		},
		want: &apis.FieldError{
			Message: "not a DNS 1035 label: [must be no more than 63 characters]",
			Paths:   []string{"metadata.name"},
		},
	}}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			got := test.r.Validate()
			// compare rendered error strings; both nil errors render ""
			if diff := cmp.Diff(test.want.Error(), got.Error()); diff != "" {
				t.Errorf("Validate (-want, +got) = %v", diff)
			}
		})
	}
}
// TestRouteSpecValidation exercises RouteSpec.Validate: empty specs,
// malformed traffic targets, invalid revision/configuration key names,
// duplicate target names, and traffic percentages that do not sum to 100.
func TestRouteSpecValidation(t *testing.T) {
	// shared expectation for every duplicate-name case below
	multipleDefinitionError := &apis.FieldError{
		Message: `Multiple definitions for "foo"`,
		Paths:   []string{"traffic[0].name", "traffic[1].name"},
	}
	tests := []struct {
		name string           // descriptive test-case name
		rs   *RouteSpec       // spec under validation
		want *apis.FieldError // expected validation error; nil when valid
	}{{
		name: "valid",
		rs: &RouteSpec{
			Traffic: []TrafficTarget{{
				RevisionName: "foo",
				Percent:      100,
			}},
		},
		want: nil,
	}, {
		name: "valid split",
		rs: &RouteSpec{
			Traffic: []TrafficTarget{{
				Name:         "prod",
				RevisionName: "foo",
				Percent:      90,
			}, {
				Name:              "experiment",
				ConfigurationName: "bar",
				Percent:           10,
			}},
		},
		want: nil,
	}, {
		name: "empty spec",
		rs:   &RouteSpec{},
		want: apis.ErrMissingField(apis.CurrentField),
	}, {
		name: "invalid traffic entry",
		rs: &RouteSpec{
			Traffic: []TrafficTarget{{
				Name:    "foo",
				Percent: 100,
			}},
		},
		want: &apis.FieldError{
			Message: "expected exactly one, got neither",
			Paths: []string{
				"traffic[0].configurationName",
				"traffic[0].revisionName",
			},
		},
	}, {
		name: "invalid revision name",
		rs: &RouteSpec{
			Traffic: []TrafficTarget{{
				RevisionName: "b@r",
				Percent:      100,
			}},
		},
		want: &apis.FieldError{
			Message: `invalid key name "b@r"`,
			Paths:   []string{"traffic[0].revisionName"},
			Details: `name part must consist of alphanumeric characters, '-', '_' or '.', and must start and end with an alphanumeric character (e.g. 'MyName',  or 'my.name',  or '123-abc', regex used for validation is '([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9]')`,
		},
	}, {
		name: "invalid revision name",
		rs: &RouteSpec{
			Traffic: []TrafficTarget{{
				ConfigurationName: "f**",
				Percent:           100,
			}},
		},
		want: &apis.FieldError{
			Message: `invalid key name "f**"`,
			Paths:   []string{"traffic[0].configurationName"},
			Details: `name part must consist of alphanumeric characters, '-', '_' or '.', and must start and end with an alphanumeric character (e.g. 'MyName',  or 'my.name',  or '123-abc', regex used for validation is '([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9]')`,
		},
	}, {
		name: "invalid name conflict",
		rs: &RouteSpec{
			Traffic: []TrafficTarget{{
				Name:         "foo",
				RevisionName: "bar",
				Percent:      50,
			}, {
				Name:         "foo",
				RevisionName: "baz",
				Percent:      50,
			}},
		},
		want: multipleDefinitionError,
	}, {
		name: "collision (same revision)",
		rs: &RouteSpec{
			Traffic: []TrafficTarget{{
				Name:         "foo",
				RevisionName: "bar",
				Percent:      50,
			}, {
				Name:         "foo",
				RevisionName: "bar",
				Percent:      50,
			}},
		},
		want: multipleDefinitionError,
	}, {
		name: "collision (same config)",
		rs: &RouteSpec{
			Traffic: []TrafficTarget{{
				Name:              "foo",
				ConfigurationName: "bar",
				Percent:           50,
			}, {
				Name:              "foo",
				ConfigurationName: "bar",
				Percent:           50,
			}},
		},
		want: multipleDefinitionError,
	}, {
		name: "invalid total percentage",
		rs: &RouteSpec{
			Traffic: []TrafficTarget{{
				RevisionName: "bar",
				Percent:      99,
			}, {
				RevisionName: "baz",
				Percent:      99,
			}},
		},
		want: &apis.FieldError{
			Message: "Traffic targets sum to 198, want 100",
			Paths:   []string{"traffic"},
		},
	}}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			got := test.rs.Validate()
			// compare rendered error strings; both nil errors render ""
			if diff := cmp.Diff(test.want.Error(), got.Error()); diff != "" {
				t.Errorf("Validate (-want, +got) = %v", diff)
			}
		})
	}
}
func TestTrafficTargetValidation(t *testing.T) | {
tests := []struct {
name string
tt *TrafficTarget
want *apis.FieldError
}{{
name: "valid with name and revision",
tt: &TrafficTarget{
Name: "foo",
RevisionName: "bar",
Percent: 12,
},
want: nil,
}, {
name: "valid with name and configuration",
tt: &TrafficTarget{
Name: "baz",
ConfigurationName: "blah",
Percent: 37,
},
want: nil,
}, {
name: "valid with no percent",
tt: &TrafficTarget{
Name: "ooga",
ConfigurationName: "booga",
},
want: nil,
}, {
name: "valid with no name",
tt: &TrafficTarget{
ConfigurationName: "booga",
Percent: 100,
},
want: nil,
}, {
name: "invalid with both",
tt: &TrafficTarget{
RevisionName: "foo",
ConfigurationName: "bar",
},
want: &apis.FieldError{
Message: "expected exactly one, got both",
Paths: []string{"revisionName", "configurationName"},
},
}, {
name: "invalid with neither",
tt: &TrafficTarget{
Name: "foo",
Percent: 100,
},
want: &apis.FieldError{
Message: "expected exactly one, got neither",
Paths: []string{"revisionName", "configurationName"},
},
}, {
name: "invalid percent too low",
tt: &TrafficTarget{
RevisionName: "foo",
Percent: -5,
},
want: apis.ErrOutOfBoundsValue("-5", "0", "100", "percent"),
}, {
name: "invalid percent too high",
tt: &TrafficTarget{
RevisionName: "foo",
Percent: 101,
},
want: apis.ErrOutOfBoundsValue("101", "0", "100", "percent"),
}}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
got := test.tt.Validate()
if diff := cmp.Diff(test.want.Error(), got.Error()); diff != "" {
t.Errorf("Validate (-want, +got) = %v", diff)
}
})
}
} |
|
basic.py | # -*- test-case-name: twisted.web2.test.test_httpauth -*-
from twisted.cred import credentials, error
from twisted.web2.auth.interfaces import ICredentialFactory
from zope.interface import implements
class BasicCredentialFactory(object):
    """
    Credential factory for HTTP Basic authentication.

    Produces a WWW-Authenticate challenge carrying the configured realm
    and decodes base64 ``user:password`` responses into
    ``UsernamePassword`` credentials.

    NOTE(review): the original class body started with a stray "|"
    artifact, removed here.
    """

    implements(ICredentialFactory)

    scheme = 'basic'

    def __init__(self, realm):
        # realm string advertised in the authentication challenge
        self.realm = realm

    def getChallenge(self, peer):
        """Return the challenge parameters sent to the client."""
        return {'realm': self.realm}

    def decode(self, response, request):
        """Decode a base64 Basic-auth response into credentials.

        Raises:
            error.LoginFailed: if the response is not valid base64 or
                does not contain a ':' separator.
        """
        try:
            # extra '=' padding makes the decode tolerant of responses
            # whose padding was stripped
            creds = (response + '===').decode('base64')
        except Exception:
            # narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed
            raise error.LoginFailed('Invalid credentials')

        creds = creds.split(':', 1)
        if len(creds) == 2:
            return credentials.UsernamePassword(*creds)
        else:
            raise error.LoginFailed('Invalid credentials')
|
stage01_rnasequencing_genesCountTable_io.py | #system
import json
#sbaas
from .stage01_rnasequencing_genesCountTable_query import stage01_rnasequencing_genesCountTable_query
from .stage01_rnasequencing_analysis_query import stage01_rnasequencing_analysis_query
from SBaaS_base.sbaas_template_io import sbaas_template_io
# Resources
from io_utilities.base_importData import base_importData
from io_utilities.base_exportData import base_exportData
from sequencing_analysis.genes_countFPKMattr_table import genes_countFPKMattr_table
from ddt_python.ddt_container_filterMenuAndChart2dAndTable import ddt_container_filterMenuAndChart2dAndTable
from ddt_python.ddt_container import ddt_container
from listDict.listDict import listDict
from math import log2
class stage01_rnasequencing_genesCountTable_io(
stage01_rnasequencing_genesCountTable_query,
stage01_rnasequencing_analysis_query,
sbaas_template_io):
    def import_dataStage01RNASequencingGenesCountTable_add(
        self,genes_count_table_dir,genes_fpkm_table_dir,
        genes_attr_table_dir,
        analysis_id_I,experiment_ids_I,samples_host_dirs_I,sample_names_I):
        '''Import count/FPKM/attribute tables, align them, and add the
        resulting rows to the genesCountTable table.

        Args:
            genes_count_table_dir: path to the genes count table file
            genes_fpkm_table_dir: path to the genes FPKM table file
            genes_attr_table_dir: path to the genes attribute table file
            analysis_id_I: analysis id the rows are recorded under
            experiment_ids_I: comma-separated experiment ids, one per sample
            samples_host_dirs_I: '|'-separated groups of ','-separated
                replicate file paths, one group per sample
            sample_names_I: comma-separated sample names, parallel to
                experiment_ids_I and samples_host_dirs_I
        '''
        countFPKMattr = genes_countFPKMattr_table();
        countFPKMattr.import_countTable(
            filename_I=genes_count_table_dir,);
        countFPKMattr.import_fpkmTable(
            filename_I=genes_fpkm_table_dir,);
        countFPKMattr.import_attrTable(
            filename_I=genes_attr_table_dir,);
        # map each sample name to its replicate file basenames and to its
        # experiment id
        sna2sns_I={};
        sna2experimentID_I={};
        sample_names_lst = sample_names_I.split(',');
        experiment_ids_lst = experiment_ids_I.split(',');
        for cnt,sample_replicates in enumerate(samples_host_dirs_I.split('|')):
            sna2sns_I[sample_names_lst[cnt]] = [];
            sna2experimentID_I[sample_names_lst[cnt]] = experiment_ids_lst[cnt];
            for sample in sample_replicates.split(','):
                # strip the directory and the sequencing-file extension to
                # recover the replicate name
                filename = sample.split('/')[-1].replace('.bam','').replace('.fastq','');
                sna2sns_I[sample_names_lst[cnt]].append(filename);
        genesCountTable = countFPKMattr.alignAndReformat_countFPKMattrTables(
            analysis_id_I = analysis_id_I,
            sna2experimentID_I = sna2experimentID_I,
            sna2sns_I = sna2sns_I)
        self.add_dataStage01RNASequencingGenesCountTable(genesCountTable);
def import_dataStage01RNASequencingGenesCountTable_update(self, filename):
|
    def export_dataStage01RNASequencingGenesCountTable_js(self,analysis_id_I,data_dir_I='tmp'):
        '''Export FPKM data for a box-and-whiskers plot.

        Args:
            analysis_id_I: analysis id whose experiments/samples are exported
            data_dir_I: 'tmp' writes a ddt_data.js file under the
                visualization directory; 'data_json' returns the data as a
                json string instead
        '''
        # get the analysis information
        experiment_ids,sample_names = [],[];
        experiment_ids,sample_names = self.get_experimentIDAndSampleName_analysisID_dataStage01RNASequencingAnalysis(analysis_id_I);
        data_O = [];
        for sample_name_cnt,sample_name in enumerate(sample_names):
            # query fpkm data:
            fpkms = [];
            fpkms = self.get_rows_experimentIDAndSampleName_dataStage01RNASequencingGenesCountTable(experiment_ids[sample_name_cnt],sample_name);
            data_O.extend(fpkms);
        # chart parameters: keys/nest-keys/keymap describe how the ddt
        # front end indexes and plots the rows
        data1_keys = ['experiment_id','sample_name','gene_short_name'
                      ];
        data1_nestkeys = ['gene_short_name'];
        data1_keymap = {'xdata':'gene_short_name',
                        'ydatamean':'FPKM',
                        'ydatalb':'FPKM_conf_lo',
                        'ydataub':'FPKM_conf_hi',
                        'serieslabel':'sample_name',
                        'featureslabel':'gene_short_name'};
        # make the data object
        dataobject_O = [{"data":data_O,"datakeys":data1_keys,"datanestkeys":data1_nestkeys}];
        # make the tile parameter objects (filter menu, svg plot, table)
        formtileparameters_O = {'tileheader':'Filter menu','tiletype':'html','tileid':"filtermenu1",'rowid':"row1",'colid':"col1",
            'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-4"};
        formparameters_O = {'htmlid':'filtermenuform1',"htmltype":'form_01',"formsubmitbuttonidtext":{'id':'submit1','text':'submit'},"formresetbuttonidtext":{'id':'reset1','text':'reset'},"formupdatebuttonidtext":{'id':'update1','text':'update'}};
        formtileparameters_O.update(formparameters_O);
        svgparameters_O = {"svgtype":'boxandwhiskersplot2d_02',"svgkeymap":[data1_keymap],
                            'svgid':'svg1',
                            "svgmargin":{ 'top': 50, 'right': 150, 'bottom': 50, 'left': 50 },
                            "svgwidth":500,"svgheight":350,
                            "svgx1axislabel":"gene","svgy1axislabel":"FPKM",
                            'svgformtileid':'filtermenu1','svgresetbuttonid':'reset1','svgsubmitbuttonid':'submit1'};
        svgtileparameters_O = {'tileheader':'Custom box and whiskers plot','tiletype':'svg','tileid':"tile2",'rowid':"row1",'colid':"col2",
            'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-8"};
        svgtileparameters_O.update(svgparameters_O);
        tableparameters_O = {"tabletype":'responsivetable_01',
                    'tableid':'table1',
                    "tablefilters":None,
                    "tableclass":"table table-condensed table-hover",
                    'tableformtileid':'filtermenu1','tableresetbuttonid':'reset1','tablesubmitbuttonid':'submit1'};
        tabletileparameters_O = {'tileheader':'FPKM','tiletype':'table','tileid':"tile3",'rowid':"row2",'colid':"col1",
            'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-12"};
        tabletileparameters_O.update(tableparameters_O);
        parametersobject_O = [formtileparameters_O,svgtileparameters_O,tabletileparameters_O];
        tile2datamap_O = {"filtermenu1":[0],"tile2":[0],"tile3":[0]};
        # dump the data to a json file
        ddtutilities = ddt_container(parameters_I = parametersobject_O,data_I = dataobject_O,tile2datamap_I = tile2datamap_O,filtermenu_I = None);
        if data_dir_I=='tmp':
            filename_str = self.settings['visualization_data'] + '/tmp/ddt_data.js'
        elif data_dir_I=='data_json':
            data_json_O = ddtutilities.get_allObjects_js();
            return data_json_O;
        # NOTE(review): if data_dir_I is neither 'tmp' nor 'data_json',
        # filename_str is unbound and the open() below raises NameError —
        # consider validating data_dir_I up front.
        with open(filename_str,'w') as file:
            file.write(ddtutilities.get_allObjects());
def export_dataStage01RNASequencingGenesCountTable_pairWisePlot_js(self,analysis_id_I,log2normalization_I=True,data_dir_I='tmp'):
'''Export data for a pairwise scatter plot
INPUT:
analysis_id = String, analysis_id
log2normalization_I = Boolean, apply a log2 normalization the FPKM values (default: True)
data_dir_I = string, data directory
OUTPUT:
'''
# get the analysis information
experiment_ids,sample_names = [],[];
experiment_ids,sample_names = self.get_experimentIDAndSampleName_analysisID_dataStage01RNASequencingAnalysis(analysis_id_I);
data_O = [];
for sample_name_cnt,sample_name in enumerate(sample_names):
# query fpkm data:
fpkms = [];
fpkms = self.get_rows_experimentIDAndSampleName_dataStage01RNASequencingGenesCountTable(experiment_ids[sample_name_cnt],sample_name);
if log2normalization_I:
for f in fpkms:
if f['FPKM'] == 0.0: f['FPKM'] = 0.0;
else: f['FPKM'] = log2(f['FPKM']);
data_O.extend(fpkms);
# reorganize the data
listdict = listDict(data_O);
data_O,columnValueHeader_O = listdict.convert_listDict2ColumnGroupListDict(
#value_labels_I = ['FPKM','FPKM_conf_lo','FPKM_conf_hi'],
value_labels_I = ['FPKM',],
column_labels_I = ['experiment_id','sample_name'],
feature_labels_I = ['gene_id','gene_short_name'],
na_str_I=0.0,
columnValueConnector_str_I='_',
);
# make the tile object
#data1 = filtermenu/table
data1_keymap_table = {
'xdata':'svd_method',
'ydata':'singular_value_index',
'zdata':'d_vector',
'rowslabel':'svd_method',
'columnslabel':'singular_value_index',
};
#data2 = svg
#if single plot, data2 = filter menu, data2, and table
data1_keys = ['gene_id','gene_short_name'
];
data1_nestkeys = ['gene_short_name'];
data1_keymap_svg = [];
svgtype = [];
svgtile2datamap = [];
data_svg_keymap = [];
for cnt1,column1 in enumerate(columnValueHeader_O):
for cnt2,column2 in enumerate(columnValueHeader_O[cnt1+1:]):
keymap = {
'xdata':column1,
'ydata':column2,
'serieslabel':'',
'featureslabel':'gene_short_name',
'tooltipdata':'gene_short_name',
};
data1_keymap_svg.append([keymap]);
data_svg_keymap.append(keymap);
svgtype.append('pcaplot2d_scores_01');
svgtile2datamap.append([0]);
nsvgtable = ddt_container_filterMenuAndChart2dAndTable();
nsvgtable.make_filterMenuAndChart2dAndTable(
data_filtermenu=data_O,
data_filtermenu_keys=data1_keys,
data_filtermenu_nestkeys=data1_nestkeys,
data_filtermenu_keymap=data1_keymap_table,
data_svg_keys=data1_keys,
data_svg_nestkeys=data1_nestkeys,
data_svg_keymap=data_svg_keymap,
data_table_keys=data1_keys,
data_table_nestkeys=data1_nestkeys,
data_table_keymap=data1_keymap_table,
data_svg=None,
data_table=None,
svgtype=svgtype,
tabletype='responsivetable_01',
svgx1axislabel='',
svgy1axislabel='',
tablekeymap = [data1_keymap_table],
svgkeymap = data1_keymap_svg,
formtile2datamap=[0],
tabletile2datamap=[0],
svgtile2datamap=svgtile2datamap,
svgfilters=None,
svgtileheader='Pair-wise scatter plot',
tablefilters=None,
tableheaders=None
);
if data_dir_I=='tmp':
filename_str = self.settings['visualization_data'] + '/tmp/ddt_data.js'
elif data_dir_I=='data_json':
data_json_O = nsvgtable.get_allObjects_js();
return data_json_O;
with open(filename_str,'w') as file:
file.write(nsvgtable.get_allObjects()); | '''table adds'''
data = base_importData();
data.read_csv(filename);
data.format_data();
self.update_dataStage01RNASequencingGenesCountTable(data.data);
data.clear_data(); |
feed.rs | extern crate failure;
extern crate hypercore;
extern crate random_access_memory as ram;
mod helpers;
use helpers::{copy_keys, create_feed};
use hypercore::{generate_keypair, Feed, NodeTrait, Storage};
#[test]
/// A `Feed` can be built from an explicitly generated keypair backed by
/// in-memory storage.
fn create_with_key() {
    let keypair = generate_keypair();
    let storage = Storage::new_memory().unwrap();
    let _feed = Feed::builder(keypair.public, storage)
        .secret_key(keypair.secret)
        .build()
        .unwrap();
}
#[test]
/// The `Display` rendering of a fresh feed has a fixed length.
fn display() {
    let feed = create_feed(50).unwrap();
    let output = format!("{}", feed);
    // NOTE(review): 61 is a magic number matching the current Display
    // format for an empty feed — confirm if the format ever changes.
    assert_eq!(output.len(), 61);
}
#[test]
/// Verify `.append()` and `.get()` work.
fn set_get() {
    let mut feed = create_feed(50).unwrap();
    feed.append(b"hello").unwrap();
    feed.append(b"world").unwrap();

    // entries are retrieved by their insertion index
    assert_eq!(feed.get(0).unwrap(), Some(b"hello".to_vec()));
    assert_eq!(feed.get(1).unwrap(), Some(b"world".to_vec()));
}
#[test]
/// Appending entries updates both the entry count and the byte length,
/// and each entry is retrievable by index.
fn append() {
    let mut feed = create_feed(50).unwrap();
    feed.append(br#"{"hello":"world"}"#).unwrap();
    feed.append(br#"{"hello":"mundo"}"#).unwrap();
    feed.append(br#"{"hello":"welt"}"#).unwrap();

    assert_eq!(feed.len(), 3);
    // 17 + 17 + 16 bytes across the three payloads
    assert_eq!(feed.byte_len(), 50);

    assert_eq!(feed.get(0).unwrap(), Some(br#"{"hello":"world"}"#.to_vec()));
    assert_eq!(feed.get(1).unwrap(), Some(br#"{"hello":"mundo"}"#.to_vec()));
    assert_eq!(feed.get(2).unwrap(), Some(br#"{"hello":"welt"}"#.to_vec()));
}
#[test]
/// Verify the `.root_hashes()` method returns the right nodes.
fn root_hashes() {
    // If no roots exist we should get an error.
    let mut feed = create_feed(50).unwrap();
    let res = feed.root_hashes(0);
    assert!(res.is_err());

    // If 1 entry exists, [0] should be the root.
    feed.append(b"data").unwrap();
    let roots = feed.root_hashes(0).unwrap();
    assert_eq!(roots.len(), 1);
    assert_eq!(roots[0].index(), 0);

    // If we query out of bounds, we should get an error.
    let res = feed.root_hashes(6);
    assert!(res.is_err());

    // If 3 entries exist, [2,4] should be the roots.
    feed.append(b"data").unwrap();
    feed.append(b"data").unwrap();
    let roots = feed.root_hashes(2).unwrap();
    assert_eq!(roots.len(), 2);
    // flat-tree indexes of the two roots
    assert_eq!(roots[0].index(), 1);
    assert_eq!(roots[1].index(), 4);
}
#[test]
fn verify() |
#[test]
/// Replicate entries from feed `a` into feed `b` via proofs: `b` shares
/// `a`'s keypair but starts with empty storage, so data must arrive
/// through `.put()`.
fn put() {
    let mut a = create_feed(50).unwrap();
    let (public, secret) = copy_keys(&a);
    let storage = Storage::new(|_| Ok(ram::RandomAccessMemory::new(50))).unwrap();
    let mut b = Feed::builder(public, storage)
        .secret_key(secret)
        .build()
        .unwrap();

    // Populate the source feed so there is something to prove.
    for _ in 0..10 {
        a.append(b"foo").unwrap();
    }

    // First transfer: a plain proof for index 0 into the empty feed.
    let proof = a.proof(0, true).unwrap();
    b.put(0, None, proof).expect("no error");
    // Second transfer: the proof is built against b's current digest for
    // index 4, so it only covers what b is still missing.
    let proof = a
        .proof_with_digest(4, b.digest(4), true)
        .expect(".proof() index 4, digest 4");
    b.put(4, None, proof).unwrap();
}
#[test]
fn create_with_storage() {
    // With no keys persisted, `with_storage` must still produce a feed.
    let storage = Storage::new_memory().unwrap();
    let feed = Feed::with_storage(storage);
    assert!(
        feed.is_ok(),
        "Could not create a feed with a storage."
    );
}
#[test]
fn create_with_stored_public_key() {
    // Persist only the public half of a keypair; the feed should still open.
    let keypair = generate_keypair();
    let mut storage = Storage::new_memory().unwrap();
    storage.write_public_key(&keypair.public).unwrap();
    let feed = Feed::with_storage(storage);
    assert!(
        feed.is_ok(),
        "Could not create a feed with a stored public key."
    );
}
#[test]
fn create_with_stored_keys() {
    // Persist both halves of a keypair before opening the feed.
    let keypair = generate_keypair();
    let mut storage = Storage::new_memory().unwrap();
    storage.write_public_key(&keypair.public).unwrap();
    storage.write_secret_key(&keypair.secret).unwrap();
    let feed = Feed::with_storage(storage);
    assert!(
        feed.is_ok(),
        "Could not create a feed with a stored keypair."
    );
}
| {
let mut feed = create_feed(50).unwrap();
let (public, secret) = copy_keys(&feed);
let feed_bytes = secret.to_bytes().to_vec();
let storage = Storage::new(|_| Ok(ram::RandomAccessMemory::new(50))).unwrap();
let mut evil_feed = Feed::builder(public, storage)
.secret_key(secret)
.build()
.unwrap();
let evil_bytes = match &feed.secret_key() {
Some(key) => key.to_bytes(),
None => panic!("no secret key found"),
};
// Verify the keys are the same.
assert_eq!(&feed_bytes, &evil_bytes.to_vec());
// Verify that the signature on a single feed is correct.
feed.append(b"test").unwrap();
let sig = feed.signature(0).unwrap();
feed.verify(0, &sig).unwrap();
// Verify that the signature between two different feeds is different.
evil_feed.append(b"t0st").unwrap();
let res = evil_feed.verify(0, &sig);
assert!(res.is_err());
} |
properties.go | // Go support for Protocol Buffers - Google's data interchange format
//
// Copyright 2010 The Go Authors. All rights reserved.
// https://github.com/golang/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package proto
/*
* Routines for encoding data into the wire format for protocol buffers.
*/
import (
"fmt"
"log"
"os"
"reflect"
"sort"
"strconv"
"strings"
"sync"
)
// debug enables verbose per-field printing while properties are built
// (see getPropertiesLocked).
const debug bool = false

// Constants that identify the encoding of a value on the wire.
const (
	WireVarint     = 0 // varint-encoded integers, bools, enums
	WireFixed64    = 1 // 8-byte fixed-width values
	WireBytes      = 2 // length-delimited: strings, bytes, sub-messages
	WireStartGroup = 3 // deprecated group start marker
	WireEndGroup   = 4 // deprecated group end marker
	WireFixed32    = 5 // 4-byte fixed-width values
)
// tagMap is an optimization over map[int]int for typical protocol buffer
// use-cases. Encoded protocol buffers are often in tag order with small tag
// numbers.
type tagMap struct {
	fastTags []int       // dense slice indexed by tag; -1 marks an absent entry
	slowTags map[int]int // lazily-allocated fallback for tags >= tagMapFastLimit
}

// tagMapFastLimit is the upper bound on the tag number that will be stored in
// the tagMap slice rather than its map.
const tagMapFastLimit = 1024
// get returns the field index stored for tag t and whether the tag is present.
func (p *tagMap) get(t int) (int, bool) {
	// Tags outside the fast range (including non-positive ones) live in the map.
	if t <= 0 || t >= tagMapFastLimit {
		fi, ok := p.slowTags[t]
		return fi, ok
	}
	// Fast path: slice lookup; -1 sentinel means "not present".
	if t >= len(p.fastTags) {
		return 0, false
	}
	idx := p.fastTags[t]
	return idx, idx >= 0
}
// put records fi as the field index for tag t, growing the fast slice or
// lazily allocating the slow map as needed.
func (p *tagMap) put(t int, fi int) {
	if t <= 0 || t >= tagMapFastLimit {
		if p.slowTags == nil {
			p.slowTags = make(map[int]int)
		}
		p.slowTags[t] = fi
		return
	}
	// Pad the slice with the -1 sentinel until index t is addressable.
	for len(p.fastTags) <= t {
		p.fastTags = append(p.fastTags, -1)
	}
	p.fastTags[t] = fi
}
// StructProperties represents properties for all the fields of a struct.
// decoderTags and decoderOrigNames should only be used by the decoder.
// It also implements sort.Interface (Len/Less/Swap) over `order`, which
// sorts struct field numbers by ascending protobuf tag.
type StructProperties struct {
	Prop             []*Properties  // properties for each field
	reqCount         int            // required count
	decoderTags      tagMap         // map from proto tag to struct field number
	decoderOrigNames map[string]int // map from original name to struct field number
	order            []int          // list of struct field numbers in tag order

	// OneofTypes contains information about the oneof fields in this message.
	// It is keyed by the original name of a field.
	OneofTypes map[string]*OneofProperties
}
// OneofProperties represents information about a specific field in a oneof.
type OneofProperties struct {
	Type  reflect.Type // pointer to generated struct type for this oneof field
	Field int          // struct field number of the containing oneof in the message
	Prop  *Properties  // parsed tag properties of the wrapper struct's single field
}
// Implement the sorting interface so we can sort the fields in tag order, as recommended by the spec. | func (sp *StructProperties) Less(i, j int) bool {
return sp.Prop[sp.order[i]].Tag < sp.Prop[sp.order[j]].Tag
}
func (sp *StructProperties) Swap(i, j int) { sp.order[i], sp.order[j] = sp.order[j], sp.order[i] }
// Properties represents the protocol-specific behavior of a single struct field.
type Properties struct {
	Name     string // name of the field, for error messages
	OrigName string // original name before protocol compiler (always set)
	JSONName string // name to use for JSON; determined by protoc
	Wire     string // wire encoding keyword from the struct tag, e.g. "varint", "bytes"
	WireType int    // one of the Wire* constants, derived from Wire by Parse
	Tag      int    // protobuf field number
	Required bool
	Optional bool
	Repeated bool
	Packed   bool   // relevant for repeated primitives only
	Enum     string // set for enum types only
	proto3   bool   // whether this is known to be a proto3 field; set for []byte only
	oneof    bool   // whether this is a oneof field

	Default    string // default value
	HasDefault bool   // whether an explicit default was provided

	stype reflect.Type      // set for struct types only
	sprop *StructProperties // set for struct types only

	mtype    reflect.Type // set for map types only
	mkeyprop *Properties  // set for map types only
	mvalprop *Properties  // set for map types only
}
// String formats the properties in the protobuf struct field tag style:
// wire type, tag number, then comma-separated modifiers, with any default
// value last.
func (p *Properties) String() string {
	s := p.Wire + "," + strconv.Itoa(p.Tag)
	// Cardinality/packing flags, in canonical order.
	flags := []struct {
		on  bool
		txt string
	}{
		{p.Required, ",req"},
		{p.Optional, ",opt"},
		{p.Repeated, ",rep"},
		{p.Packed, ",packed"},
	}
	for _, f := range flags {
		if f.on {
			s += f.txt
		}
	}
	s += ",name=" + p.OrigName
	if p.JSONName != p.OrigName {
		s += ",json=" + p.JSONName
	}
	if p.proto3 {
		s += ",proto3"
	}
	if p.oneof {
		s += ",oneof"
	}
	if p.Enum != "" {
		s += ",enum=" + p.Enum
	}
	// def= must stay last: Parse treats everything after it as the default.
	if p.HasDefault {
		s += ",def=" + p.Default
	}
	return s
}
// Parse populates p by parsing a string in the protobuf struct field tag style.
// Malformed tags are reported to stderr (or silently ignored for a bad tag
// number) rather than returned as errors.
func (p *Properties) Parse(s string) {
	// "bytes,49,opt,name=foo,def=hello!"
	fields := strings.Split(s, ",") // breaks def=, but handled below.
	if len(fields) < 2 {
		fmt.Fprintf(os.Stderr, "proto: tag has too few fields: %q\n", s)
		return
	}

	p.Wire = fields[0]
	switch p.Wire {
	case "varint":
		p.WireType = WireVarint
	case "fixed32":
		p.WireType = WireFixed32
	case "fixed64":
		p.WireType = WireFixed64
	case "zigzag32":
		// Zigzag encodings ride on the varint wire format.
		p.WireType = WireVarint
	case "zigzag64":
		p.WireType = WireVarint
	case "bytes", "group":
		p.WireType = WireBytes
		// no numeric converter for non-numeric types
	default:
		fmt.Fprintf(os.Stderr, "proto: tag has unknown wire type: %q\n", s)
		return
	}

	var err error
	p.Tag, err = strconv.Atoi(fields[1])
	if err != nil {
		// Unparseable tag number: leave p.Tag at zero and bail.
		return
	}

outer:
	for i := 2; i < len(fields); i++ {
		f := fields[i]
		switch {
		case f == "req":
			p.Required = true
		case f == "opt":
			p.Optional = true
		case f == "rep":
			p.Repeated = true
		case f == "packed":
			p.Packed = true
		case strings.HasPrefix(f, "name="):
			p.OrigName = f[5:]
		case strings.HasPrefix(f, "json="):
			p.JSONName = f[5:]
		case strings.HasPrefix(f, "enum="):
			p.Enum = f[5:]
		case f == "proto3":
			p.proto3 = true
		case f == "oneof":
			p.oneof = true
		case strings.HasPrefix(f, "def="):
			p.HasDefault = true
			p.Default = f[4:] // rest of string
			if i+1 < len(fields) {
				// Commas aren't escaped, and def is always last.
				// Re-join everything the Split above broke apart.
				p.Default += "," + strings.Join(fields[i+1:], ",")
				break outer
			}
		}
	}
}
var protoMessageType = reflect.TypeOf((*Message)(nil)).Elem()
// setFieldProps initializes the field properties for submessages and maps.
func (p *Properties) setFieldProps(typ reflect.Type, f *reflect.StructField, lockGetProp bool) {
	switch t1 := typ; t1.Kind() {
	case reflect.Ptr:
		// *T where T is a struct: a singular submessage.
		if t1.Elem().Kind() == reflect.Struct {
			p.stype = t1.Elem()
		}

	case reflect.Slice:
		// []*T where T is a struct: a repeated submessage.
		if t2 := t1.Elem(); t2.Kind() == reflect.Ptr && t2.Elem().Kind() == reflect.Struct {
			p.stype = t2.Elem()
		}

	case reflect.Map:
		// Map fields get separate key/value sub-properties parsed from the
		// protobuf_key / protobuf_val struct tags.
		p.mtype = t1
		p.mkeyprop = &Properties{}
		p.mkeyprop.init(reflect.PtrTo(p.mtype.Key()), "Key", f.Tag.Get("protobuf_key"), nil, lockGetProp)
		p.mvalprop = &Properties{}
		vtype := p.mtype.Elem()
		if vtype.Kind() != reflect.Ptr && vtype.Kind() != reflect.Slice {
			// The value type is not a message (*T) or bytes ([]byte),
			// so we need encoders for the pointer to this type.
			vtype = reflect.PtrTo(vtype)
		}
		p.mvalprop.init(vtype, "Value", f.Tag.Get("protobuf_val"), nil, lockGetProp)
	}

	if p.stype != nil {
		// Resolve the submessage's own properties, taking the cache lock
		// only if the caller does not already hold it.
		if lockGetProp {
			p.sprop = GetProperties(p.stype)
		} else {
			p.sprop = getPropertiesLocked(p.stype)
		}
	}
}
var (
marshalerType = reflect.TypeOf((*Marshaler)(nil)).Elem()
)
// Init populates the properties from a protocol buffer struct tag.
// It is the exported, locking entry point: sub-struct property lookups
// will acquire the properties cache lock.
func (p *Properties) Init(typ reflect.Type, name, tag string, f *reflect.StructField) {
	p.init(typ, name, tag, f, true)
}
// init populates p from a struct tag such as "bytes,49,opt,def=hello!".
// lockGetProp selects whether sub-struct lookups take the cache lock.
func (p *Properties) init(typ reflect.Type, name, tag string, f *reflect.StructField, lockGetProp bool) {
	p.Name = name
	p.OrigName = name
	// An empty tag leaves only the names populated.
	if tag != "" {
		p.Parse(tag)
		p.setFieldProps(typ, f, lockGetProp)
	}
}
var (
propertiesMu sync.RWMutex
propertiesMap = make(map[reflect.Type]*StructProperties)
)
// GetProperties returns the list of properties for the type represented by t.
// t must represent a generated struct type of a protocol message.
// Results are cached in propertiesMap under propertiesMu.
func GetProperties(t reflect.Type) *StructProperties {
	if t.Kind() != reflect.Struct {
		panic("proto: type must have kind struct")
	}

	// Most calls to GetProperties in a long-running program will be
	// retrieving details for types we have seen before.
	propertiesMu.RLock()
	sprop, ok := propertiesMap[t]
	propertiesMu.RUnlock()
	if ok {
		if collectStats {
			stats.Chit++
		}
		return sprop
	}

	// Slow path: compute (and cache) the properties under the write lock.
	propertiesMu.Lock()
	sprop = getPropertiesLocked(t)
	propertiesMu.Unlock()
	return sprop
}
// getPropertiesLocked requires that propertiesMu is held.
// It builds (and caches) the StructProperties for t: per-field tag parsing,
// tag-sorted field order, oneof metadata, and the decoder's lookup tables.
func getPropertiesLocked(t reflect.Type) *StructProperties {
	if prop, ok := propertiesMap[t]; ok {
		if collectStats {
			stats.Chit++
		}
		return prop
	}
	if collectStats {
		stats.Cmiss++
	}

	prop := new(StructProperties)
	// in case of recursive protos, fill this in now.
	propertiesMap[t] = prop

	// build properties
	prop.Prop = make([]*Properties, t.NumField())
	prop.order = make([]int, t.NumField())

	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		p := new(Properties)
		name := f.Name
		// lockGetProp=false: we already hold propertiesMu.
		p.init(f.Type, name, f.Tag.Get("protobuf"), &f, false)

		oneof := f.Tag.Get("protobuf_oneof") // special case
		if oneof != "" {
			// Oneof fields don't use the traditional protobuf tag.
			p.OrigName = oneof
		}
		prop.Prop[i] = p
		prop.order[i] = i
		if debug {
			print(i, " ", f.Name, " ", t.String(), " ")
			if p.Tag > 0 {
				print(p.String())
			}
			print("\n")
		}
	}

	// Re-order prop.order. StructProperties implements sort.Interface,
	// comparing fields by protobuf tag number.
	sort.Sort(prop)

	type oneofMessage interface {
		XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{})
	}
	if om, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok {
		var oots []interface{}
		_, _, _, oots = om.XXX_OneofFuncs()

		// Interpret oneof metadata.
		prop.OneofTypes = make(map[string]*OneofProperties)
		for _, oot := range oots {
			oop := &OneofProperties{
				Type: reflect.ValueOf(oot).Type(), // *T
				Prop: new(Properties),
			}
			sft := oop.Type.Elem().Field(0)
			oop.Prop.Name = sft.Name
			oop.Prop.Parse(sft.Tag.Get("protobuf"))
			// There will be exactly one interface field that
			// this new value is assignable to.
			for i := 0; i < t.NumField(); i++ {
				f := t.Field(i)
				if f.Type.Kind() != reflect.Interface {
					continue
				}
				if !oop.Type.AssignableTo(f.Type) {
					continue
				}
				oop.Field = i
				break
			}
			prop.OneofTypes[oop.Prop.OrigName] = oop
		}
	}

	// build required counts
	// build tags
	reqCount := 0
	prop.decoderOrigNames = make(map[string]int)
	for i, p := range prop.Prop {
		if strings.HasPrefix(p.Name, "XXX_") {
			// Internal fields should not appear in tags/origNames maps.
			// They are handled specially when encoding and decoding.
			continue
		}
		if p.Required {
			reqCount++
		}
		prop.decoderTags.put(p.Tag, i)
		prop.decoderOrigNames[p.OrigName] = i
	}
	prop.reqCount = reqCount

	return prop
}
// A global registry of enum types.
// The generated code will register the generated maps by calling RegisterEnum.
// Keyed by the enum type name passed to RegisterEnum; each value maps enum
// value names to their integer constants.
var enumValueMaps = make(map[string]map[string]int32)
// RegisterEnum is called from the generated code to install the enum descriptor
// maps into the global table to aid parsing text format protocol buffers.
// Registering the same type name twice is a programming error and panics.
func RegisterEnum(typeName string, unusedNameMap map[int32]string, valueMap map[string]int32) {
	_, dup := enumValueMaps[typeName]
	if dup {
		panic("proto: duplicate enum registered: " + typeName)
	}
	enumValueMaps[typeName] = valueMap
}
// EnumValueMap returns the mapping from names to integers of the
// enum type enumType, or a nil if not found.
func EnumValueMap(enumType string) map[string]int32 {
	if m, ok := enumValueMaps[enumType]; ok {
		return m
	}
	return nil
}
// A registry of all linked message types.
// The string is a fully-qualified proto name ("pkg.Message").
var (
	protoTypedNils = make(map[string]Message)      // a map from proto names to typed nil pointers
	protoMapTypes  = make(map[string]reflect.Type) // a map from proto names to map types
	revProtoTypes  = make(map[reflect.Type]string) // reverse index: Go type back to proto name
)
// RegisterType is called from generated code and maps from the fully qualified
// proto name to the type (pointer to struct) of the protocol buffer.
// Duplicate registrations are logged and ignored.
func RegisterType(x Message, name string) {
	if _, ok := protoTypedNils[name]; ok {
		// TODO: Some day, make this a panic.
		log.Printf("proto: duplicate proto type registered: %s", name)
		return
	}
	t := reflect.TypeOf(x)
	if v := reflect.ValueOf(x); v.Kind() == reflect.Ptr && v.Pointer() == 0 {
		// Generated code always calls RegisterType with nil x.
		// This check is just for extra safety.
		protoTypedNils[name] = x
	} else {
		// Store a typed nil of the same type so the registry never
		// retains the caller's value.
		protoTypedNils[name] = reflect.Zero(t).Interface().(Message)
	}
	revProtoTypes[t] = name
}
// RegisterMapType is called from generated code and maps from the fully qualified
// proto name to the native map type of the proto map definition.
// A non-map argument panics; duplicate names are logged and ignored.
func RegisterMapType(x interface{}, name string) {
	t := reflect.TypeOf(x)
	if t.Kind() != reflect.Map {
		panic(fmt.Sprintf("RegisterMapType(%T, %q); want map", x, name))
	}
	if _, dup := protoMapTypes[name]; dup {
		log.Printf("proto: duplicate proto type registered: %s", name)
		return
	}
	protoMapTypes[name] = t
	revProtoTypes[t] = name
}
// MessageName returns the fully-qualified proto name for the given message type.
// A message that reports its own name via XXX_MessageName takes precedence
// over the registry lookup.
func MessageName(x Message) string {
	if m, ok := x.(interface {
		XXX_MessageName() string
	}); ok {
		return m.XXX_MessageName()
	}
	return revProtoTypes[reflect.TypeOf(x)]
}
// MessageType returns the message type (pointer to struct) for a named message.
// The type is not guaranteed to implement proto.Message if the name refers to a
// map entry.
func MessageType(name string) reflect.Type {
	t, ok := protoTypedNils[name]
	if !ok {
		// Fall back to registered map-entry types.
		return protoMapTypes[name]
	}
	return reflect.TypeOf(t)
}
// A registry of all linked proto files.
var (
	protoFiles = make(map[string][]byte) // file name => fileDescriptor (compressed bytes)
)
// RegisterFile is called from generated code and maps from the
// full file name of a .proto file to its compressed FileDescriptorProto.
// Re-registering a filename silently replaces the previous descriptor.
func RegisterFile(filename string, fileDescriptor []byte) {
	protoFiles[filename] = fileDescriptor
}
// FileDescriptor returns the compressed FileDescriptorProto for a .proto file.
func FileDescriptor(filename string) []byte { return protoFiles[filename] } | // See encode.go, (*Buffer).enc_struct.
func (sp *StructProperties) Len() int { return len(sp.order) } |
snat.py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['SnatArgs', 'Snat']
@pulumi.input_type
class SnatArgs:
    # Generated Pulumi input type: `name` and `origins` are required; every
    # other argument is optional and only recorded when explicitly provided.
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 origins: pulumi.Input[Sequence[pulumi.Input['SnatOriginArgs']]],
                 autolasthop: Optional[pulumi.Input[str]] = None,
                 full_path: Optional[pulumi.Input[str]] = None,
                 mirror: Optional[pulumi.Input[str]] = None,
                 partition: Optional[pulumi.Input[str]] = None,
                 snatpool: Optional[pulumi.Input[str]] = None,
                 sourceport: Optional[pulumi.Input[str]] = None,
                 translation: Optional[pulumi.Input[str]] = None,
                 vlans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 vlansdisabled: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a Snat resource.
        :param pulumi.Input[str] name: Name of the snat
        :param pulumi.Input[Sequence[pulumi.Input['SnatOriginArgs']]] origins: IP or hostname of the snat
        :param pulumi.Input[str] autolasthop: -(Optional) Specifies whether to automatically map last hop for pools or not. The default is to use next level's default.
        :param pulumi.Input[str] full_path: Fullpath
        :param pulumi.Input[str] mirror: Enables or disables mirroring of SNAT connections.
        :param pulumi.Input[str] partition: Displays the administrative partition within which this profile resides
        :param pulumi.Input[str] snatpool: Specifies the name of a SNAT pool. You can only use this option when automap and translation are not used.
        :param pulumi.Input[str] sourceport: Specifies whether the system preserves the source port of the connection. The default is preserve. Use of the preserve-strict setting should be restricted to UDP only under very special circumstances such as nPath or transparent (that is, no translation of any other L3/L4 field), where there is a 1:1 relationship between virtual IP addresses and node addresses, or when clustered multi-processing (CMP) is disabled. The change setting is useful for obfuscating internal network addresses.
        :param pulumi.Input[str] translation: Specifies the name of a translated IP address. Note that translated addresses are outside the traffic management system. You can only use this option when automap and snatpool are not used.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] vlans: Specifies the name of the VLAN to which you want to assign the SNAT. The default is vlans-enabled.
        :param pulumi.Input[bool] vlansdisabled: Disables the SNAT on all VLANs.
        """
        # Optional values are skipped when None so the provider can apply
        # its own defaults.
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "origins", origins)
        if autolasthop is not None:
            pulumi.set(__self__, "autolasthop", autolasthop)
        if full_path is not None:
            pulumi.set(__self__, "full_path", full_path)
        if mirror is not None:
            pulumi.set(__self__, "mirror", mirror)
        if partition is not None:
            pulumi.set(__self__, "partition", partition)
        if snatpool is not None:
            pulumi.set(__self__, "snatpool", snatpool)
        if sourceport is not None:
            pulumi.set(__self__, "sourceport", sourceport)
        if translation is not None:
            pulumi.set(__self__, "translation", translation)
        if vlans is not None:
            pulumi.set(__self__, "vlans", vlans)
        if vlansdisabled is not None:
            pulumi.set(__self__, "vlansdisabled", vlansdisabled)

    # Each property below is a getter/setter pair over the Pulumi input
    # storage; the getter name maps to the provider-side attribute.
    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        Name of the snat
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def origins(self) -> pulumi.Input[Sequence[pulumi.Input['SnatOriginArgs']]]:
        """
        IP or hostname of the snat
        """
        return pulumi.get(self, "origins")

    @origins.setter
    def origins(self, value: pulumi.Input[Sequence[pulumi.Input['SnatOriginArgs']]]):
        pulumi.set(self, "origins", value)

    @property
    @pulumi.getter
    def autolasthop(self) -> Optional[pulumi.Input[str]]:
        """
        -(Optional) Specifies whether to automatically map last hop for pools or not. The default is to use next level's default.
        """
        return pulumi.get(self, "autolasthop")

    @autolasthop.setter
    def autolasthop(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "autolasthop", value)

    @property
    @pulumi.getter(name="fullPath")
    def full_path(self) -> Optional[pulumi.Input[str]]:
        """
        Fullpath
        """
        return pulumi.get(self, "full_path")

    @full_path.setter
    def full_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "full_path", value)

    @property
    @pulumi.getter
    def mirror(self) -> Optional[pulumi.Input[str]]:
        """
        Enables or disables mirroring of SNAT connections.
        """
        return pulumi.get(self, "mirror")

    @mirror.setter
    def mirror(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "mirror", value)

    @property
    @pulumi.getter
    def partition(self) -> Optional[pulumi.Input[str]]:
        """
        Displays the administrative partition within which this profile resides
        """
        return pulumi.get(self, "partition")

    @partition.setter
    def partition(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "partition", value)

    @property
    @pulumi.getter
    def snatpool(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of a SNAT pool. You can only use this option when automap and translation are not used.
        """
        return pulumi.get(self, "snatpool")

    @snatpool.setter
    def snatpool(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "snatpool", value)

    @property
    @pulumi.getter
    def sourceport(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies whether the system preserves the source port of the connection. The default is preserve. Use of the preserve-strict setting should be restricted to UDP only under very special circumstances such as nPath or transparent (that is, no translation of any other L3/L4 field), where there is a 1:1 relationship between virtual IP addresses and node addresses, or when clustered multi-processing (CMP) is disabled. The change setting is useful for obfuscating internal network addresses.
        """
        return pulumi.get(self, "sourceport")

    @sourceport.setter
    def sourceport(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sourceport", value)

    @property
    @pulumi.getter
    def translation(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of a translated IP address. Note that translated addresses are outside the traffic management system. You can only use this option when automap and snatpool are not used.
        """
        return pulumi.get(self, "translation")

    @translation.setter
    def translation(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "translation", value)

    @property
    @pulumi.getter
    def vlans(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Specifies the name of the VLAN to which you want to assign the SNAT. The default is vlans-enabled.
        """
        return pulumi.get(self, "vlans")

    @vlans.setter
    def vlans(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "vlans", value)

    @property
    @pulumi.getter
    def vlansdisabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Disables the SNAT on all VLANs.
        """
        return pulumi.get(self, "vlansdisabled")

    @vlansdisabled.setter
    def vlansdisabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "vlansdisabled", value)
@pulumi.input_type
class _SnatState:
    # Generated Pulumi state type for Snat lookups: unlike SnatArgs, every
    # field (including name/origins) is optional because a state snapshot
    # may be partial.
    def __init__(__self__, *,
                 autolasthop: Optional[pulumi.Input[str]] = None,
                 full_path: Optional[pulumi.Input[str]] = None,
                 mirror: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 origins: Optional[pulumi.Input[Sequence[pulumi.Input['SnatOriginArgs']]]] = None,
                 partition: Optional[pulumi.Input[str]] = None,
                 snatpool: Optional[pulumi.Input[str]] = None,
                 sourceport: Optional[pulumi.Input[str]] = None,
                 translation: Optional[pulumi.Input[str]] = None,
                 vlans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 vlansdisabled: Optional[pulumi.Input[bool]] = None):
        """
        Input properties used for looking up and filtering Snat resources.
        :param pulumi.Input[str] autolasthop: -(Optional) Specifies whether to automatically map last hop for pools or not. The default is to use next level's default.
        :param pulumi.Input[str] full_path: Fullpath
        :param pulumi.Input[str] mirror: Enables or disables mirroring of SNAT connections.
        :param pulumi.Input[str] name: Name of the snat
        :param pulumi.Input[Sequence[pulumi.Input['SnatOriginArgs']]] origins: IP or hostname of the snat
        :param pulumi.Input[str] partition: Displays the administrative partition within which this profile resides
        :param pulumi.Input[str] snatpool: Specifies the name of a SNAT pool. You can only use this option when automap and translation are not used.
        :param pulumi.Input[str] sourceport: Specifies whether the system preserves the source port of the connection. The default is preserve. Use of the preserve-strict setting should be restricted to UDP only under very special circumstances such as nPath or transparent (that is, no translation of any other L3/L4 field), where there is a 1:1 relationship between virtual IP addresses and node addresses, or when clustered multi-processing (CMP) is disabled. The change setting is useful for obfuscating internal network addresses.
        :param pulumi.Input[str] translation: Specifies the name of a translated IP address. Note that translated addresses are outside the traffic management system. You can only use this option when automap and snatpool are not used.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] vlans: Specifies the name of the VLAN to which you want to assign the SNAT. The default is vlans-enabled.
        :param pulumi.Input[bool] vlansdisabled: Disables the SNAT on all VLANs.
        """
        # Only set the values that were actually supplied.
        if autolasthop is not None:
            pulumi.set(__self__, "autolasthop", autolasthop)
        if full_path is not None:
            pulumi.set(__self__, "full_path", full_path)
        if mirror is not None:
            pulumi.set(__self__, "mirror", mirror)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if origins is not None:
            pulumi.set(__self__, "origins", origins)
        if partition is not None:
            pulumi.set(__self__, "partition", partition)
        if snatpool is not None:
            pulumi.set(__self__, "snatpool", snatpool)
        if sourceport is not None:
            pulumi.set(__self__, "sourceport", sourceport)
        if translation is not None:
            pulumi.set(__self__, "translation", translation)
        if vlans is not None:
            pulumi.set(__self__, "vlans", vlans)
        if vlansdisabled is not None:
            pulumi.set(__self__, "vlansdisabled", vlansdisabled)

    # Getter/setter pairs over the Pulumi input storage, one per attribute.
    @property
    @pulumi.getter
    def autolasthop(self) -> Optional[pulumi.Input[str]]:
        """
        -(Optional) Specifies whether to automatically map last hop for pools or not. The default is to use next level's default.
        """
        return pulumi.get(self, "autolasthop")

    @autolasthop.setter
    def autolasthop(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "autolasthop", value)

    @property
    @pulumi.getter(name="fullPath")
    def full_path(self) -> Optional[pulumi.Input[str]]:
        """
        Fullpath
        """
        return pulumi.get(self, "full_path")

    @full_path.setter
    def full_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "full_path", value)

    @property
    @pulumi.getter
    def mirror(self) -> Optional[pulumi.Input[str]]:
        """
        Enables or disables mirroring of SNAT connections.
        """
        return pulumi.get(self, "mirror")

    @mirror.setter
    def mirror(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "mirror", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the snat
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def origins(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SnatOriginArgs']]]]:
        """
        IP or hostname of the snat
        """
        return pulumi.get(self, "origins")

    @origins.setter
    def origins(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SnatOriginArgs']]]]):
        pulumi.set(self, "origins", value)

    @property
    @pulumi.getter
    def partition(self) -> Optional[pulumi.Input[str]]:
        """
        Displays the administrative partition within which this profile resides
        """
        return pulumi.get(self, "partition")

    @partition.setter
    def partition(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "partition", value)

    @property
    @pulumi.getter
    def snatpool(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of a SNAT pool. You can only use this option when automap and translation are not used.
        """
        return pulumi.get(self, "snatpool")

    @snatpool.setter
    def snatpool(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "snatpool", value)

    @property
    @pulumi.getter
    def sourceport(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies whether the system preserves the source port of the connection. The default is preserve. Use of the preserve-strict setting should be restricted to UDP only under very special circumstances such as nPath or transparent (that is, no translation of any other L3/L4 field), where there is a 1:1 relationship between virtual IP addresses and node addresses, or when clustered multi-processing (CMP) is disabled. The change setting is useful for obfuscating internal network addresses.
        """
        return pulumi.get(self, "sourceport")

    @sourceport.setter
    def sourceport(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sourceport", value)

    @property
    @pulumi.getter
    def translation(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of a translated IP address. Note that translated addresses are outside the traffic management system. You can only use this option when automap and snatpool are not used.
        """
        return pulumi.get(self, "translation")

    @translation.setter
    def translation(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "translation", value)

    @property
    @pulumi.getter
    def vlans(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Specifies the name of the VLAN to which you want to assign the SNAT. The default is vlans-enabled.
        """
        return pulumi.get(self, "vlans")

    @vlans.setter
    def vlans(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "vlans", value)

    @property
    @pulumi.getter
    def vlansdisabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Disables the SNAT on all VLANs.
        """
        return pulumi.get(self, "vlansdisabled")

    @vlansdisabled.setter
    def vlansdisabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "vlansdisabled", value)
class Snat(pulumi.CustomResource):
    # Pulumi custom resource for an F5 BIG-IP LTM SNAT configuration.
    # The two @overload __init__ signatures exist only for type checkers;
    # the real constructor dispatches to _internal_init below.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 autolasthop: Optional[pulumi.Input[str]] = None,
                 full_path: Optional[pulumi.Input[str]] = None,
                 mirror: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 origins: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SnatOriginArgs']]]]] = None,
                 partition: Optional[pulumi.Input[str]] = None,
                 snatpool: Optional[pulumi.Input[str]] = None,
                 sourceport: Optional[pulumi.Input[str]] = None,
                 translation: Optional[pulumi.Input[str]] = None,
                 vlans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 vlansdisabled: Optional[pulumi.Input[bool]] = None,
                 __props__=None):
        """
        `ltm.Snat` Manages a snat configuration

        Resources should be named with their "full path". The full path is the combination of the partition + name of the resource. For example /Common/my-pool.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_f5bigip as f5bigip

        test_snat = f5bigip.ltm.Snat("test-snat",
            autolasthop="default",
            full_path="/Common/test-snat",
            mirror="disabled",
            name="TEST_SNAT_NAME",
            origins=[
                f5bigip.ltm.SnatOriginArgs(
                    name="2.2.2.2",
                ),
                f5bigip.ltm.SnatOriginArgs(
                    name="3.3.3.3",
                ),
            ],
            partition="Common",
            translation="/Common/136.1.1.1",
            vlansdisabled=True)
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] autolasthop: -(Optional) Specifies whether to automatically map last hop for pools or not. The default is to use next level's default.
        :param pulumi.Input[str] full_path: Fullpath
        :param pulumi.Input[str] mirror: Enables or disables mirroring of SNAT connections.
        :param pulumi.Input[str] name: Name of the snat
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SnatOriginArgs']]]] origins: IP or hostname of the snat
        :param pulumi.Input[str] partition: Displays the administrative partition within which this profile resides
        :param pulumi.Input[str] snatpool: Specifies the name of a SNAT pool. You can only use this option when automap and translation are not used.
        :param pulumi.Input[str] sourceport: Specifies whether the system preserves the source port of the connection. The default is preserve. Use of the preserve-strict setting should be restricted to UDP only under very special circumstances such as nPath or transparent (that is, no translation of any other L3/L4 field), where there is a 1:1 relationship between virtual IP addresses and node addresses, or when clustered multi-processing (CMP) is disabled. The change setting is useful for obfuscating internal network addresses.
        :param pulumi.Input[str] translation: Specifies the name of a translated IP address. Note that translated addresses are outside the traffic management system. You can only use this option when automap and snatpool are not used.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] vlans: Specifies the name of the VLAN to which you want to assign the SNAT. The default is vlans-enabled.
        :param pulumi.Input[bool] vlansdisabled: Disables the SNAT on all VLANs.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: SnatArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        `ltm.Snat` Manages a snat configuration

        Resources should be named with their "full path". The full path is the combination of the partition + name of the resource. For example /Common/my-pool.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_f5bigip as f5bigip

        test_snat = f5bigip.ltm.Snat("test-snat",
            autolasthop="default",
            full_path="/Common/test-snat",
            mirror="disabled",
            name="TEST_SNAT_NAME",
            origins=[
                f5bigip.ltm.SnatOriginArgs(
                    name="2.2.2.2",
                ),
                f5bigip.ltm.SnatOriginArgs(
                    name="3.3.3.3",
                ),
            ],
            partition="Common",
            translation="/Common/136.1.1.1",
            vlansdisabled=True)
        ```

        :param str resource_name: The name of the resource.
        :param SnatArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: accept either a
        # SnatArgs bundle or individual keyword arguments, then forward to
        # _internal_init either way.
        resource_args, opts = _utilities.get_resource_args_opts(SnatArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 autolasthop: Optional[pulumi.Input[str]] = None,
                 full_path: Optional[pulumi.Input[str]] = None,
                 mirror: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 origins: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SnatOriginArgs']]]]] = None,
                 partition: Optional[pulumi.Input[str]] = None,
                 snatpool: Optional[pulumi.Input[str]] = None,
                 sourceport: Optional[pulumi.Input[str]] = None,
                 translation: Optional[pulumi.Input[str]] = None,
                 vlans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 vlansdisabled: Optional[pulumi.Input[bool]] = None,
                 __props__=None):
        # Validate / default the resource options before registration.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the property bag from the
            # keyword arguments. __props__ is reserved for the get() path.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = SnatArgs.__new__(SnatArgs)

            __props__.__dict__["autolasthop"] = autolasthop
            __props__.__dict__["full_path"] = full_path
            __props__.__dict__["mirror"] = mirror
            # 'name' and 'origins' are required inputs unless this is a URN
            # lookup of an already-registered resource.
            if name is None and not opts.urn:
                raise TypeError("Missing required property 'name'")
            __props__.__dict__["name"] = name
            if origins is None and not opts.urn:
                raise TypeError("Missing required property 'origins'")
            __props__.__dict__["origins"] = origins
            __props__.__dict__["partition"] = partition
            __props__.__dict__["snatpool"] = snatpool
            __props__.__dict__["sourceport"] = sourceport
            __props__.__dict__["translation"] = translation
            __props__.__dict__["vlans"] = vlans
            __props__.__dict__["vlansdisabled"] = vlansdisabled
        super(Snat, __self__).__init__(
            'f5bigip:ltm/snat:Snat',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            autolasthop: Optional[pulumi.Input[str]] = None,
            full_path: Optional[pulumi.Input[str]] = None,
            mirror: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            origins: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SnatOriginArgs']]]]] = None,
            partition: Optional[pulumi.Input[str]] = None,
            snatpool: Optional[pulumi.Input[str]] = None,
            sourceport: Optional[pulumi.Input[str]] = None,
            translation: Optional[pulumi.Input[str]] = None,
            vlans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            vlansdisabled: Optional[pulumi.Input[bool]] = None) -> 'Snat':
        """
        Get an existing Snat resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] autolasthop: -(Optional) Specifies whether to automatically map last hop for pools or not. The default is to use next level's default.
        :param pulumi.Input[str] full_path: Fullpath
        :param pulumi.Input[str] mirror: Enables or disables mirroring of SNAT connections.
        :param pulumi.Input[str] name: Name of the snat
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SnatOriginArgs']]]] origins: IP or hostname of the snat
        :param pulumi.Input[str] partition: Displays the administrative partition within which this profile resides
        :param pulumi.Input[str] snatpool: Specifies the name of a SNAT pool. You can only use this option when automap and translation are not used.
        :param pulumi.Input[str] sourceport: Specifies whether the system preserves the source port of the connection. The default is preserve. Use of the preserve-strict setting should be restricted to UDP only under very special circumstances such as nPath or transparent (that is, no translation of any other L3/L4 field), where there is a 1:1 relationship between virtual IP addresses and node addresses, or when clustered multi-processing (CMP) is disabled. The change setting is useful for obfuscating internal network addresses.
        :param pulumi.Input[str] translation: Specifies the name of a translated IP address. Note that translated addresses are outside the traffic management system. You can only use this option when automap and snatpool are not used.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] vlans: Specifies the name of the VLAN to which you want to assign the SNAT. The default is vlans-enabled.
        :param pulumi.Input[bool] vlansdisabled: Disables the SNAT on all VLANs.
        """
        # Re-register the resource under the given provider ID; the state
        # arguments (if provided) qualify the lookup.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _SnatState.__new__(_SnatState)

        __props__.__dict__["autolasthop"] = autolasthop
        __props__.__dict__["full_path"] = full_path
        __props__.__dict__["mirror"] = mirror
        __props__.__dict__["name"] = name
        __props__.__dict__["origins"] = origins
        __props__.__dict__["partition"] = partition
        __props__.__dict__["snatpool"] = snatpool
        __props__.__dict__["sourceport"] = sourceport
        __props__.__dict__["translation"] = translation
        __props__.__dict__["vlans"] = vlans
        __props__.__dict__["vlansdisabled"] = vlansdisabled
        return Snat(resource_name, opts=opts, __props__=__props__)

    # --- Read-only output properties (resolved by the provider) ---

    @property
    @pulumi.getter
    def autolasthop(self) -> pulumi.Output[Optional[str]]:
        """
        -(Optional) Specifies whether to automatically map last hop for pools or not. The default is to use next level's default.
        """
        return pulumi.get(self, "autolasthop")

    @property
    @pulumi.getter(name="fullPath")
    def full_path(self) -> pulumi.Output[Optional[str]]:
        """
        Fullpath
        """
        return pulumi.get(self, "full_path")

    @property
    @pulumi.getter
    def mirror(self) -> pulumi.Output[Optional[str]]:
        """
        Enables or disables mirroring of SNAT connections.
        """
        return pulumi.get(self, "mirror")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Name of the snat
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def origins(self) -> pulumi.Output[Sequence['outputs.SnatOrigin']]:
        """
        IP or hostname of the snat
        """
        return pulumi.get(self, "origins")

    @property
    @pulumi.getter
    def partition(self) -> pulumi.Output[Optional[str]]:
        """
        Displays the administrative partition within which this profile resides
        """
        return pulumi.get(self, "partition")

    @property
    @pulumi.getter
    def snatpool(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies the name of a SNAT pool. You can only use this option when automap and translation are not used.
        """
        return pulumi.get(self, "snatpool")

    @property
    @pulumi.getter
    def sourceport(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies whether the system preserves the source port of the connection. The default is preserve. Use of the preserve-strict setting should be restricted to UDP only under very special circumstances such as nPath or transparent (that is, no translation of any other L3/L4 field), where there is a 1:1 relationship between virtual IP addresses and node addresses, or when clustered multi-processing (CMP) is disabled. The change setting is useful for obfuscating internal network addresses.
        """
        return pulumi.get(self, "sourceport")

    @property
    @pulumi.getter
    def translation(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies the name of a translated IP address. Note that translated addresses are outside the traffic management system. You can only use this option when automap and snatpool are not used.
        """
        return pulumi.get(self, "translation")

    @property
    @pulumi.getter
    def vlans(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        Specifies the name of the VLAN to which you want to assign the SNAT. The default is vlans-enabled.
        """
        return pulumi.get(self, "vlans")

    @property
    @pulumi.getter
    def vlansdisabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Disables the SNAT on all VLANs.
        """
        return pulumi.get(self, "vlansdisabled")
name="3.3.3.3", |
applicationserver_web.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: lorawan-stack/api/applicationserver_web.proto
package ttnpb
import (
context "context"
fmt "fmt"
io "io"
math "math"
math_bits "math/bits"
reflect "reflect"
strings "strings"
time "time"
_ "github.com/envoyproxy/protoc-gen-validate/validate"
_ "github.com/gogo/protobuf/gogoproto"
proto "github.com/gogo/protobuf/proto"
github_com_gogo_protobuf_sortkeys "github.com/gogo/protobuf/sortkeys"
github_com_gogo_protobuf_types "github.com/gogo/protobuf/types"
types "github.com/gogo/protobuf/types"
golang_proto "github.com/golang/protobuf/proto"
_ "google.golang.org/genproto/googleapis/api/annotations"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = golang_proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
var _ = time.Kitchen

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
// ApplicationWebhookIdentifiers identifies a webhook by the owning
// application's identifiers plus the webhook ID within that application.
type ApplicationWebhookIdentifiers struct {
	ApplicationIdentifiers `protobuf:"bytes,1,opt,name=application_ids,json=applicationIds,proto3,embedded=application_ids" json:"application_ids"`
	WebhookID              string   `protobuf:"bytes,2,opt,name=webhook_id,json=webhookId,proto3" json:"webhook_id,omitempty"`
	XXX_NoUnkeyedLiteral   struct{} `json:"-"`
	XXX_sizecache          int32    `json:"-"`
}

// Standard gogo/protobuf message plumbing (reflection + lazy marshalling).
func (m *ApplicationWebhookIdentifiers) Reset()      { *m = ApplicationWebhookIdentifiers{} }
func (*ApplicationWebhookIdentifiers) ProtoMessage() {}
func (*ApplicationWebhookIdentifiers) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{0}
}
func (m *ApplicationWebhookIdentifiers) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *ApplicationWebhookIdentifiers) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_ApplicationWebhookIdentifiers.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *ApplicationWebhookIdentifiers) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationWebhookIdentifiers.Merge(m, src)
}
func (m *ApplicationWebhookIdentifiers) XXX_Size() int {
	return m.Size()
}
func (m *ApplicationWebhookIdentifiers) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationWebhookIdentifiers.DiscardUnknown(m)
}

var xxx_messageInfo_ApplicationWebhookIdentifiers proto.InternalMessageInfo

// GetWebhookID returns the webhook ID, or "" for a nil receiver.
func (m *ApplicationWebhookIdentifiers) GetWebhookID() string {
	if m != nil {
		return m.WebhookID
	}
	return ""
}
// ApplicationWebhookTemplateIdentifiers identifies a webhook template by its ID.
type ApplicationWebhookTemplateIdentifiers struct {
	TemplateID           string   `protobuf:"bytes,1,opt,name=template_id,json=templateId,proto3" json:"template_id,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard gogo/protobuf message plumbing (reflection + lazy marshalling).
func (m *ApplicationWebhookTemplateIdentifiers) Reset()      { *m = ApplicationWebhookTemplateIdentifiers{} }
func (*ApplicationWebhookTemplateIdentifiers) ProtoMessage() {}
func (*ApplicationWebhookTemplateIdentifiers) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{1}
}
func (m *ApplicationWebhookTemplateIdentifiers) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *ApplicationWebhookTemplateIdentifiers) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_ApplicationWebhookTemplateIdentifiers.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *ApplicationWebhookTemplateIdentifiers) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationWebhookTemplateIdentifiers.Merge(m, src)
}
func (m *ApplicationWebhookTemplateIdentifiers) XXX_Size() int {
	return m.Size()
}
func (m *ApplicationWebhookTemplateIdentifiers) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationWebhookTemplateIdentifiers.DiscardUnknown(m)
}

var xxx_messageInfo_ApplicationWebhookTemplateIdentifiers proto.InternalMessageInfo

// GetTemplateID returns the template ID, or "" for a nil receiver.
func (m *ApplicationWebhookTemplateIdentifiers) GetTemplateID() string {
	if m != nil {
		return m.TemplateID
	}
	return ""
}
// ApplicationWebhookTemplateField represents a custom field that needs to be filled by the user in order to use the template.
// A field can be an API key, an username or password, or any custom platform specific field (such as region).
// The fields are meant to be replaced inside the URLs and headers when the webhook is created.
type ApplicationWebhookTemplateField struct {
	ID          string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
	Name        string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
	Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"`
	// Secret decides if the field should be shown in plain-text or should stay hidden.
	Secret               bool     `protobuf:"varint,4,opt,name=secret,proto3" json:"secret,omitempty"`
	DefaultValue         string   `protobuf:"bytes,5,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard gogo/protobuf message plumbing (reflection + lazy marshalling).
func (m *ApplicationWebhookTemplateField) Reset()      { *m = ApplicationWebhookTemplateField{} }
func (*ApplicationWebhookTemplateField) ProtoMessage() {}
func (*ApplicationWebhookTemplateField) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{2}
}
func (m *ApplicationWebhookTemplateField) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *ApplicationWebhookTemplateField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_ApplicationWebhookTemplateField.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *ApplicationWebhookTemplateField) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationWebhookTemplateField.Merge(m, src)
}
func (m *ApplicationWebhookTemplateField) XXX_Size() int {
	return m.Size()
}
func (m *ApplicationWebhookTemplateField) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationWebhookTemplateField.DiscardUnknown(m)
}

var xxx_messageInfo_ApplicationWebhookTemplateField proto.InternalMessageInfo

// Nil-safe field accessors; each returns the zero value for a nil receiver.
func (m *ApplicationWebhookTemplateField) GetID() string {
	if m != nil {
		return m.ID
	}
	return ""
}

func (m *ApplicationWebhookTemplateField) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *ApplicationWebhookTemplateField) GetDescription() string {
	if m != nil {
		return m.Description
	}
	return ""
}

func (m *ApplicationWebhookTemplateField) GetSecret() bool {
	if m != nil {
		return m.Secret
	}
	return false
}

func (m *ApplicationWebhookTemplateField) GetDefaultValue() string {
	if m != nil {
		return m.DefaultValue
	}
	return ""
}
// ApplicationWebhookTemplate describes a reusable webhook definition:
// branding/metadata URLs, the templated base URL and headers, the payload
// format, the user-fillable fields, and one optional per-message-type path.
type ApplicationWebhookTemplate struct {
	ApplicationWebhookTemplateIdentifiers `protobuf:"bytes,1,opt,name=ids,proto3,embedded=ids" json:"ids"`
	Name                                  string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
	Description                           string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"`
	LogoURL                               string `protobuf:"bytes,4,opt,name=logo_url,json=logoUrl,proto3" json:"logo_url,omitempty"`
	InfoURL                               string `protobuf:"bytes,5,opt,name=info_url,json=infoUrl,proto3" json:"info_url,omitempty"`
	DocumentationURL                      string `protobuf:"bytes,6,opt,name=documentation_url,json=documentationUrl,proto3" json:"documentation_url,omitempty"`
	// The base URL of the template. Can contain template fields, in RFC 6570 format.
	BaseURL string `protobuf:"bytes,7,opt,name=base_url,json=baseUrl,proto3" json:"base_url,omitempty"`
	// The HTTP headers used by the template. Both the key and the value can contain template fields.
	Headers              map[string]string                   `protobuf:"bytes,8,rep,name=headers,proto3" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	Format               string                              `protobuf:"bytes,9,opt,name=format,proto3" json:"format,omitempty"`
	Fields               []*ApplicationWebhookTemplateField  `protobuf:"bytes,10,rep,name=fields,proto3" json:"fields,omitempty"`
	UplinkMessage        *ApplicationWebhookTemplate_Message `protobuf:"bytes,11,opt,name=uplink_message,json=uplinkMessage,proto3" json:"uplink_message,omitempty"`
	JoinAccept           *ApplicationWebhookTemplate_Message `protobuf:"bytes,12,opt,name=join_accept,json=joinAccept,proto3" json:"join_accept,omitempty"`
	DownlinkAck          *ApplicationWebhookTemplate_Message `protobuf:"bytes,13,opt,name=downlink_ack,json=downlinkAck,proto3" json:"downlink_ack,omitempty"`
	DownlinkNack         *ApplicationWebhookTemplate_Message `protobuf:"bytes,14,opt,name=downlink_nack,json=downlinkNack,proto3" json:"downlink_nack,omitempty"`
	DownlinkSent         *ApplicationWebhookTemplate_Message `protobuf:"bytes,15,opt,name=downlink_sent,json=downlinkSent,proto3" json:"downlink_sent,omitempty"`
	DownlinkFailed       *ApplicationWebhookTemplate_Message `protobuf:"bytes,16,opt,name=downlink_failed,json=downlinkFailed,proto3" json:"downlink_failed,omitempty"`
	DownlinkQueued       *ApplicationWebhookTemplate_Message `protobuf:"bytes,17,opt,name=downlink_queued,json=downlinkQueued,proto3" json:"downlink_queued,omitempty"`
	LocationSolved       *ApplicationWebhookTemplate_Message `protobuf:"bytes,18,opt,name=location_solved,json=locationSolved,proto3" json:"location_solved,omitempty"`
	XXX_NoUnkeyedLiteral struct{}                            `json:"-"`
	XXX_sizecache        int32                               `json:"-"`
}

// Standard gogo/protobuf message plumbing (reflection + lazy marshalling).
func (m *ApplicationWebhookTemplate) Reset()      { *m = ApplicationWebhookTemplate{} }
func (*ApplicationWebhookTemplate) ProtoMessage() {}
func (*ApplicationWebhookTemplate) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{3}
}
func (m *ApplicationWebhookTemplate) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *ApplicationWebhookTemplate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_ApplicationWebhookTemplate.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *ApplicationWebhookTemplate) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationWebhookTemplate.Merge(m, src)
}
func (m *ApplicationWebhookTemplate) XXX_Size() int {
	return m.Size()
}
func (m *ApplicationWebhookTemplate) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationWebhookTemplate.DiscardUnknown(m)
}

var xxx_messageInfo_ApplicationWebhookTemplate proto.InternalMessageInfo

// Nil-safe field accessors; each returns the zero value for a nil receiver.
func (m *ApplicationWebhookTemplate) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *ApplicationWebhookTemplate) GetDescription() string {
	if m != nil {
		return m.Description
	}
	return ""
}

func (m *ApplicationWebhookTemplate) GetLogoURL() string {
	if m != nil {
		return m.LogoURL
	}
	return ""
}

func (m *ApplicationWebhookTemplate) GetInfoURL() string {
	if m != nil {
		return m.InfoURL
	}
	return ""
}

func (m *ApplicationWebhookTemplate) GetDocumentationURL() string {
	if m != nil {
		return m.DocumentationURL
	}
	return ""
}

func (m *ApplicationWebhookTemplate) GetBaseURL() string {
	if m != nil {
		return m.BaseURL
	}
	return ""
}

func (m *ApplicationWebhookTemplate) GetHeaders() map[string]string {
	if m != nil {
		return m.Headers
	}
	return nil
}

func (m *ApplicationWebhookTemplate) GetFormat() string {
	if m != nil {
		return m.Format
	}
	return ""
}

func (m *ApplicationWebhookTemplate) GetFields() []*ApplicationWebhookTemplateField {
	if m != nil {
		return m.Fields
	}
	return nil
}

func (m *ApplicationWebhookTemplate) GetUplinkMessage() *ApplicationWebhookTemplate_Message {
	if m != nil {
		return m.UplinkMessage
	}
	return nil
}

func (m *ApplicationWebhookTemplate) GetJoinAccept() *ApplicationWebhookTemplate_Message {
	if m != nil {
		return m.JoinAccept
	}
	return nil
}

func (m *ApplicationWebhookTemplate) GetDownlinkAck() *ApplicationWebhookTemplate_Message {
	if m != nil {
		return m.DownlinkAck
	}
	return nil
}

func (m *ApplicationWebhookTemplate) GetDownlinkNack() *ApplicationWebhookTemplate_Message {
	if m != nil {
		return m.DownlinkNack
	}
	return nil
}

func (m *ApplicationWebhookTemplate) GetDownlinkSent() *ApplicationWebhookTemplate_Message {
	if m != nil {
		return m.DownlinkSent
	}
	return nil
}

func (m *ApplicationWebhookTemplate) GetDownlinkFailed() *ApplicationWebhookTemplate_Message {
	if m != nil {
		return m.DownlinkFailed
	}
	return nil
}

func (m *ApplicationWebhookTemplate) GetDownlinkQueued() *ApplicationWebhookTemplate_Message {
	if m != nil {
		return m.DownlinkQueued
	}
	return nil
}

func (m *ApplicationWebhookTemplate) GetLocationSolved() *ApplicationWebhookTemplate_Message {
	if m != nil {
		return m.LocationSolved
	}
	return nil
}
// ApplicationWebhookTemplate_Message configures one message type of a
// webhook template; currently just the templated path appended to BaseURL.
type ApplicationWebhookTemplate_Message struct {
	// Path to append to the base URL. Can contain template fields, in RFC 6570 format.
	Path                 string   `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard gogo/protobuf message plumbing (reflection + lazy marshalling).
func (m *ApplicationWebhookTemplate_Message) Reset()      { *m = ApplicationWebhookTemplate_Message{} }
func (*ApplicationWebhookTemplate_Message) ProtoMessage() {}
func (*ApplicationWebhookTemplate_Message) Descriptor() ([]byte, []int) {
	// {3, 1}: nested message 1 of top-level message 3 in the file descriptor.
	return fileDescriptor_2652f2d8eaceda0e, []int{3, 1}
}
func (m *ApplicationWebhookTemplate_Message) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *ApplicationWebhookTemplate_Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_ApplicationWebhookTemplate_Message.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *ApplicationWebhookTemplate_Message) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationWebhookTemplate_Message.Merge(m, src)
}
func (m *ApplicationWebhookTemplate_Message) XXX_Size() int {
	return m.Size()
}
func (m *ApplicationWebhookTemplate_Message) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationWebhookTemplate_Message.DiscardUnknown(m)
}

var xxx_messageInfo_ApplicationWebhookTemplate_Message proto.InternalMessageInfo

// GetPath returns the templated path, or "" for a nil receiver.
func (m *ApplicationWebhookTemplate_Message) GetPath() string {
	if m != nil {
		return m.Path
	}
	return ""
}
// ApplicationWebhookTemplates is a list container of webhook templates.
type ApplicationWebhookTemplates struct {
	Templates            []*ApplicationWebhookTemplate `protobuf:"bytes,1,rep,name=templates,proto3" json:"templates,omitempty"`
	XXX_NoUnkeyedLiteral struct{}                      `json:"-"`
	XXX_sizecache        int32                         `json:"-"`
}

// Standard gogo/protobuf message plumbing (reflection + lazy marshalling).
func (m *ApplicationWebhookTemplates) Reset()      { *m = ApplicationWebhookTemplates{} }
func (*ApplicationWebhookTemplates) ProtoMessage() {}
func (*ApplicationWebhookTemplates) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{4}
}
func (m *ApplicationWebhookTemplates) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *ApplicationWebhookTemplates) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_ApplicationWebhookTemplates.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *ApplicationWebhookTemplates) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationWebhookTemplates.Merge(m, src)
}
func (m *ApplicationWebhookTemplates) XXX_Size() int {
	return m.Size()
}
func (m *ApplicationWebhookTemplates) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationWebhookTemplates.DiscardUnknown(m)
}

var xxx_messageInfo_ApplicationWebhookTemplates proto.InternalMessageInfo

// GetTemplates returns the template list, or nil for a nil receiver.
func (m *ApplicationWebhookTemplates) GetTemplates() []*ApplicationWebhookTemplate {
	if m != nil {
		return m.Templates
	}
	return nil
}
// ApplicationWebhook is a concrete webhook configuration: identifiers,
// timestamps, target base URL/headers/format, the (optional) template it was
// created from with its field values, and one optional per-message-type path.
type ApplicationWebhook struct {
	ApplicationWebhookIdentifiers `protobuf:"bytes,1,opt,name=ids,proto3,embedded=ids" json:"ids"`
	CreatedAt                     time.Time `protobuf:"bytes,2,opt,name=created_at,json=createdAt,proto3,stdtime" json:"created_at"`
	UpdatedAt                     time.Time `protobuf:"bytes,3,opt,name=updated_at,json=updatedAt,proto3,stdtime" json:"updated_at"`
	// Base URL to which the message's path is appended.
	BaseURL string `protobuf:"bytes,4,opt,name=base_url,json=baseUrl,proto3" json:"base_url,omitempty"`
	// HTTP headers to use.
	Headers map[string]string `protobuf:"bytes,5,rep,name=headers,proto3" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// The format to use for the body.
	// Supported values depend on the Application Server configuration.
	Format string `protobuf:"bytes,6,opt,name=format,proto3" json:"format,omitempty"`
	// The ID of the template that was used to create the Webhook.
	// NOTE: embedded pointer field — its name is the type name.
	*ApplicationWebhookTemplateIdentifiers `protobuf:"bytes,15,opt,name=template_ids,json=templateIds,proto3,embedded=template_ids" json:"template_ids,omitempty"`
	// The value of the fields used by the template. Maps field.id to the value.
	TemplateFields       map[string]string           `protobuf:"bytes,16,rep,name=template_fields,json=templateFields,proto3" json:"template_fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	UplinkMessage        *ApplicationWebhook_Message `protobuf:"bytes,7,opt,name=uplink_message,json=uplinkMessage,proto3" json:"uplink_message,omitempty"`
	JoinAccept           *ApplicationWebhook_Message `protobuf:"bytes,8,opt,name=join_accept,json=joinAccept,proto3" json:"join_accept,omitempty"`
	DownlinkAck          *ApplicationWebhook_Message `protobuf:"bytes,9,opt,name=downlink_ack,json=downlinkAck,proto3" json:"downlink_ack,omitempty"`
	DownlinkNack         *ApplicationWebhook_Message `protobuf:"bytes,10,opt,name=downlink_nack,json=downlinkNack,proto3" json:"downlink_nack,omitempty"`
	DownlinkSent         *ApplicationWebhook_Message `protobuf:"bytes,11,opt,name=downlink_sent,json=downlinkSent,proto3" json:"downlink_sent,omitempty"`
	DownlinkFailed       *ApplicationWebhook_Message `protobuf:"bytes,12,opt,name=downlink_failed,json=downlinkFailed,proto3" json:"downlink_failed,omitempty"`
	DownlinkQueued       *ApplicationWebhook_Message `protobuf:"bytes,13,opt,name=downlink_queued,json=downlinkQueued,proto3" json:"downlink_queued,omitempty"`
	LocationSolved       *ApplicationWebhook_Message `protobuf:"bytes,14,opt,name=location_solved,json=locationSolved,proto3" json:"location_solved,omitempty"`
	XXX_NoUnkeyedLiteral struct{}                    `json:"-"`
	XXX_sizecache        int32                       `json:"-"`
}

// Standard gogo/protobuf message plumbing (reflection + lazy marshalling).
func (m *ApplicationWebhook) Reset()      { *m = ApplicationWebhook{} }
func (*ApplicationWebhook) ProtoMessage() {}
func (*ApplicationWebhook) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{5}
}
func (m *ApplicationWebhook) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *ApplicationWebhook) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_ApplicationWebhook.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *ApplicationWebhook) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationWebhook.Merge(m, src)
}
func (m *ApplicationWebhook) XXX_Size() int {
	return m.Size()
}
func (m *ApplicationWebhook) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationWebhook.DiscardUnknown(m)
}

var xxx_messageInfo_ApplicationWebhook proto.InternalMessageInfo

// Nil-safe field accessors; each returns the zero value for a nil receiver.
func (m *ApplicationWebhook) GetCreatedAt() time.Time {
	if m != nil {
		return m.CreatedAt
	}
	return time.Time{}
}

func (m *ApplicationWebhook) GetUpdatedAt() time.Time {
	if m != nil {
		return m.UpdatedAt
	}
	return time.Time{}
}

func (m *ApplicationWebhook) GetBaseURL() string {
	if m != nil {
		return m.BaseURL
	}
	return ""
}

func (m *ApplicationWebhook) GetHeaders() map[string]string {
	if m != nil {
		return m.Headers
	}
	return nil
}

func (m *ApplicationWebhook) GetFormat() string {
	if m != nil {
		return m.Format
	}
	return ""
}

func (m *ApplicationWebhook) GetTemplateFields() map[string]string {
	if m != nil {
		return m.TemplateFields
	}
	return nil
}

func (m *ApplicationWebhook) GetUplinkMessage() *ApplicationWebhook_Message {
	if m != nil {
		return m.UplinkMessage
	}
	return nil
}

func (m *ApplicationWebhook) GetJoinAccept() *ApplicationWebhook_Message {
	if m != nil {
		return m.JoinAccept
	}
	return nil
}

func (m *ApplicationWebhook) GetDownlinkAck() *ApplicationWebhook_Message {
	if m != nil {
		return m.DownlinkAck
	}
	return nil
}

func (m *ApplicationWebhook) GetDownlinkNack() *ApplicationWebhook_Message {
	if m != nil {
		return m.DownlinkNack
	}
	return nil
}

func (m *ApplicationWebhook) GetDownlinkSent() *ApplicationWebhook_Message {
	if m != nil {
		return m.DownlinkSent
	}
	return nil
}

func (m *ApplicationWebhook) GetDownlinkFailed() *ApplicationWebhook_Message {
	if m != nil {
		return m.DownlinkFailed
	}
	return nil
}

func (m *ApplicationWebhook) GetDownlinkQueued() *ApplicationWebhook_Message {
	if m != nil {
		return m.DownlinkQueued
	}
	return nil
}

func (m *ApplicationWebhook) GetLocationSolved() *ApplicationWebhook_Message {
	if m != nil {
		return m.LocationSolved
	}
	return nil
}
// ApplicationWebhook_Message configures delivery of one message type through
// the webhook: the URL path to append to the webhook's base URL.
type ApplicationWebhook_Message struct {
	// Path to append to the base URL.
	Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_sizecache int32 `json:"-"`
}

// Reset implements proto.Message by clearing the receiver to its zero value.
func (m *ApplicationWebhook_Message) Reset() { *m = ApplicationWebhook_Message{} }

// ProtoMessage marks ApplicationWebhook_Message as a protobuf message type.
func (*ApplicationWebhook_Message) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this nested
// message's path index ([5, 2] = third nested type of message 5).
func (*ApplicationWebhook_Message) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{5, 2}
}

// XXX_Unmarshal decodes the wire-format bytes b into m (generated-code hook).
func (m *ApplicationWebhook_Message) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}

// XXX_Marshal encodes m, using the reflection-based marshaler when
// deterministic output is requested and the generated fast path otherwise.
func (m *ApplicationWebhook_Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_ApplicationWebhook_Message.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}

// XXX_Merge merges src into m (generated-code hook).
func (m *ApplicationWebhook_Message) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationWebhook_Message.Merge(m, src)
}

// XXX_Size returns the wire-format size of m in bytes.
func (m *ApplicationWebhook_Message) XXX_Size() int {
	return m.Size()
}

// XXX_DiscardUnknown drops any unknown fields retained on m.
func (m *ApplicationWebhook_Message) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationWebhook_Message.DiscardUnknown(m)
}

// xxx_messageInfo_ApplicationWebhook_Message caches reflection info for the
// slow-path marshal/merge operations above.
var xxx_messageInfo_ApplicationWebhook_Message proto.InternalMessageInfo

// GetPath returns the endpoint path, or "" if m is nil (nil-safe).
func (m *ApplicationWebhook_Message) GetPath() string {
	if m != nil {
		return m.Path
	}
	return ""
}
// ApplicationWebhooks is a list wrapper holding zero or more webhooks,
// used as the response message for list RPCs.
type ApplicationWebhooks struct {
	Webhooks []*ApplicationWebhook `protobuf:"bytes,1,rep,name=webhooks,proto3" json:"webhooks,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_sizecache int32 `json:"-"`
}

// Reset implements proto.Message by clearing the receiver to its zero value.
func (m *ApplicationWebhooks) Reset() { *m = ApplicationWebhooks{} }

// ProtoMessage marks ApplicationWebhooks as a protobuf message type.
func (*ApplicationWebhooks) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's path index.
func (*ApplicationWebhooks) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{6}
}

// XXX_Unmarshal decodes the wire-format bytes b into m (generated-code hook).
func (m *ApplicationWebhooks) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}

// XXX_Marshal encodes m, using the reflection-based marshaler when
// deterministic output is requested and the generated fast path otherwise.
func (m *ApplicationWebhooks) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_ApplicationWebhooks.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}

// XXX_Merge merges src into m (generated-code hook).
func (m *ApplicationWebhooks) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationWebhooks.Merge(m, src)
}

// XXX_Size returns the wire-format size of m in bytes.
func (m *ApplicationWebhooks) XXX_Size() int {
	return m.Size()
}

// XXX_DiscardUnknown drops any unknown fields retained on m.
func (m *ApplicationWebhooks) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationWebhooks.DiscardUnknown(m)
}

// xxx_messageInfo_ApplicationWebhooks caches reflection info for the
// slow-path marshal/merge operations above.
var xxx_messageInfo_ApplicationWebhooks proto.InternalMessageInfo

// GetWebhooks returns the webhook list, or nil if m is nil (nil-safe).
func (m *ApplicationWebhooks) GetWebhooks() []*ApplicationWebhook {
	if m != nil {
		return m.Webhooks
	}
	return nil
}
// ApplicationWebhookFormats maps available payload format identifiers to
// their human-readable descriptions.
type ApplicationWebhookFormats struct {
	// Format and description.
	Formats map[string]string `protobuf:"bytes,1,rep,name=formats,proto3" json:"formats,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_sizecache int32 `json:"-"`
}

// Reset implements proto.Message by clearing the receiver to its zero value.
func (m *ApplicationWebhookFormats) Reset() { *m = ApplicationWebhookFormats{} }

// ProtoMessage marks ApplicationWebhookFormats as a protobuf message type.
func (*ApplicationWebhookFormats) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's path index.
func (*ApplicationWebhookFormats) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{7}
}

// XXX_Unmarshal decodes the wire-format bytes b into m (generated-code hook).
func (m *ApplicationWebhookFormats) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}

// XXX_Marshal encodes m, using the reflection-based marshaler when
// deterministic output is requested and the generated fast path otherwise.
func (m *ApplicationWebhookFormats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_ApplicationWebhookFormats.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}

// XXX_Merge merges src into m (generated-code hook).
func (m *ApplicationWebhookFormats) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationWebhookFormats.Merge(m, src)
}

// XXX_Size returns the wire-format size of m in bytes.
func (m *ApplicationWebhookFormats) XXX_Size() int {
	return m.Size()
}

// XXX_DiscardUnknown drops any unknown fields retained on m.
func (m *ApplicationWebhookFormats) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationWebhookFormats.DiscardUnknown(m)
}

// xxx_messageInfo_ApplicationWebhookFormats caches reflection info for the
// slow-path marshal/merge operations above.
var xxx_messageInfo_ApplicationWebhookFormats proto.InternalMessageInfo

// GetFormats returns the format->description map, or nil if m is nil (nil-safe).
func (m *ApplicationWebhookFormats) GetFormats() map[string]string {
	if m != nil {
		return m.Formats
	}
	return nil
}
// GetApplicationWebhookRequest asks for a single webhook, identified by the
// embedded identifiers, with a field mask selecting which fields to return.
type GetApplicationWebhookRequest struct {
	ApplicationWebhookIdentifiers `protobuf:"bytes,1,opt,name=ids,proto3,embedded=ids" json:"ids"`
	FieldMask types.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_sizecache int32 `json:"-"`
}

// Reset implements proto.Message by clearing the receiver to its zero value.
func (m *GetApplicationWebhookRequest) Reset() { *m = GetApplicationWebhookRequest{} }

// ProtoMessage marks GetApplicationWebhookRequest as a protobuf message type.
func (*GetApplicationWebhookRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's path index.
func (*GetApplicationWebhookRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{8}
}

// XXX_Unmarshal decodes the wire-format bytes b into m (generated-code hook).
func (m *GetApplicationWebhookRequest) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}

// XXX_Marshal encodes m, using the reflection-based marshaler when
// deterministic output is requested and the generated fast path otherwise.
func (m *GetApplicationWebhookRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_GetApplicationWebhookRequest.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}

// XXX_Merge merges src into m (generated-code hook).
func (m *GetApplicationWebhookRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GetApplicationWebhookRequest.Merge(m, src)
}

// XXX_Size returns the wire-format size of m in bytes.
func (m *GetApplicationWebhookRequest) XXX_Size() int {
	return m.Size()
}

// XXX_DiscardUnknown drops any unknown fields retained on m.
func (m *GetApplicationWebhookRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_GetApplicationWebhookRequest.DiscardUnknown(m)
}

// xxx_messageInfo_GetApplicationWebhookRequest caches reflection info for the
// slow-path marshal/merge operations above.
var xxx_messageInfo_GetApplicationWebhookRequest proto.InternalMessageInfo

// GetFieldMask returns the field mask, or an empty mask if m is nil (nil-safe).
func (m *GetApplicationWebhookRequest) GetFieldMask() types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return types.FieldMask{}
}
// ListApplicationWebhooksRequest asks for all webhooks of an application,
// with a field mask selecting which fields to return.
type ListApplicationWebhooksRequest struct {
	ApplicationIdentifiers `protobuf:"bytes,1,opt,name=application_ids,json=applicationIds,proto3,embedded=application_ids" json:"application_ids"`
	FieldMask types.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_sizecache int32 `json:"-"`
}

// Reset implements proto.Message by clearing the receiver to its zero value.
func (m *ListApplicationWebhooksRequest) Reset() { *m = ListApplicationWebhooksRequest{} }

// ProtoMessage marks ListApplicationWebhooksRequest as a protobuf message type.
func (*ListApplicationWebhooksRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's path index.
func (*ListApplicationWebhooksRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{9}
}

// XXX_Unmarshal decodes the wire-format bytes b into m (generated-code hook).
func (m *ListApplicationWebhooksRequest) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}

// XXX_Marshal encodes m, using the reflection-based marshaler when
// deterministic output is requested and the generated fast path otherwise.
func (m *ListApplicationWebhooksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_ListApplicationWebhooksRequest.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}

// XXX_Merge merges src into m (generated-code hook).
func (m *ListApplicationWebhooksRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ListApplicationWebhooksRequest.Merge(m, src)
}

// XXX_Size returns the wire-format size of m in bytes.
func (m *ListApplicationWebhooksRequest) XXX_Size() int {
	return m.Size()
}

// XXX_DiscardUnknown drops any unknown fields retained on m.
func (m *ListApplicationWebhooksRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_ListApplicationWebhooksRequest.DiscardUnknown(m)
}

// xxx_messageInfo_ListApplicationWebhooksRequest caches reflection info for the
// slow-path marshal/merge operations above.
var xxx_messageInfo_ListApplicationWebhooksRequest proto.InternalMessageInfo

// GetFieldMask returns the field mask, or an empty mask if m is nil (nil-safe).
func (m *ListApplicationWebhooksRequest) GetFieldMask() types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return types.FieldMask{}
}
// SetApplicationWebhookRequest carries a webhook to create or update, with a
// field mask selecting which fields of the embedded webhook to apply.
type SetApplicationWebhookRequest struct {
	ApplicationWebhook `protobuf:"bytes,1,opt,name=webhook,proto3,embedded=webhook" json:"webhook"`
	FieldMask types.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_sizecache int32 `json:"-"`
}

// Reset implements proto.Message by clearing the receiver to its zero value.
func (m *SetApplicationWebhookRequest) Reset() { *m = SetApplicationWebhookRequest{} }

// ProtoMessage marks SetApplicationWebhookRequest as a protobuf message type.
func (*SetApplicationWebhookRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's path index.
func (*SetApplicationWebhookRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{10}
}

// XXX_Unmarshal decodes the wire-format bytes b into m (generated-code hook).
func (m *SetApplicationWebhookRequest) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}

// XXX_Marshal encodes m, using the reflection-based marshaler when
// deterministic output is requested and the generated fast path otherwise.
func (m *SetApplicationWebhookRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_SetApplicationWebhookRequest.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}

// XXX_Merge merges src into m (generated-code hook).
func (m *SetApplicationWebhookRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_SetApplicationWebhookRequest.Merge(m, src)
}

// XXX_Size returns the wire-format size of m in bytes.
func (m *SetApplicationWebhookRequest) XXX_Size() int {
	return m.Size()
}

// XXX_DiscardUnknown drops any unknown fields retained on m.
func (m *SetApplicationWebhookRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_SetApplicationWebhookRequest.DiscardUnknown(m)
}

// xxx_messageInfo_SetApplicationWebhookRequest caches reflection info for the
// slow-path marshal/merge operations above.
var xxx_messageInfo_SetApplicationWebhookRequest proto.InternalMessageInfo

// GetFieldMask returns the field mask, or an empty mask if m is nil (nil-safe).
func (m *SetApplicationWebhookRequest) GetFieldMask() types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return types.FieldMask{}
}
// GetApplicationWebhookTemplateRequest asks for a single webhook template,
// identified by the embedded identifiers, with a field mask selecting which
// fields to return.
type GetApplicationWebhookTemplateRequest struct {
	ApplicationWebhookTemplateIdentifiers `protobuf:"bytes,1,opt,name=ids,proto3,embedded=ids" json:"ids"`
	FieldMask types.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_sizecache int32 `json:"-"`
}

// Reset implements proto.Message by clearing the receiver to its zero value.
func (m *GetApplicationWebhookTemplateRequest) Reset() { *m = GetApplicationWebhookTemplateRequest{} }

// ProtoMessage marks GetApplicationWebhookTemplateRequest as a protobuf message type.
func (*GetApplicationWebhookTemplateRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's path index.
func (*GetApplicationWebhookTemplateRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{11}
}

// XXX_Unmarshal decodes the wire-format bytes b into m (generated-code hook).
func (m *GetApplicationWebhookTemplateRequest) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}

// XXX_Marshal encodes m, using the reflection-based marshaler when
// deterministic output is requested and the generated fast path otherwise.
func (m *GetApplicationWebhookTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_GetApplicationWebhookTemplateRequest.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}

// XXX_Merge merges src into m (generated-code hook).
func (m *GetApplicationWebhookTemplateRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GetApplicationWebhookTemplateRequest.Merge(m, src)
}

// XXX_Size returns the wire-format size of m in bytes.
func (m *GetApplicationWebhookTemplateRequest) XXX_Size() int {
	return m.Size()
}

// XXX_DiscardUnknown drops any unknown fields retained on m.
func (m *GetApplicationWebhookTemplateRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_GetApplicationWebhookTemplateRequest.DiscardUnknown(m)
}

// xxx_messageInfo_GetApplicationWebhookTemplateRequest caches reflection info
// for the slow-path marshal/merge operations above.
var xxx_messageInfo_GetApplicationWebhookTemplateRequest proto.InternalMessageInfo

// GetFieldMask returns the field mask, or an empty mask if m is nil (nil-safe).
func (m *GetApplicationWebhookTemplateRequest) GetFieldMask() types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return types.FieldMask{}
}
// ListApplicationWebhookTemplatesRequest asks for all available webhook
// templates, with a field mask selecting which fields to return.
type ListApplicationWebhookTemplatesRequest struct {
	FieldMask types.FieldMask `protobuf:"bytes,1,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_sizecache int32 `json:"-"`
}

// Reset implements proto.Message by clearing the receiver to its zero value.
func (m *ListApplicationWebhookTemplatesRequest) Reset() {
	*m = ListApplicationWebhookTemplatesRequest{}
}

// ProtoMessage marks ListApplicationWebhookTemplatesRequest as a protobuf message type.
func (*ListApplicationWebhookTemplatesRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's path index.
func (*ListApplicationWebhookTemplatesRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_2652f2d8eaceda0e, []int{12}
}

// XXX_Unmarshal decodes the wire-format bytes b into m (generated-code hook).
func (m *ListApplicationWebhookTemplatesRequest) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}

// XXX_Marshal encodes m, using the reflection-based marshaler when
// deterministic output is requested and the generated fast path otherwise.
func (m *ListApplicationWebhookTemplatesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_ListApplicationWebhookTemplatesRequest.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}

// XXX_Merge merges src into m (generated-code hook).
func (m *ListApplicationWebhookTemplatesRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ListApplicationWebhookTemplatesRequest.Merge(m, src)
}

// XXX_Size returns the wire-format size of m in bytes.
func (m *ListApplicationWebhookTemplatesRequest) XXX_Size() int {
	return m.Size()
}

// XXX_DiscardUnknown drops any unknown fields retained on m.
func (m *ListApplicationWebhookTemplatesRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_ListApplicationWebhookTemplatesRequest.DiscardUnknown(m)
}

// xxx_messageInfo_ListApplicationWebhookTemplatesRequest caches reflection
// info for the slow-path marshal/merge operations above.
var xxx_messageInfo_ListApplicationWebhookTemplatesRequest proto.InternalMessageInfo

// GetFieldMask returns the field mask, or an empty mask if m is nil (nil-safe).
func (m *ListApplicationWebhookTemplatesRequest) GetFieldMask() types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return types.FieldMask{}
}
// init registers every message and map-entry type of this file under the
// "ttn.lorawan.v3" package with BOTH protobuf registries: the gogo registry
// (proto) and the upstream golang/protobuf registry (golang_proto), so that
// both reflection stacks can resolve the types by name.
func init() {
	proto.RegisterType((*ApplicationWebhookIdentifiers)(nil), "ttn.lorawan.v3.ApplicationWebhookIdentifiers")
	golang_proto.RegisterType((*ApplicationWebhookIdentifiers)(nil), "ttn.lorawan.v3.ApplicationWebhookIdentifiers")
	proto.RegisterType((*ApplicationWebhookTemplateIdentifiers)(nil), "ttn.lorawan.v3.ApplicationWebhookTemplateIdentifiers")
	golang_proto.RegisterType((*ApplicationWebhookTemplateIdentifiers)(nil), "ttn.lorawan.v3.ApplicationWebhookTemplateIdentifiers")
	proto.RegisterType((*ApplicationWebhookTemplateField)(nil), "ttn.lorawan.v3.ApplicationWebhookTemplateField")
	golang_proto.RegisterType((*ApplicationWebhookTemplateField)(nil), "ttn.lorawan.v3.ApplicationWebhookTemplateField")
	proto.RegisterType((*ApplicationWebhookTemplate)(nil), "ttn.lorawan.v3.ApplicationWebhookTemplate")
	golang_proto.RegisterType((*ApplicationWebhookTemplate)(nil), "ttn.lorawan.v3.ApplicationWebhookTemplate")
	proto.RegisterMapType((map[string]string)(nil), "ttn.lorawan.v3.ApplicationWebhookTemplate.HeadersEntry")
	golang_proto.RegisterMapType((map[string]string)(nil), "ttn.lorawan.v3.ApplicationWebhookTemplate.HeadersEntry")
	proto.RegisterType((*ApplicationWebhookTemplate_Message)(nil), "ttn.lorawan.v3.ApplicationWebhookTemplate.Message")
	golang_proto.RegisterType((*ApplicationWebhookTemplate_Message)(nil), "ttn.lorawan.v3.ApplicationWebhookTemplate.Message")
	proto.RegisterType((*ApplicationWebhookTemplates)(nil), "ttn.lorawan.v3.ApplicationWebhookTemplates")
	golang_proto.RegisterType((*ApplicationWebhookTemplates)(nil), "ttn.lorawan.v3.ApplicationWebhookTemplates")
	proto.RegisterType((*ApplicationWebhook)(nil), "ttn.lorawan.v3.ApplicationWebhook")
	golang_proto.RegisterType((*ApplicationWebhook)(nil), "ttn.lorawan.v3.ApplicationWebhook")
	proto.RegisterMapType((map[string]string)(nil), "ttn.lorawan.v3.ApplicationWebhook.HeadersEntry")
	golang_proto.RegisterMapType((map[string]string)(nil), "ttn.lorawan.v3.ApplicationWebhook.HeadersEntry")
	proto.RegisterMapType((map[string]string)(nil), "ttn.lorawan.v3.ApplicationWebhook.TemplateFieldsEntry")
	golang_proto.RegisterMapType((map[string]string)(nil), "ttn.lorawan.v3.ApplicationWebhook.TemplateFieldsEntry")
	proto.RegisterType((*ApplicationWebhook_Message)(nil), "ttn.lorawan.v3.ApplicationWebhook.Message")
	golang_proto.RegisterType((*ApplicationWebhook_Message)(nil), "ttn.lorawan.v3.ApplicationWebhook.Message")
	proto.RegisterType((*ApplicationWebhooks)(nil), "ttn.lorawan.v3.ApplicationWebhooks")
	golang_proto.RegisterType((*ApplicationWebhooks)(nil), "ttn.lorawan.v3.ApplicationWebhooks")
	proto.RegisterType((*ApplicationWebhookFormats)(nil), "ttn.lorawan.v3.ApplicationWebhookFormats")
	golang_proto.RegisterType((*ApplicationWebhookFormats)(nil), "ttn.lorawan.v3.ApplicationWebhookFormats")
	proto.RegisterMapType((map[string]string)(nil), "ttn.lorawan.v3.ApplicationWebhookFormats.FormatsEntry")
	golang_proto.RegisterMapType((map[string]string)(nil), "ttn.lorawan.v3.ApplicationWebhookFormats.FormatsEntry")
	proto.RegisterType((*GetApplicationWebhookRequest)(nil), "ttn.lorawan.v3.GetApplicationWebhookRequest")
	golang_proto.RegisterType((*GetApplicationWebhookRequest)(nil), "ttn.lorawan.v3.GetApplicationWebhookRequest")
	proto.RegisterType((*ListApplicationWebhooksRequest)(nil), "ttn.lorawan.v3.ListApplicationWebhooksRequest")
	golang_proto.RegisterType((*ListApplicationWebhooksRequest)(nil), "ttn.lorawan.v3.ListApplicationWebhooksRequest")
	proto.RegisterType((*SetApplicationWebhookRequest)(nil), "ttn.lorawan.v3.SetApplicationWebhookRequest")
	golang_proto.RegisterType((*SetApplicationWebhookRequest)(nil), "ttn.lorawan.v3.SetApplicationWebhookRequest")
	proto.RegisterType((*GetApplicationWebhookTemplateRequest)(nil), "ttn.lorawan.v3.GetApplicationWebhookTemplateRequest")
	golang_proto.RegisterType((*GetApplicationWebhookTemplateRequest)(nil), "ttn.lorawan.v3.GetApplicationWebhookTemplateRequest")
	proto.RegisterType((*ListApplicationWebhookTemplatesRequest)(nil), "ttn.lorawan.v3.ListApplicationWebhookTemplatesRequest")
	golang_proto.RegisterType((*ListApplicationWebhookTemplatesRequest)(nil), "ttn.lorawan.v3.ListApplicationWebhookTemplatesRequest")
}
// init registers the raw (gzipped) file descriptor under its .proto path with
// the gogo registry.
func init() {
	proto.RegisterFile("lorawan-stack/api/applicationserver_web.proto", fileDescriptor_2652f2d8eaceda0e)
}

// init registers the same file descriptor with the upstream golang/protobuf
// registry, mirroring the gogo registration above.
func init() {
	golang_proto.RegisterFile("lorawan-stack/api/applicationserver_web.proto", fileDescriptor_2652f2d8eaceda0e)
}
var fileDescriptor_2652f2d8eaceda0e = []byte{
// 1729 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0x4d, 0x6c, 0xdb, 0xc8,
0x15, 0xe6, 0x48, 0xb2, 0x65, 0x8d, 0xfc, 0xb7, 0xe3, 0xec, 0x96, 0x95, 0x1d, 0xda, 0xe0, 0xba,
0xbb, 0xb6, 0x6b, 0x51, 0x85, 0x77, 0xd3, 0x76, 0x8d, 0x76, 0x0d, 0xab, 0xde, 0x78, 0xdd, 0x26,
0xbb, 0x35, 0xb5, 0xde, 0xc5, 0x6e, 0xb0, 0x11, 0x68, 0x71, 0x24, 0xb3, 0xa2, 0x48, 0x86, 0x1c,
0xd9, 0x75, 0x03, 0xa3, 0x41, 0x4f, 0x41, 0x2f, 0x0d, 0x9a, 0x43, 0x7b, 0x28, 0x8a, 0x20, 0xbd,
0xa4, 0xa7, 0x06, 0x3d, 0xe5, 0x18, 0x14, 0x3d, 0xe4, 0x18, 0xa0, 0x87, 0xe6, 0xe4, 0xc6, 0x52,
0x0f, 0x39, 0x15, 0x39, 0x06, 0x3e, 0x15, 0x1c, 0x0e, 0x25, 0xea, 0xc7, 0x31, 0x25, 0x27, 0x7b,
0x12, 0x47, 0xf3, 0xde, 0xf7, 0xbe, 0xf7, 0xe6, 0xcd, 0x37, 0x43, 0xc2, 0xb4, 0x6e, 0xda, 0xca,
0x9e, 0x62, 0xa4, 0x1d, 0xa2, 0x14, 0xca, 0x19, 0xc5, 0xd2, 0x32, 0x8a, 0x65, 0xe9, 0x5a, 0x41,
0x21, 0x9a, 0x69, 0x38, 0xd8, 0xde, 0xc5, 0x76, 0x7e, 0x0f, 0x6f, 0x4b, 0x96, 0x6d, 0x12, 0x13,
0x8d, 0x12, 0x62, 0x48, 0xcc, 0x45, 0xda, 0x7d, 0x2f, 0xb5, 0x5a, 0xd2, 0xc8, 0x4e, 0x75, 0x5b,
0x2a, 0x98, 0x95, 0x0c, 0x36, 0x76, 0xcd, 0x7d, 0xcb, 0x36, 0x7f, 0xb9, 0x9f, 0xa1, 0xc6, 0x85,
0x74, 0x09, 0x1b, 0xe9, 0x5d, 0x45, 0xd7, 0x54, 0x85, 0xe0, 0x4c, 0xc7, 0x83, 0x07, 0x99, 0x4a,
0x07, 0x20, 0x4a, 0x66, 0xc9, 0xf4, 0x9c, 0xb7, 0xab, 0x45, 0x3a, 0xa2, 0x03, 0xfa, 0xc4, 0xcc,
0xa7, 0x4a, 0xa6, 0x59, 0xd2, 0xb1, 0xc7, 0xd4, 0x30, 0x4c, 0xe2, 0x11, 0x65, 0xb3, 0x93, 0x6c,
0xb6, 0x81, 0x81, 0x2b, 0x16, 0xd9, 0x67, 0x93, 0x33, 0xed, 0x93, 0x45, 0x0d, 0xeb, 0x6a, 0xbe,
0xa2, 0x38, 0x65, 0x66, 0x31, 0xdd, 0x6e, 0x41, 0xb4, 0x0a, 0x76, 0x88, 0x52, 0xb1, 0x98, 0xc1,
0xdb, 0x9d, 0xe5, 0xd2, 0x54, 0x6c, 0x10, 0xad, 0xa8, 0x61, 0x9b, 0x91, 0x10, 0xff, 0x0d, 0xe0,
0xf9, 0xd5, 0x66, 0x11, 0xbf, 0xc0, 0xdb, 0x3b, 0xa6, 0x59, 0xde, 0x68, 0xda, 0x21, 0x05, 0x8e,
0x05, 0xaa, 0x9c, 0xd7, 0x54, 0x87, 0x07, 0x33, 0x60, 0x2e, 0xb9, 0xf4, 0x8e, 0xd4, 0x5a, 0x60,
0x29, 0x80, 0x13, 0x00, 0xc8, 0x8e, 0x1f, 0x67, 0x07, 0x7e, 0x0b, 0x22, 0xe3, 0xe0, 0xd1, 0xe1,
0x34, 0xf7, 0xf8, 0x70, 0x1a, 0xc8, 0xa3, 0x4a, 0xd0, 0xd2, 0x41, 0x39, 0x08, 0xf7, 0xbc, 0xc0,
0x79, 0x4d, 0xe5, 0x23, 0x33, 0x60, 0x2e, 0x91, 0x7d, 0xff, 0x38, 0x3b, 0x6b, 0x8b, 0xfc, 0xec,
0x92, 0x70, 0xf5, 0x8a, 0x92, 0xfe, 0xd5, 0xf7, 0xd2, 0x1f, 0x7c, 0x3d, 0xb7, 0xb2, 0x7c, 0x25,
0xfd, 0xf5, 0x8a, 0x3f, 0x9c, 0xbf, 0xbe, 0xb4, 0x78, 0x30, 0x5b, 0x3b, 0x9c, 0x4e, 0xf8, 0xac,
0xd7, 0xe4, 0xc4, 0x9e, 0x9f, 0x80, 0xf8, 0x6b, 0xf8, 0x9d, 0xce, 0xc4, 0x3e, 0xc3, 0x15, 0x4b,
0x57, 0x08, 0x0e, 0x26, 0xf8, 0x39, 0x4c, 0x12, 0xf6, 0xb7, 0x1b, 0x1e, 0xd0, 0xf0, 0x17, 0xc2,
0x87, 0x87, 0x0d, 0xd0, 0x35, 0x19, 0x92, 0x46, 0x00, 0xf1, 0x7f, 0x00, 0x4e, 0x9f, 0xcc, 0xe0,
0xa2, 0xbb, 0x9e, 0xe8, 0xc7, 0x30, 0xd2, 0x08, 0x99, 0x0e, 0x1f, 0x32, 0xb2, 0xb1, 0x26, 0x47,
0x34, 0x15, 0x4d, 0xc2, 0x98, 0xa1, 0x54, 0x30, 0x2b, 0x59, 0xfc, 0x38, 0x1b, 0xb3, 0x23, 0xfc,
0x39, 0x99, 0xfe, 0x89, 0xe6, 0x61, 0x52, 0xc5, 0x4e, 0xc1, 0xd6, 0x2c, 0x37, 0x3c, 0x1f, 0x0d,
0xda, 0xa8, 0x72, 0x70, 0x0e, 0xbd, 0x05, 0x07, 0x1d, 0x5c, 0xb0, 0x31, 0xe1, 0x63, 0x33, 0x60,
0x6e, 0x48, 0x66, 0x23, 0xb4, 0x08, 0x47, 0x54, 0x5c, 0x54, 0xaa, 0x3a, 0xc9, 0xef, 0x2a, 0x7a,
0x15, 0xf3, 0x03, 0xad, 0x20, 0xc3, 0x6c, 0xf6, 0x73, 0x77, 0x52, 0xbc, 0x9b, 0x84, 0xa9, 0x93,
0x13, 0x46, 0x5f, 0xc2, 0x68, 0xb3, 0x79, 0x2e, 0xbc, 0xa4, 0x79, 0x4e, 0x5e, 0xab, 0x2e, 0xbd,
0xe4, 0x62, 0xbe, 0xb2, 0x3a, 0x48, 0x70, 0x48, 0x37, 0x4b, 0x66, 0xbe, 0x6a, 0xeb, 0xb4, 0x12,
0x89, 0xec, 0xc4, 0x71, 0x76, 0xc0, 0x8e, 0xde, 0x04, 0xa0, 0x76, 0x38, 0x1d, 0xbf, 0x64, 0x96,
0xcc, 0x2d, 0xf9, 0x92, 0x1c, 0x77, 0x8d, 0xb6, 0x6c, 0xdd, 0xb5, 0xd7, 0x8c, 0xa2, 0x67, 0x3f,
0xd0, 0x69, 0xbf, 0x61, 0x14, 0x3d, 0x7b, 0xd7, 0xc8, 0xb5, 0xdf, 0x80, 0x6f, 0xa8, 0x66, 0xa1,
0x5a, 0xc1, 0x86, 0x27, 0x05, 0xd4, 0x71, 0x90, 0x3a, 0x4e, 0x05, 0x1c, 0xc7, 0xd7, 0x82, 0x46,
0x2e, 0xc2, 0x78, 0x8b, 0x1b, 0x0b, 0xbd, 0xad, 0x38, 0x98, 0x22, 0xc4, 0x3b, 0x43, 0x67, 0x15,
0x07, 0xd3, 0xd0, 0xae, 0x91, 0x6b, 0xbf, 0x09, 0xe3, 0x3b, 0x58, 0x51, 0xb1, 0xed, 0xf0, 0x43,
0x33, 0xd1, 0xb9, 0xe4, 0xd2, 0x0f, 0xc2, 0xaf, 0x80, 0xf4, 0xb1, 0xe7, 0xf9, 0x91, 0x41, 0xec,
0x7d, 0xd9, 0xc7, 0x41, 0x2b, 0x70, 0xb0, 0x68, 0xda, 0x15, 0x85, 0xf0, 0x09, 0x4a, 0xe0, 0x5d,
0xaf, 0x81, 0xcf, 0x9d, 0xd6, 0xc0, 0x32, 0x73, 0x43, 0xeb, 0x70, 0x90, 0xca, 0x9a, 0xc3, 0x43,
0x4a, 0x29, 0x13, 0x9e, 0x12, 0xdd, 0x3e, 0x32, 0x73, 0x47, 0x5f, 0xc2, 0xd1, 0xaa, 0xa5, 0x6b,
0x46, 0x39, 0x5f, 0xc1, 0x8e, 0xa3, 0x94, 0x30, 0x9f, 0xa4, 0x5d, 0xb6, 0xd4, 0x43, 0x8e, 0x97,
0x3d, 0x4f, 0x79, 0xc4, 0x43, 0x62, 0x43, 0x94, 0x83, 0xc9, 0x5f, 0x98, 0x9a, 0x91, 0x57, 0x0a,
0x05, 0x6c, 0x11, 0x7e, 0xb8, 0x6f, 0x5c, 0xe8, 0xc2, 0xac, 0x52, 0x14, 0xb4, 0x05, 0x87, 0x55,
0x73, 0xcf, 0xa0, 0x8c, 0x95, 0x42, 0x99, 0x1f, 0xe9, 0x1b, 0x35, 0xe9, 0xe3, 0xac, 0x16, 0xca,
0xe8, 0x0b, 0x38, 0xd2, 0x80, 0x35, 0x5c, 0xdc, 0xd1, 0xbe, 0x71, 0x1b, 0xfc, 0x3e, 0x51, 0xda,
0x80, 0x1d, 0x6c, 0x10, 0x7e, 0xec, 0xec, 0xc0, 0x39, 0x6c, 0x10, 0x74, 0x05, 0x8e, 0x35, 0x80,
0x8b, 0x8a, 0xa6, 0x63, 0x95, 0x1f, 0xef, 0x1b, 0x7a, 0xd4, 0x87, 0xba, 0x48, 0x91, 0x5a, 0xc0,
0xaf, 0x55, 0x71, 0x15, 0xab, 0xfc, 0x1b, 0x67, 0x07, 0xdf, 0xa4, 0x48, 0x2e, 0xb8, 0x6e, 0xb2,
0x33, 0xd1, 0x31, 0xf5, 0x5d, 0xac, 0xf2, 0xa8, 0x7f, 0x70, 0x1f, 0x2a, 0x47, 0x91, 0x52, 0xcb,
0x70, 0x38, 0xb8, 0xe5, 0xd0, 0x38, 0x8c, 0x96, 0xf1, 0xbe, 0x77, 0x4e, 0xc8, 0xee, 0x23, 0x3a,
0x07, 0x07, 0x3c, 0x45, 0xa6, 0x92, 0x27, 0x7b, 0x83, 0xe5, 0xc8, 0x0f, 0x41, 0xea, 0x3c, 0x8c,
0xfb, 0xbd, 0x8b, 0x60, 0xcc, 0x52, 0xc8, 0x0e, 0xf3, 0xa3, 0xcf, 0x62, 0x09, 0x4e, 0x9e, 0x4c,
0xc8, 0x41, 0x1f, 0xc3, 0x84, 0x7f, 0x84, 0xb9, 0x52, 0xed, 0xee, 0xca, 0x85, 0xf0, 0x09, 0xc9,
0x4d, 0x67, 0xf1, 0x4f, 0x49, 0x88, 0x3a, 0x2d, 0xd1, 0x66, 0xf0, 0x14, 0x48, 0x9f, 0x0e, 0x1d,
0x42, 0xfd, 0x7f, 0x02, 0x61, 0xc1, 0xc6, 0x0a, 0xc1, 0x6a, 0x5e, 0x21, 0xb4, 0x20, 0xc9, 0xa5,
0x94, 0xe4, 0x5d, 0x8f, 0x24, 0xff, 0x7a, 0x24, 0x7d, 0xe6, 0x5f, 0x8f, 0xb2, 0x43, 0xae, 0xfb,
0xad, 0xff, 0x4c, 0x03, 0x39, 0xc1, 0xfc, 0x56, 0x89, 0x0b, 0x52, 0xb5, 0x54, 0x1f, 0x24, 0xda,
0x0b, 0x08, 0xf3, 0x5b, 0x25, 0x2d, 0xa2, 0x1c, 0x0b, 0x21, 0xca, 0x1b, 0x4d, 0x51, 0x1e, 0x08,
0xab, 0x80, 0xa7, 0x8a, 0xf1, 0x60, 0x7f, 0x62, 0x7c, 0x15, 0x0e, 0x07, 0xae, 0x41, 0x0e, 0xdb,
0xe2, 0x7d, 0x9e, 0xd3, 0x31, 0xba, 0x3a, 0xc9, 0xe6, 0x6d, 0xc8, 0x41, 0x79, 0x38, 0xd6, 0xc0,
0x67, 0xaa, 0x3f, 0x4e, 0x73, 0xfe, 0x7e, 0x88, 0x9c, 0x5b, 0x64, 0x9f, 0xa5, 0x3e, 0x4a, 0x5a,
0xfe, 0x44, 0x9b, 0x1d, 0x87, 0x40, 0x9c, 0xa6, 0x10, 0xa2, 0x7f, 0x4f, 0x12, 0xff, 0x9f, 0xb5,
0x8a, 0xff, 0x50, 0xcf, 0x78, 0x41, 0xd1, 0xbf, 0xdc, 0x26, 0xfa, 0x89, 0x9e, 0xd1, 0x5a, 0xc4,
0xfe, 0xd3, 0x76, 0xb1, 0x87, 0x3d, 0xe3, 0xb5, 0x8a, 0xfc, 0xa7, 0xed, 0x22, 0x9f, 0xec, 0x1f,
0x90, 0x8a, 0x7b, 0xae, 0x53, 0xdc, 0x87, 0x7b, 0x86, 0x6c, 0x17, 0xf5, 0x5c, 0xa7, 0xa8, 0x8f,
0xf4, 0x0f, 0xca, 0xc4, 0x3c, 0xd7, 0x29, 0xe6, 0xa3, 0xbd, 0x83, 0xbe, 0x42, 0x11, 0x5f, 0x85,
0x13, 0x5d, 0x5a, 0xfe, 0x55, 0x9e, 0x03, 0x5b, 0x70, 0xa2, 0x33, 0x17, 0x07, 0x7d, 0x08, 0x87,
0xd8, 0x2b, 0x94, 0x2f, 0xff, 0xe2, 0xe9, 0x25, 0x90, 0x1b, 0x3e, 0xe2, 0x5f, 0x01, 0xfc, 0x76,
0xa7, 0xc1, 0x45, 0x2a, 0x31, 0x0e, 0xfa, 0x39, 0x8c, 0x7b, 0x6a, 0xe3, 0x83, 0x87, 0xd8, 0xfb,
0xcc, 0x57, 0x62, 0xbf, 0x4c, 0xf6, 0x18, 0x8c, 0x5b, 0xe4, 0xe0, 0x44, 0x2f, 0x15, 0x12, 0xff,
0x0e, 0xe0, 0xd4, 0x3a, 0x26, 0x5d, 0xf2, 0xc1, 0xd7, 0xaa, 0xd8, 0x21, 0xaf, 0xe3, 0xac, 0x5a,
0x81, 0xb0, 0xf9, 0x26, 0x7f, 0xe2, 0x59, 0x45, 0xd7, 0xfc, 0xb2, 0xe2, 0x94, 0xb3, 0x31, 0xd7,
0x5d, 0x4e, 0x14, 0xfd, 0x3f, 0xc4, 0x7f, 0x02, 0x28, 0x5c, 0xd2, 0x9c, 0x2e, 0xac, 0x1d, 0x9f,
0xf6, 0x37, 0xf0, 0xc6, 0x7e, 0xe6, 0x34, 0xfe, 0x06, 0xe0, 0x54, 0xee, 0x65, 0xb5, 0xff, 0x04,
0xc6, 0x59, 0x53, 0x31, 0xf2, 0x21, 0xfa, 0xb0, 0x0b, 0x71, 0x1f, 0xe4, 0xec, 0x8c, 0xff, 0x01,
0xe0, 0x6c, 0xd7, 0x6e, 0x69, 0x5c, 0x7e, 0x18, 0xf3, 0xd7, 0xf8, 0x9e, 0x7b, 0xe6, 0x24, 0x34,
0xf8, 0x4e, 0xf7, 0xe6, 0x69, 0xdc, 0x00, 0xfd, 0x2c, 0x5a, 0x43, 0x81, 0x9e, 0x43, 0x2d, 0xfd,
0x2e, 0xd1, 0xed, 0x6b, 0x80, 0x8c, 0x4b, 0x9a, 0xe3, 0x6e, 0x54, 0x1d, 0xc2, 0x75, 0x4c, 0x7c,
0x61, 0x78, 0xab, 0x03, 0xf9, 0xa3, 0x8a, 0x45, 0xf6, 0x53, 0xf3, 0xa1, 0xf5, 0x41, 0x9c, 0xfc,
0xcd, 0xbf, 0xfe, 0x7b, 0x3b, 0xf2, 0x26, 0x9a, 0xc8, 0x28, 0x4e, 0x86, 0xad, 0x7a, 0x9a, 0xc9,
0x04, 0xba, 0x03, 0x60, 0x72, 0x1d, 0x93, 0xc6, 0xb7, 0x88, 0xf7, 0xdb, 0x71, 0xc3, 0xac, 0x6c,
0xaa, 0x87, 0x9b, 0xb0, 0x98, 0xa1, 0x74, 0xe6, 0xd1, 0xbb, 0x41, 0x3a, 0x8d, 0xdb, 0x71, 0xe6,
0xba, 0xa6, 0x3a, 0x52, 0xe0, 0xbe, 0x75, 0x80, 0x6e, 0x03, 0x38, 0xe2, 0xae, 0x4d, 0xf3, 0x2e,
0xde, 0x21, 0x8e, 0xe1, 0x96, 0x2e, 0xf5, 0xdd, 0xf0, 0x34, 0x1d, 0xf1, 0x3c, 0xe5, 0xf9, 0x2d,
0xf4, 0x66, 0x57, 0x9e, 0xe8, 0x2f, 0x00, 0x46, 0xd7, 0x31, 0x41, 0x8b, 0xa1, 0x0a, 0xe6, 0x33,
0x08, 0xb1, 0x57, 0xc5, 0x9f, 0xd2, 0xc0, 0x6b, 0x28, 0x1b, 0x08, 0xcc, 0xea, 0xd2, 0xa6, 0x5e,
0x6d, 0xe3, 0x03, 0xcf, 0xa8, 0xf9, 0xc5, 0xf0, 0x00, 0xfd, 0x1e, 0xc0, 0x98, 0x5b, 0x1c, 0x24,
0x85, 0x2b, 0x59, 0xa3, 0x54, 0x6f, 0x9f, 0x4e, 0xd4, 0x11, 0x2f, 0x50, 0xa6, 0x19, 0x94, 0x6e,
0x65, 0x7a, 0x0a, 0x4b, 0xf4, 0x02, 0xc0, 0x68, 0xae, 0x5b, 0xe9, 0x72, 0x67, 0x2d, 0xdd, 0x9f,
0x01, 0x65, 0xf4, 0x07, 0x90, 0x92, 0x5b, 0x29, 0xb1, 0x27, 0x29, 0x54, 0x11, 0x83, 0xc6, 0x81,
0x62, 0x2e, 0x83, 0x85, 0xaf, 0x3e, 0x14, 0x3f, 0xe8, 0x1b, 0x78, 0x19, 0x2c, 0xb8, 0xbd, 0x3c,
0xb8, 0x86, 0x75, 0x4c, 0x30, 0xea, 0xed, 0xd8, 0x4c, 0x9d, 0x20, 0x04, 0x62, 0x96, 0x66, 0xfc,
0xa3, 0x85, 0xe5, 0x9e, 0xd6, 0xa0, 0x41, 0xdc, 0x1d, 0x64, 0xef, 0x82, 0x47, 0x47, 0x02, 0x78,
0x7c, 0x24, 0x80, 0x27, 0x47, 0x02, 0xf7, 0xf4, 0x48, 0xe0, 0x9e, 0x1d, 0x09, 0xdc, 0xf3, 0x23,
0x81, 0x7b, 0x71, 0x24, 0x80, 0x1b, 0x35, 0x01, 0xdc, 0xac, 0x09, 0xdc, 0xbd, 0x9a, 0x00, 0xee,
0xd7, 0x04, 0xee, 0x41, 0x4d, 0xe0, 0x1e, 0xd6, 0x04, 0xee, 0x51, 0x4d, 0x00, 0x8f, 0x6b, 0x02,
0x78, 0x52, 0x13, 0xb8, 0xa7, 0x35, 0x01, 0x3c, 0xab, 0x09, 0xdc, 0xf3, 0x9a, 0x00, 0x5e, 0xd4,
0x04, 0xee, 0x46, 0x5d, 0xe0, 0x6e, 0xd6, 0x05, 0x70, 0xab, 0x2e, 0x70, 0x7f, 0xac, 0x0b, 0xe0,
0x4e, 0x5d, 0xe0, 0xee, 0xd5, 0x05, 0xee, 0x7e, 0x5d, 0x00, 0x0f, 0xea, 0x02, 0x78, 0x58, 0x17,
0xc0, 0x57, 0x8b, 0x25, 0x53, 0x22, 0x3b, 0x98, 0xec, 0x68, 0x46, 0xc9, 0x91, 0x0c, 0x4c, 0xf6,
0x4c, 0xbb, 0x9c, 0x69, 0xfd, 0x38, 0x6f, 0x95, 0x4b, 0x19, 0x42, 0x0c, 0x6b, 0x7b, 0x7b, 0x90,
0x26, 0xfe, 0xde, 0xff, 0x03, 0x00, 0x00, 0xff, 0xff, 0xe8, 0x92, 0xcd, 0xbf, 0xed, 0x18, 0x00,
0x00,
}
// Equal reports whether this and that identify the same webhook.
// It accepts either *ApplicationWebhookIdentifiers or a plain
// ApplicationWebhookIdentifiers value; a nil receiver is equal only to a nil
// (or non-matching-type nil) argument.
func (this *ApplicationWebhookIdentifiers) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Normalize value arguments to a pointer; any other type is unequal.
	that1, ok := that.(*ApplicationWebhookIdentifiers)
	if !ok {
		that2, ok := that.(ApplicationWebhookIdentifiers)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	// nil/nil is equal; nil/non-nil (either side) is not.
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if !this.ApplicationIdentifiers.Equal(&that1.ApplicationIdentifiers) {
		return false
	}
	if this.WebhookID != that1.WebhookID {
		return false
	}
	return true
}
// Equal reports whether this and that identify the same webhook template.
// It accepts both pointer and value arguments; nil is equal only to nil.
func (this *ApplicationWebhookTemplateIdentifiers) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Normalize value arguments to a pointer; any other type is unequal.
	that1, ok := that.(*ApplicationWebhookTemplateIdentifiers)
	if !ok {
		that2, ok := that.(ApplicationWebhookTemplateIdentifiers)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if this.TemplateID != that1.TemplateID {
		return false
	}
	return true
}
// Equal reports whether this and that describe the same template field,
// comparing every scalar field. It accepts both pointer and value arguments;
// nil is equal only to nil.
func (this *ApplicationWebhookTemplateField) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Normalize value arguments to a pointer; any other type is unequal.
	that1, ok := that.(*ApplicationWebhookTemplateField)
	if !ok {
		that2, ok := that.(ApplicationWebhookTemplateField)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if this.ID != that1.ID {
		return false
	}
	if this.Name != that1.Name {
		return false
	}
	if this.Description != that1.Description {
		return false
	}
	if this.Secret != that1.Secret {
		return false
	}
	if this.DefaultValue != that1.DefaultValue {
		return false
	}
	return true
}
// Equal reports whether this and that are deeply equal webhook templates:
// embedded identifiers, all scalar fields, the Headers map entry-by-entry,
// the Fields slice element-by-element, and every per-message-type config.
// It accepts both pointer and value arguments; nil is equal only to nil.
func (this *ApplicationWebhookTemplate) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	// Normalize value arguments to a pointer; any other type is unequal.
	that1, ok := that.(*ApplicationWebhookTemplate)
	if !ok {
		that2, ok := that.(ApplicationWebhookTemplate)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if !this.ApplicationWebhookTemplateIdentifiers.Equal(&that1.ApplicationWebhookTemplateIdentifiers) {
		return false
	}
	if this.Name != that1.Name {
		return false
	}
	if this.Description != that1.Description {
		return false
	}
	if this.LogoURL != that1.LogoURL {
		return false
	}
	if this.InfoURL != that1.InfoURL {
		return false
	}
	if this.DocumentationURL != that1.DocumentationURL {
		return false
	}
	if this.BaseURL != that1.BaseURL {
		return false
	}
	// Maps must have identical sizes and identical values for every key.
	if len(this.Headers) != len(that1.Headers) {
		return false
	}
	for i := range this.Headers {
		if this.Headers[i] != that1.Headers[i] {
			return false
		}
	}
	if this.Format != that1.Format {
		return false
	}
	// Field slices must match element-by-element (order-sensitive).
	if len(this.Fields) != len(that1.Fields) {
		return false
	}
	for i := range this.Fields {
		if !this.Fields[i].Equal(that1.Fields[i]) {
			return false
		}
	}
	// Per-message-type endpoint configs; their Equal is nil-tolerant.
	if !this.UplinkMessage.Equal(that1.UplinkMessage) {
		return false
	}
	if !this.JoinAccept.Equal(that1.JoinAccept) {
		return false
	}
	if !this.DownlinkAck.Equal(that1.DownlinkAck) {
		return false
	}
	if !this.DownlinkNack.Equal(that1.DownlinkNack) {
		return false
	}
	if !this.DownlinkSent.Equal(that1.DownlinkSent) {
		return false
	}
	if !this.DownlinkFailed.Equal(that1.DownlinkFailed) {
		return false
	}
	if !this.DownlinkQueued.Equal(that1.DownlinkQueued) {
		return false
	}
	if !this.LocationSolved.Equal(that1.LocationSolved) {
		return false
	}
	return true
}
// Equal reports whether this and that are equal ApplicationWebhookTemplate_Message
// values (same Path). Accepts value or pointer; nil only equals nil.
// Generated by protoc-gen-gogo — do not edit by hand.
func (this *ApplicationWebhookTemplate_Message) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	that1, ok := that.(*ApplicationWebhookTemplate_Message)
	if !ok {
		that2, ok := that.(ApplicationWebhookTemplate_Message)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if this.Path != that1.Path {
		return false
	}
	return true
}
// Equal reports whether this and that hold element-wise equal Templates slices.
// Accepts value or pointer; nil only equals nil.
// Generated by protoc-gen-gogo — do not edit by hand.
func (this *ApplicationWebhookTemplates) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	that1, ok := that.(*ApplicationWebhookTemplates)
	if !ok {
		that2, ok := that.(ApplicationWebhookTemplates)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if len(this.Templates) != len(that1.Templates) {
		return false
	}
	for i := range this.Templates {
		if !this.Templates[i].Equal(that1.Templates[i]) {
			return false
		}
	}
	return true
}
// Equal reports whether this and that describe the same ApplicationWebhook,
// comparing identifiers, timestamps, endpoint configuration and all per-event
// message paths. Accepts value or pointer; nil only equals nil.
// Generated by protoc-gen-gogo — do not edit by hand.
func (this *ApplicationWebhook) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	that1, ok := that.(*ApplicationWebhook)
	if !ok {
		that2, ok := that.(ApplicationWebhook)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if !this.ApplicationWebhookIdentifiers.Equal(&that1.ApplicationWebhookIdentifiers) {
		return false
	}
	if !this.CreatedAt.Equal(that1.CreatedAt) {
		return false
	}
	if !this.UpdatedAt.Equal(that1.UpdatedAt) {
		return false
	}
	if this.BaseURL != that1.BaseURL {
		return false
	}
	if len(this.Headers) != len(that1.Headers) {
		return false
	}
	for i := range this.Headers {
		if this.Headers[i] != that1.Headers[i] {
			return false
		}
	}
	if this.Format != that1.Format {
		return false
	}
	// Template identifiers are an optional pointer here (unlike the embedded
	// webhook identifiers above), so no address-of is needed.
	if !this.ApplicationWebhookTemplateIdentifiers.Equal(that1.ApplicationWebhookTemplateIdentifiers) {
		return false
	}
	if len(this.TemplateFields) != len(that1.TemplateFields) {
		return false
	}
	for i := range this.TemplateFields {
		if this.TemplateFields[i] != that1.TemplateFields[i] {
			return false
		}
	}
	if !this.UplinkMessage.Equal(that1.UplinkMessage) {
		return false
	}
	if !this.JoinAccept.Equal(that1.JoinAccept) {
		return false
	}
	if !this.DownlinkAck.Equal(that1.DownlinkAck) {
		return false
	}
	if !this.DownlinkNack.Equal(that1.DownlinkNack) {
		return false
	}
	if !this.DownlinkSent.Equal(that1.DownlinkSent) {
		return false
	}
	if !this.DownlinkFailed.Equal(that1.DownlinkFailed) {
		return false
	}
	if !this.DownlinkQueued.Equal(that1.DownlinkQueued) {
		return false
	}
	if !this.LocationSolved.Equal(that1.LocationSolved) {
		return false
	}
	return true
}
// Equal reports whether this and that are equal ApplicationWebhook_Message
// values (same Path). Accepts value or pointer; nil only equals nil.
// Generated by protoc-gen-gogo — do not edit by hand.
func (this *ApplicationWebhook_Message) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	that1, ok := that.(*ApplicationWebhook_Message)
	if !ok {
		that2, ok := that.(ApplicationWebhook_Message)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if this.Path != that1.Path {
		return false
	}
	return true
}
// Equal reports whether this and that hold element-wise equal Webhooks slices.
// Accepts value or pointer; nil only equals nil.
// Generated by protoc-gen-gogo — do not edit by hand.
func (this *ApplicationWebhooks) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	that1, ok := that.(*ApplicationWebhooks)
	if !ok {
		that2, ok := that.(ApplicationWebhooks)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if len(this.Webhooks) != len(that1.Webhooks) {
		return false
	}
	for i := range this.Webhooks {
		if !this.Webhooks[i].Equal(that1.Webhooks[i]) {
			return false
		}
	}
	return true
}
// Equal reports whether this and that carry the same Formats map
// (same keys, same string values). Accepts value or pointer; nil only equals nil.
// Generated by protoc-gen-gogo — do not edit by hand.
func (this *ApplicationWebhookFormats) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	that1, ok := that.(*ApplicationWebhookFormats)
	if !ok {
		that2, ok := that.(ApplicationWebhookFormats)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if len(this.Formats) != len(that1.Formats) {
		return false
	}
	for i := range this.Formats {
		if this.Formats[i] != that1.Formats[i] {
			return false
		}
	}
	return true
}
// Equal reports whether this and that request the same webhook with the same
// field mask. Accepts value or pointer; nil only equals nil.
// Generated by protoc-gen-gogo — do not edit by hand.
func (this *GetApplicationWebhookRequest) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	that1, ok := that.(*GetApplicationWebhookRequest)
	if !ok {
		that2, ok := that.(GetApplicationWebhookRequest)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if !this.ApplicationWebhookIdentifiers.Equal(&that1.ApplicationWebhookIdentifiers) {
		return false
	}
	if !this.FieldMask.Equal(&that1.FieldMask) {
		return false
	}
	return true
}
// Equal reports whether this and that list webhooks of the same application
// with the same field mask. Accepts value or pointer; nil only equals nil.
// Generated by protoc-gen-gogo — do not edit by hand.
func (this *ListApplicationWebhooksRequest) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	that1, ok := that.(*ListApplicationWebhooksRequest)
	if !ok {
		that2, ok := that.(ListApplicationWebhooksRequest)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if !this.ApplicationIdentifiers.Equal(&that1.ApplicationIdentifiers) {
		return false
	}
	if !this.FieldMask.Equal(&that1.FieldMask) {
		return false
	}
	return true
}
// Equal reports whether this and that set the same webhook with the same
// field mask. Accepts value or pointer; nil only equals nil.
// Generated by protoc-gen-gogo — do not edit by hand.
func (this *SetApplicationWebhookRequest) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	that1, ok := that.(*SetApplicationWebhookRequest)
	if !ok {
		that2, ok := that.(SetApplicationWebhookRequest)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if !this.ApplicationWebhook.Equal(&that1.ApplicationWebhook) {
		return false
	}
	if !this.FieldMask.Equal(&that1.FieldMask) {
		return false
	}
	return true
}
// Equal reports whether this and that request the same webhook template with
// the same field mask. Accepts value or pointer; nil only equals nil.
// Generated by protoc-gen-gogo — do not edit by hand.
func (this *GetApplicationWebhookTemplateRequest) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	that1, ok := that.(*GetApplicationWebhookTemplateRequest)
	if !ok {
		that2, ok := that.(GetApplicationWebhookTemplateRequest)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if !this.ApplicationWebhookTemplateIdentifiers.Equal(&that1.ApplicationWebhookTemplateIdentifiers) {
		return false
	}
	if !this.FieldMask.Equal(&that1.FieldMask) {
		return false
	}
	return true
}
// Equal reports whether this and that carry the same field mask (the only
// field of this request). Accepts value or pointer; nil only equals nil.
// Generated by protoc-gen-gogo — do not edit by hand.
func (this *ListApplicationWebhookTemplatesRequest) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}
	that1, ok := that.(*ListApplicationWebhookTemplatesRequest)
	if !ok {
		that2, ok := that.(ListApplicationWebhookTemplatesRequest)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if !this.FieldMask.Equal(&that1.FieldMask) {
		return false
	}
	return true
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// ApplicationWebhookRegistryClient is the client API for ApplicationWebhookRegistry service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type ApplicationWebhookRegistryClient interface {
	// GetFormats returns the supported webhook payload formats.
	GetFormats(ctx context.Context, in *types.Empty, opts ...grpc.CallOption) (*ApplicationWebhookFormats, error)
	// GetTemplate retrieves a single webhook template by its identifiers.
	GetTemplate(ctx context.Context, in *GetApplicationWebhookTemplateRequest, opts ...grpc.CallOption) (*ApplicationWebhookTemplate, error)
	// ListTemplates lists the available webhook templates.
	ListTemplates(ctx context.Context, in *ListApplicationWebhookTemplatesRequest, opts ...grpc.CallOption) (*ApplicationWebhookTemplates, error)
	// Get retrieves a single configured webhook.
	Get(ctx context.Context, in *GetApplicationWebhookRequest, opts ...grpc.CallOption) (*ApplicationWebhook, error)
	// List lists the webhooks configured for an application.
	List(ctx context.Context, in *ListApplicationWebhooksRequest, opts ...grpc.CallOption) (*ApplicationWebhooks, error)
	// Set creates or updates a webhook.
	Set(ctx context.Context, in *SetApplicationWebhookRequest, opts ...grpc.CallOption) (*ApplicationWebhook, error)
	// Delete removes a webhook by its identifiers.
	Delete(ctx context.Context, in *ApplicationWebhookIdentifiers, opts ...grpc.CallOption) (*types.Empty, error)
}
// applicationWebhookRegistryClient is the generated concrete client backed by
// a gRPC connection.
type applicationWebhookRegistryClient struct {
	cc *grpc.ClientConn
}

// NewApplicationWebhookRegistryClient returns a client that issues
// ApplicationWebhookRegistry RPCs over cc.
func NewApplicationWebhookRegistryClient(cc *grpc.ClientConn) ApplicationWebhookRegistryClient {
	return &applicationWebhookRegistryClient{cc}
}
// Generated unary client stubs: each allocates the response message and
// invokes the corresponding full method on the underlying connection.
// Generated by protoc-gen-gogo/grpc — do not edit by hand.

// GetFormats calls ApplicationWebhookRegistry.GetFormats.
func (c *applicationWebhookRegistryClient) GetFormats(ctx context.Context, in *types.Empty, opts ...grpc.CallOption) (*ApplicationWebhookFormats, error) {
	out := new(ApplicationWebhookFormats)
	err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationWebhookRegistry/GetFormats", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// GetTemplate calls ApplicationWebhookRegistry.GetTemplate.
func (c *applicationWebhookRegistryClient) GetTemplate(ctx context.Context, in *GetApplicationWebhookTemplateRequest, opts ...grpc.CallOption) (*ApplicationWebhookTemplate, error) {
	out := new(ApplicationWebhookTemplate)
	err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationWebhookRegistry/GetTemplate", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// ListTemplates calls ApplicationWebhookRegistry.ListTemplates.
func (c *applicationWebhookRegistryClient) ListTemplates(ctx context.Context, in *ListApplicationWebhookTemplatesRequest, opts ...grpc.CallOption) (*ApplicationWebhookTemplates, error) {
	out := new(ApplicationWebhookTemplates)
	err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationWebhookRegistry/ListTemplates", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// Get calls ApplicationWebhookRegistry.Get.
func (c *applicationWebhookRegistryClient) Get(ctx context.Context, in *GetApplicationWebhookRequest, opts ...grpc.CallOption) (*ApplicationWebhook, error) {
	out := new(ApplicationWebhook)
	err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationWebhookRegistry/Get", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// List calls ApplicationWebhookRegistry.List.
func (c *applicationWebhookRegistryClient) List(ctx context.Context, in *ListApplicationWebhooksRequest, opts ...grpc.CallOption) (*ApplicationWebhooks, error) {
	out := new(ApplicationWebhooks)
	err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationWebhookRegistry/List", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// Set calls ApplicationWebhookRegistry.Set.
func (c *applicationWebhookRegistryClient) Set(ctx context.Context, in *SetApplicationWebhookRequest, opts ...grpc.CallOption) (*ApplicationWebhook, error) {
	out := new(ApplicationWebhook)
	err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationWebhookRegistry/Set", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// Delete calls ApplicationWebhookRegistry.Delete.
func (c *applicationWebhookRegistryClient) Delete(ctx context.Context, in *ApplicationWebhookIdentifiers, opts ...grpc.CallOption) (*types.Empty, error) {
	out := new(types.Empty)
	err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationWebhookRegistry/Delete", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}
// ApplicationWebhookRegistryServer is the server API for ApplicationWebhookRegistry service.
// Implementations provide the seven unary RPCs mirrored by the client interface above.
type ApplicationWebhookRegistryServer interface {
	GetFormats(context.Context, *types.Empty) (*ApplicationWebhookFormats, error)
	GetTemplate(context.Context, *GetApplicationWebhookTemplateRequest) (*ApplicationWebhookTemplate, error)
	ListTemplates(context.Context, *ListApplicationWebhookTemplatesRequest) (*ApplicationWebhookTemplates, error)
	Get(context.Context, *GetApplicationWebhookRequest) (*ApplicationWebhook, error)
	List(context.Context, *ListApplicationWebhooksRequest) (*ApplicationWebhooks, error)
	Set(context.Context, *SetApplicationWebhookRequest) (*ApplicationWebhook, error)
	Delete(context.Context, *ApplicationWebhookIdentifiers) (*types.Empty, error)
}
// UnimplementedApplicationWebhookRegistryServer can be embedded to have forward compatible implementations.
// Each stub returns a gRPC Unimplemented status so partial implementations
// still satisfy the interface when new RPCs are added.
type UnimplementedApplicationWebhookRegistryServer struct {
}

func (*UnimplementedApplicationWebhookRegistryServer) GetFormats(ctx context.Context, req *types.Empty) (*ApplicationWebhookFormats, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetFormats not implemented")
}
func (*UnimplementedApplicationWebhookRegistryServer) GetTemplate(ctx context.Context, req *GetApplicationWebhookTemplateRequest) (*ApplicationWebhookTemplate, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetTemplate not implemented")
}
func (*UnimplementedApplicationWebhookRegistryServer) ListTemplates(ctx context.Context, req *ListApplicationWebhookTemplatesRequest) (*ApplicationWebhookTemplates, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ListTemplates not implemented")
}
func (*UnimplementedApplicationWebhookRegistryServer) Get(ctx context.Context, req *GetApplicationWebhookRequest) (*ApplicationWebhook, error) {
	return nil, status.Errorf(codes.Unimplemented, "method Get not implemented")
}
func (*UnimplementedApplicationWebhookRegistryServer) List(ctx context.Context, req *ListApplicationWebhooksRequest) (*ApplicationWebhooks, error) {
	return nil, status.Errorf(codes.Unimplemented, "method List not implemented")
}
func (*UnimplementedApplicationWebhookRegistryServer) Set(ctx context.Context, req *SetApplicationWebhookRequest) (*ApplicationWebhook, error) {
	return nil, status.Errorf(codes.Unimplemented, "method Set not implemented")
}
func (*UnimplementedApplicationWebhookRegistryServer) Delete(ctx context.Context, req *ApplicationWebhookIdentifiers) (*types.Empty, error) {
	return nil, status.Errorf(codes.Unimplemented, "method Delete not implemented")
}
// RegisterApplicationWebhookRegistryServer registers srv with the gRPC server
// under the service descriptor defined below.
func RegisterApplicationWebhookRegistryServer(s *grpc.Server, srv ApplicationWebhookRegistryServer) {
	s.RegisterService(&_ApplicationWebhookRegistry_serviceDesc, srv)
}
// Generated unary server handlers: each decodes the request into its concrete
// type, then either calls the service method directly or wraps it in the
// configured unary interceptor chain. Generated by protoc-gen-gogo/grpc —
// do not edit by hand.

func _ApplicationWebhookRegistry_GetFormats_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(types.Empty)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationWebhookRegistryServer).GetFormats(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationWebhookRegistry/GetFormats",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(ApplicationWebhookRegistryServer).GetFormats(ctx, req.(*types.Empty))
	}
	return interceptor(ctx, in, info, handler)
}
func _ApplicationWebhookRegistry_GetTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetApplicationWebhookTemplateRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationWebhookRegistryServer).GetTemplate(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationWebhookRegistry/GetTemplate",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(ApplicationWebhookRegistryServer).GetTemplate(ctx, req.(*GetApplicationWebhookTemplateRequest))
	}
	return interceptor(ctx, in, info, handler)
}
func _ApplicationWebhookRegistry_ListTemplates_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ListApplicationWebhookTemplatesRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationWebhookRegistryServer).ListTemplates(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationWebhookRegistry/ListTemplates",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(ApplicationWebhookRegistryServer).ListTemplates(ctx, req.(*ListApplicationWebhookTemplatesRequest))
	}
	return interceptor(ctx, in, info, handler)
}
func _ApplicationWebhookRegistry_Get_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetApplicationWebhookRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationWebhookRegistryServer).Get(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationWebhookRegistry/Get",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(ApplicationWebhookRegistryServer).Get(ctx, req.(*GetApplicationWebhookRequest))
	}
	return interceptor(ctx, in, info, handler)
}
func _ApplicationWebhookRegistry_List_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ListApplicationWebhooksRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationWebhookRegistryServer).List(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationWebhookRegistry/List",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(ApplicationWebhookRegistryServer).List(ctx, req.(*ListApplicationWebhooksRequest))
	}
	return interceptor(ctx, in, info, handler)
}
func _ApplicationWebhookRegistry_Set_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(SetApplicationWebhookRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationWebhookRegistryServer).Set(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationWebhookRegistry/Set",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(ApplicationWebhookRegistryServer).Set(ctx, req.(*SetApplicationWebhookRequest))
	}
	return interceptor(ctx, in, info, handler)
}
func _ApplicationWebhookRegistry_Delete_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ApplicationWebhookIdentifiers)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationWebhookRegistryServer).Delete(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationWebhookRegistry/Delete",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(ApplicationWebhookRegistryServer).Delete(ctx, req.(*ApplicationWebhookIdentifiers))
	}
	return interceptor(ctx, in, info, handler)
}
// _ApplicationWebhookRegistry_serviceDesc wires the fully-qualified service
// name and each unary method to its generated handler; there are no
// streaming methods on this service.
var _ApplicationWebhookRegistry_serviceDesc = grpc.ServiceDesc{
	ServiceName: "ttn.lorawan.v3.ApplicationWebhookRegistry",
	HandlerType: (*ApplicationWebhookRegistryServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "GetFormats",
			Handler:    _ApplicationWebhookRegistry_GetFormats_Handler,
		},
		{
			MethodName: "GetTemplate",
			Handler:    _ApplicationWebhookRegistry_GetTemplate_Handler,
		},
		{
			MethodName: "ListTemplates",
			Handler:    _ApplicationWebhookRegistry_ListTemplates_Handler,
		},
		{
			MethodName: "Get",
			Handler:    _ApplicationWebhookRegistry_Get_Handler,
		},
		{
			MethodName: "List",
			Handler:    _ApplicationWebhookRegistry_List_Handler,
		},
		{
			MethodName: "Set",
			Handler:    _ApplicationWebhookRegistry_Set_Handler,
		},
		{
			MethodName: "Delete",
			Handler:    _ApplicationWebhookRegistry_Delete_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "lorawan-stack/api/applicationserver_web.proto",
}
// Marshal encodes m into a freshly-allocated protobuf wire-format buffer.
// Generated by protoc-gen-gogo — field tag bytes below encode the .proto
// field numbers; do not edit by hand.
func (m *ApplicationWebhookIdentifiers) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes m into dAtA, which must be at least m.Size() bytes.
func (m *ApplicationWebhookIdentifiers) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer writes the message backwards from the end of dAtA
// (fields emitted in descending field-number order) and returns the number
// of bytes written.
func (m *ApplicationWebhookIdentifiers) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if len(m.WebhookID) > 0 {
		i -= len(m.WebhookID)
		copy(dAtA[i:], m.WebhookID)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.WebhookID)))
		i--
		dAtA[i] = 0x12 // field 2 (webhook_id), wire type 2
	}
	// Embedded ApplicationIdentifiers (field 1) is always written.
	{
		size, err := m.ApplicationIdentifiers.MarshalToSizedBuffer(dAtA[:i])
		if err != nil {
			return 0, err
		}
		i -= size
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
	}
	i--
	dAtA[i] = 0xa // field 1 (ids), wire type 2
	return len(dAtA) - i, nil
}
// Marshal encodes m into a freshly-allocated protobuf wire-format buffer.
// Generated by protoc-gen-gogo — do not edit by hand.
func (m *ApplicationWebhookTemplateIdentifiers) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes m into dAtA, which must be at least m.Size() bytes.
func (m *ApplicationWebhookTemplateIdentifiers) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer writes the message backwards from the end of dAtA
// and returns the number of bytes written.
func (m *ApplicationWebhookTemplateIdentifiers) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if len(m.TemplateID) > 0 {
		i -= len(m.TemplateID)
		copy(dAtA[i:], m.TemplateID)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.TemplateID)))
		i--
		dAtA[i] = 0xa // field 1 (template_id), wire type 2
	}
	return len(dAtA) - i, nil
}
// Marshal encodes m into a freshly-allocated protobuf wire-format buffer.
// Generated by protoc-gen-gogo — do not edit by hand.
func (m *ApplicationWebhookTemplateField) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes m into dAtA, which must be at least m.Size() bytes.
func (m *ApplicationWebhookTemplateField) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer writes the message backwards from the end of dAtA;
// fields are emitted in descending field-number order (5..1), and zero
// values are skipped per proto3 semantics.
func (m *ApplicationWebhookTemplateField) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if len(m.DefaultValue) > 0 {
		i -= len(m.DefaultValue)
		copy(dAtA[i:], m.DefaultValue)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.DefaultValue)))
		i--
		dAtA[i] = 0x2a // field 5 (default_value)
	}
	if m.Secret {
		i--
		if m.Secret {
			dAtA[i] = 1
		} else {
			dAtA[i] = 0
		}
		i--
		dAtA[i] = 0x20 // field 4 (secret), varint bool
	}
	if len(m.Description) > 0 {
		i -= len(m.Description)
		copy(dAtA[i:], m.Description)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.Description)))
		i--
		dAtA[i] = 0x1a // field 3 (description)
	}
	if len(m.Name) > 0 {
		i -= len(m.Name)
		copy(dAtA[i:], m.Name)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.Name)))
		i--
		dAtA[i] = 0x12 // field 2 (name)
	}
	if len(m.ID) > 0 {
		i -= len(m.ID)
		copy(dAtA[i:], m.ID)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.ID)))
		i--
		dAtA[i] = 0xa // field 1 (id)
	}
	return len(dAtA) - i, nil
}
// Marshal encodes m into a freshly-allocated protobuf wire-format buffer.
// Generated by protoc-gen-gogo — do not edit by hand.
func (m *ApplicationWebhookTemplate) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes m into dAtA, which must be at least m.Size() bytes.
func (m *ApplicationWebhookTemplate) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer writes the message backwards from the end of dAtA,
// emitting fields in descending field-number order (18..1). Optional
// message fields are skipped when nil; fields 16-18 need a two-byte tag
// because their numbers exceed 15.
func (m *ApplicationWebhookTemplate) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.LocationSolved != nil {
		{
			size, err := m.LocationSolved.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x1
		i--
		dAtA[i] = 0x92 // field 18 (location_solved)
	}
	if m.DownlinkQueued != nil {
		{
			size, err := m.DownlinkQueued.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x1
		i--
		dAtA[i] = 0x8a // field 17 (downlink_queued)
	}
	if m.DownlinkFailed != nil {
		{
			size, err := m.DownlinkFailed.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x1
		i--
		dAtA[i] = 0x82 // field 16 (downlink_failed)
	}
	if m.DownlinkSent != nil {
		{
			size, err := m.DownlinkSent.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x7a // field 15 (downlink_sent)
	}
	if m.DownlinkNack != nil {
		{
			size, err := m.DownlinkNack.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x72 // field 14 (downlink_nack)
	}
	if m.DownlinkAck != nil {
		{
			size, err := m.DownlinkAck.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x6a // field 13 (downlink_ack)
	}
	if m.JoinAccept != nil {
		{
			size, err := m.JoinAccept.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x62 // field 12 (join_accept)
	}
	if m.UplinkMessage != nil {
		{
			size, err := m.UplinkMessage.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x5a // field 11 (uplink_message)
	}
	if len(m.Fields) > 0 {
		// Repeated field: written back-to-front so it decodes in order.
		for iNdEx := len(m.Fields) - 1; iNdEx >= 0; iNdEx-- {
			{
				size, err := m.Fields[iNdEx].MarshalToSizedBuffer(dAtA[:i])
				if err != nil {
					return 0, err
				}
				i -= size
				i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
			}
			i--
			dAtA[i] = 0x52 // field 10 (fields)
		}
	}
	if len(m.Format) > 0 {
		i -= len(m.Format)
		copy(dAtA[i:], m.Format)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.Format)))
		i--
		dAtA[i] = 0x4a // field 9 (format)
	}
	if len(m.Headers) > 0 {
		// Map entries encode as nested messages: key = field 1, value = field 2.
		for k := range m.Headers {
			v := m.Headers[k]
			baseI := i
			i -= len(v)
			copy(dAtA[i:], v)
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(v)))
			i--
			dAtA[i] = 0x12
			i -= len(k)
			copy(dAtA[i:], k)
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(k)))
			i--
			dAtA[i] = 0xa
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(baseI-i))
			i--
			dAtA[i] = 0x42 // field 8 (headers)
		}
	}
	if len(m.BaseURL) > 0 {
		i -= len(m.BaseURL)
		copy(dAtA[i:], m.BaseURL)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.BaseURL)))
		i--
		dAtA[i] = 0x3a // field 7 (base_url)
	}
	if len(m.DocumentationURL) > 0 {
		i -= len(m.DocumentationURL)
		copy(dAtA[i:], m.DocumentationURL)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.DocumentationURL)))
		i--
		dAtA[i] = 0x32 // field 6 (documentation_url)
	}
	if len(m.InfoURL) > 0 {
		i -= len(m.InfoURL)
		copy(dAtA[i:], m.InfoURL)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.InfoURL)))
		i--
		dAtA[i] = 0x2a // field 5 (info_url)
	}
	if len(m.LogoURL) > 0 {
		i -= len(m.LogoURL)
		copy(dAtA[i:], m.LogoURL)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.LogoURL)))
		i--
		dAtA[i] = 0x22 // field 4 (logo_url)
	}
	if len(m.Description) > 0 {
		i -= len(m.Description)
		copy(dAtA[i:], m.Description)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.Description)))
		i--
		dAtA[i] = 0x1a // field 3 (description)
	}
	if len(m.Name) > 0 {
		i -= len(m.Name)
		copy(dAtA[i:], m.Name)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.Name)))
		i--
		dAtA[i] = 0x12 // field 2 (name)
	}
	// Embedded template identifiers (field 1) are always written.
	{
		size, err := m.ApplicationWebhookTemplateIdentifiers.MarshalToSizedBuffer(dAtA[:i])
		if err != nil {
			return 0, err
		}
		i -= size
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
	}
	i--
	dAtA[i] = 0xa // field 1 (ids)
	return len(dAtA) - i, nil
}
// Marshal encodes m into a freshly-allocated protobuf wire-format buffer.
// Generated by protoc-gen-gogo — do not edit by hand.
func (m *ApplicationWebhookTemplate_Message) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes m into dAtA, which must be at least m.Size() bytes.
func (m *ApplicationWebhookTemplate_Message) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer writes the message backwards from the end of dAtA
// and returns the number of bytes written.
func (m *ApplicationWebhookTemplate_Message) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if len(m.Path) > 0 {
		i -= len(m.Path)
		copy(dAtA[i:], m.Path)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.Path)))
		i--
		dAtA[i] = 0xa // field 1 (path)
	}
	return len(dAtA) - i, nil
}
// Marshal encodes m into a freshly-allocated protobuf wire-format buffer.
// Generated by protoc-gen-gogo — do not edit by hand.
func (m *ApplicationWebhookTemplates) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes m into dAtA, which must be at least m.Size() bytes.
func (m *ApplicationWebhookTemplates) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer writes the repeated Templates field backwards from
// the end of dAtA (reverse order so it decodes forward) and returns the
// number of bytes written.
func (m *ApplicationWebhookTemplates) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if len(m.Templates) > 0 {
		for iNdEx := len(m.Templates) - 1; iNdEx >= 0; iNdEx-- {
			{
				size, err := m.Templates[iNdEx].MarshalToSizedBuffer(dAtA[:i])
				if err != nil {
					return 0, err
				}
				i -= size
				i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
			}
			i--
			dAtA[i] = 0xa // field 1 (templates)
		}
	}
	return len(dAtA) - i, nil
}
// Marshal encodes m into a freshly-allocated protobuf wire-format buffer.
// Generated by protoc-gen-gogo — do not edit by hand.
func (m *ApplicationWebhook) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes m into dAtA, which must be at least m.Size() bytes.
func (m *ApplicationWebhook) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer writes the message backwards from the end of dAtA,
// emitting fields in descending field-number order (16..1). Timestamps use
// gogoproto stdtime encoding; optional message fields are skipped when nil.
func (m *ApplicationWebhook) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if len(m.TemplateFields) > 0 {
		// Map entries encode as nested messages: key = field 1, value = field 2.
		for k := range m.TemplateFields {
			v := m.TemplateFields[k]
			baseI := i
			i -= len(v)
			copy(dAtA[i:], v)
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(v)))
			i--
			dAtA[i] = 0x12
			i -= len(k)
			copy(dAtA[i:], k)
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(k)))
			i--
			dAtA[i] = 0xa
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(baseI-i))
			i--
			dAtA[i] = 0x1
			i--
			dAtA[i] = 0x82 // field 16 (template_fields), two-byte tag
		}
	}
	if m.ApplicationWebhookTemplateIdentifiers != nil {
		{
			size, err := m.ApplicationWebhookTemplateIdentifiers.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x7a // field 15 (template_ids)
	}
	if m.LocationSolved != nil {
		{
			size, err := m.LocationSolved.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x72 // field 14 (location_solved)
	}
	if m.DownlinkQueued != nil {
		{
			size, err := m.DownlinkQueued.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x6a // field 13 (downlink_queued)
	}
	if m.DownlinkFailed != nil {
		{
			size, err := m.DownlinkFailed.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x62 // field 12 (downlink_failed)
	}
	if m.DownlinkSent != nil {
		{
			size, err := m.DownlinkSent.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x5a // field 11 (downlink_sent)
	}
	if m.DownlinkNack != nil {
		{
			size, err := m.DownlinkNack.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x52 // field 10 (downlink_nack)
	}
	if m.DownlinkAck != nil {
		{
			size, err := m.DownlinkAck.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x4a // field 9 (downlink_ack)
	}
	if m.JoinAccept != nil {
		{
			size, err := m.JoinAccept.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x42 // field 8 (join_accept)
	}
	if m.UplinkMessage != nil {
		{
			size, err := m.UplinkMessage.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x3a // field 7 (uplink_message)
	}
	if len(m.Format) > 0 {
		i -= len(m.Format)
		copy(dAtA[i:], m.Format)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.Format)))
		i--
		dAtA[i] = 0x32 // field 6 (format)
	}
	if len(m.Headers) > 0 {
		for k := range m.Headers {
			v := m.Headers[k]
			baseI := i
			i -= len(v)
			copy(dAtA[i:], v)
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(v)))
			i--
			dAtA[i] = 0x12
			i -= len(k)
			copy(dAtA[i:], k)
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(k)))
			i--
			dAtA[i] = 0xa
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(baseI-i))
			i--
			dAtA[i] = 0x2a // field 5 (headers)
		}
	}
	if len(m.BaseURL) > 0 {
		i -= len(m.BaseURL)
		copy(dAtA[i:], m.BaseURL)
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(m.BaseURL)))
		i--
		dAtA[i] = 0x22 // field 4 (base_url)
	}
	// UpdatedAt / CreatedAt are non-nullable stdtime timestamps: always written.
	n20, err20 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.UpdatedAt, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.UpdatedAt):])
	if err20 != nil {
		return 0, err20
	}
	i -= n20
	i = encodeVarintApplicationserverWeb(dAtA, i, uint64(n20))
	i--
	dAtA[i] = 0x1a // field 3 (updated_at)
	n21, err21 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.CreatedAt, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.CreatedAt):])
	if err21 != nil {
		return 0, err21
	}
	i -= n21
	i = encodeVarintApplicationserverWeb(dAtA, i, uint64(n21))
	i--
	dAtA[i] = 0x12 // field 2 (created_at)
	// Embedded webhook identifiers (field 1) are always written.
	{
		size, err := m.ApplicationWebhookIdentifiers.MarshalToSizedBuffer(dAtA[:i])
		if err != nil {
			return 0, err
		}
		i -= size
		i = encodeVarintApplicationserverWeb(dAtA, i, uint64(size))
	}
	i--
	dAtA[i] = 0xa // field 1 (ids)
	return len(dAtA) - i, nil
}
// Marshal encodes the message into a freshly allocated buffer and
// returns the encoded bytes.
func (m *ApplicationWebhook_Message) Marshal() (dAtA []byte, err error) {
	buf := make([]byte, m.Size())
	n, err := m.MarshalToSizedBuffer(buf)
	if err != nil {
		return nil, err
	}
	return buf[:n], nil
}
// MarshalTo encodes the message into the front of dAtA and returns the
// number of bytes written.
func (m *ApplicationWebhook_Message) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}
// MarshalToSizedBuffer encodes the message backwards into the tail of
// dAtA and returns the number of bytes written.
func (m *ApplicationWebhook_Message) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	idx := len(dAtA)
	// field 1 (path), wire type 2: tag byte 0xa.
	if p := m.Path; len(p) > 0 {
		idx -= len(p)
		copy(dAtA[idx:], p)
		idx = encodeVarintApplicationserverWeb(dAtA, idx, uint64(len(p)))
		idx--
		dAtA[idx] = 0xa
	}
	return len(dAtA) - idx, nil
}
// Marshal encodes the message into a freshly allocated buffer and
// returns the encoded bytes.
func (m *ApplicationWebhooks) Marshal() (dAtA []byte, err error) {
	buf := make([]byte, m.Size())
	n, err := m.MarshalToSizedBuffer(buf)
	if err != nil {
		return nil, err
	}
	return buf[:n], nil
}
// MarshalTo encodes the message into the front of dAtA and returns the
// number of bytes written.
func (m *ApplicationWebhooks) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}
// MarshalToSizedBuffer encodes the webhook list backwards into the tail
// of dAtA and returns the number of bytes written. Elements are written
// last-to-first so that they decode in order.
func (m *ApplicationWebhooks) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	idx := len(dAtA)
	for j := len(m.Webhooks) - 1; j >= 0; j-- {
		size, err := m.Webhooks[j].MarshalToSizedBuffer(dAtA[:idx])
		if err != nil {
			return 0, err
		}
		idx -= size
		idx = encodeVarintApplicationserverWeb(dAtA, idx, uint64(size))
		idx--
		// field 1 (webhooks), wire type 2: tag byte 0xa.
		dAtA[idx] = 0xa
	}
	return len(dAtA) - idx, nil
}
// Marshal encodes the message into a freshly allocated buffer and
// returns the encoded bytes.
func (m *ApplicationWebhookFormats) Marshal() (dAtA []byte, err error) {
	buf := make([]byte, m.Size())
	n, err := m.MarshalToSizedBuffer(buf)
	if err != nil {
		return nil, err
	}
	return buf[:n], nil
}
// MarshalTo encodes the message into the front of dAtA and returns the
// number of bytes written.
func (m *ApplicationWebhookFormats) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}
// MarshalToSizedBuffer encodes the formats map backwards into the tail of
// dAtA and returns the number of bytes written. Each map entry is emitted
// as an embedded message (value, then key, then the entry header), all
// under field number 1. NOTE: Go map iteration order is unspecified, so
// two encodings of the same message may differ byte-wise.
func (m *ApplicationWebhookFormats) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if len(m.Formats) > 0 {
		for k := range m.Formats {
			v := m.Formats[k]
			baseI := i
			// entry value: field 2, wire type 2 (tag 0x12).
			i -= len(v)
			copy(dAtA[i:], v)
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(v)))
			i--
			dAtA[i] = 0x12
			// entry key: field 1, wire type 2 (tag 0xa).
			i -= len(k)
			copy(dAtA[i:], k)
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(len(k)))
			i--
			dAtA[i] = 0xa
			// entry header: field 1, wire type 2, length-prefixed.
			i = encodeVarintApplicationserverWeb(dAtA, i, uint64(baseI-i))
			i--
			dAtA[i] = 0xa
		}
	}
	return len(dAtA) - i, nil
}
// Marshal encodes the request into a freshly allocated buffer and
// returns the encoded bytes.
func (m *GetApplicationWebhookRequest) Marshal() (dAtA []byte, err error) {
	buf := make([]byte, m.Size())
	n, err := m.MarshalToSizedBuffer(buf)
	if err != nil {
		return nil, err
	}
	return buf[:n], nil
}
// MarshalTo encodes the request into the front of dAtA and returns the
// number of bytes written.
func (m *GetApplicationWebhookRequest) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}
// MarshalToSizedBuffer encodes the request backwards into the tail of
// dAtA and returns the number of bytes written.
func (m *GetApplicationWebhookRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	idx := len(dAtA)
	// field 2 (field_mask), wire type 2: tag byte 0x12.
	size, err := m.FieldMask.MarshalToSizedBuffer(dAtA[:idx])
	if err != nil {
		return 0, err
	}
	idx -= size
	idx = encodeVarintApplicationserverWeb(dAtA, idx, uint64(size))
	idx--
	dAtA[idx] = 0x12
	// field 1 (ids), wire type 2: tag byte 0xa.
	size, err = m.ApplicationWebhookIdentifiers.MarshalToSizedBuffer(dAtA[:idx])
	if err != nil {
		return 0, err
	}
	idx -= size
	idx = encodeVarintApplicationserverWeb(dAtA, idx, uint64(size))
	idx--
	dAtA[idx] = 0xa
	return len(dAtA) - idx, nil
}
// Marshal encodes the request into a freshly allocated buffer and
// returns the encoded bytes.
func (m *ListApplicationWebhooksRequest) Marshal() (dAtA []byte, err error) {
	buf := make([]byte, m.Size())
	n, err := m.MarshalToSizedBuffer(buf)
	if err != nil {
		return nil, err
	}
	return buf[:n], nil
}
// MarshalTo encodes the request into the front of dAtA and returns the
// number of bytes written.
func (m *ListApplicationWebhooksRequest) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}
// MarshalToSizedBuffer encodes the request backwards into the tail of
// dAtA and returns the number of bytes written.
func (m *ListApplicationWebhooksRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	idx := len(dAtA)
	// field 2 (field_mask), wire type 2: tag byte 0x12.
	size, err := m.FieldMask.MarshalToSizedBuffer(dAtA[:idx])
	if err != nil {
		return 0, err
	}
	idx -= size
	idx = encodeVarintApplicationserverWeb(dAtA, idx, uint64(size))
	idx--
	dAtA[idx] = 0x12
	// field 1 (application_ids), wire type 2: tag byte 0xa.
	size, err = m.ApplicationIdentifiers.MarshalToSizedBuffer(dAtA[:idx])
	if err != nil {
		return 0, err
	}
	idx -= size
	idx = encodeVarintApplicationserverWeb(dAtA, idx, uint64(size))
	idx--
	dAtA[idx] = 0xa
	return len(dAtA) - idx, nil
}
// Marshal encodes the request into a freshly allocated buffer and
// returns the encoded bytes.
func (m *SetApplicationWebhookRequest) Marshal() (dAtA []byte, err error) {
	buf := make([]byte, m.Size())
	n, err := m.MarshalToSizedBuffer(buf)
	if err != nil {
		return nil, err
	}
	return buf[:n], nil
}
// MarshalTo encodes the request into the front of dAtA and returns the
// number of bytes written.
func (m *SetApplicationWebhookRequest) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}
// MarshalToSizedBuffer encodes the request backwards into the tail of
// dAtA and returns the number of bytes written.
func (m *SetApplicationWebhookRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	idx := len(dAtA)
	// field 2 (field_mask), wire type 2: tag byte 0x12.
	size, err := m.FieldMask.MarshalToSizedBuffer(dAtA[:idx])
	if err != nil {
		return 0, err
	}
	idx -= size
	idx = encodeVarintApplicationserverWeb(dAtA, idx, uint64(size))
	idx--
	dAtA[idx] = 0x12
	// field 1 (webhook), wire type 2: tag byte 0xa.
	size, err = m.ApplicationWebhook.MarshalToSizedBuffer(dAtA[:idx])
	if err != nil {
		return 0, err
	}
	idx -= size
	idx = encodeVarintApplicationserverWeb(dAtA, idx, uint64(size))
	idx--
	dAtA[idx] = 0xa
	return len(dAtA) - idx, nil
}
// Marshal encodes the request into a freshly allocated buffer and
// returns the encoded bytes.
func (m *GetApplicationWebhookTemplateRequest) Marshal() (dAtA []byte, err error) {
	buf := make([]byte, m.Size())
	n, err := m.MarshalToSizedBuffer(buf)
	if err != nil {
		return nil, err
	}
	return buf[:n], nil
}
// MarshalTo encodes the request into the front of dAtA and returns the
// number of bytes written.
func (m *GetApplicationWebhookTemplateRequest) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}
// MarshalToSizedBuffer encodes the request backwards into the tail of
// dAtA and returns the number of bytes written.
func (m *GetApplicationWebhookTemplateRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	idx := len(dAtA)
	// field 2 (field_mask), wire type 2: tag byte 0x12.
	size, err := m.FieldMask.MarshalToSizedBuffer(dAtA[:idx])
	if err != nil {
		return 0, err
	}
	idx -= size
	idx = encodeVarintApplicationserverWeb(dAtA, idx, uint64(size))
	idx--
	dAtA[idx] = 0x12
	// field 1 (template ids), wire type 2: tag byte 0xa.
	size, err = m.ApplicationWebhookTemplateIdentifiers.MarshalToSizedBuffer(dAtA[:idx])
	if err != nil {
		return 0, err
	}
	idx -= size
	idx = encodeVarintApplicationserverWeb(dAtA, idx, uint64(size))
	idx--
	dAtA[idx] = 0xa
	return len(dAtA) - idx, nil
}
// Marshal encodes the request into a freshly allocated buffer and
// returns the encoded bytes.
func (m *ListApplicationWebhookTemplatesRequest) Marshal() (dAtA []byte, err error) {
	buf := make([]byte, m.Size())
	n, err := m.MarshalToSizedBuffer(buf)
	if err != nil {
		return nil, err
	}
	return buf[:n], nil
}
// MarshalTo encodes the request into the front of dAtA and returns the
// number of bytes written.
func (m *ListApplicationWebhookTemplatesRequest) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}
// MarshalToSizedBuffer encodes the request backwards into the tail of
// dAtA and returns the number of bytes written.
func (m *ListApplicationWebhookTemplatesRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	idx := len(dAtA)
	// field 1 (field_mask), wire type 2: tag byte 0xa.
	size, err := m.FieldMask.MarshalToSizedBuffer(dAtA[:idx])
	if err != nil {
		return 0, err
	}
	idx -= size
	idx = encodeVarintApplicationserverWeb(dAtA, idx, uint64(size))
	idx--
	dAtA[idx] = 0xa
	return len(dAtA) - idx, nil
}
// encodeVarintApplicationserverWeb writes v as a protobuf base-128 varint
// ending just before offset and returns the index of its first byte.
func encodeVarintApplicationserverWeb(dAtA []byte, offset int, v uint64) int {
	start := offset - sovApplicationserverWeb(v)
	pos := start
	for ; v >= 0x80; pos++ {
		// set the continuation bit on every byte except the last.
		dAtA[pos] = uint8(v) | 0x80
		v >>= 7
	}
	dAtA[pos] = uint8(v)
	return start
}
func NewPopulatedApplicationWebhookIdentifiers(r randyApplicationserverWeb, easy bool) *ApplicationWebhookIdentifiers |
// NewPopulatedApplicationWebhookTemplateIdentifiers generates random
// template identifiers for use in tests.
func NewPopulatedApplicationWebhookTemplateIdentifiers(r randyApplicationserverWeb, easy bool) *ApplicationWebhookTemplateIdentifiers {
	out := &ApplicationWebhookTemplateIdentifiers{}
	out.TemplateID = randStringApplicationserverWeb(r)
	if !easy && r.Intn(10) != 0 {
		// nothing extra to populate for this message
	}
	return out
}
// NewPopulatedApplicationWebhookTemplateField generates a random template
// field for use in tests. The order of calls on r is significant and must
// not change.
func NewPopulatedApplicationWebhookTemplateField(r randyApplicationserverWeb, easy bool) *ApplicationWebhookTemplateField {
	field := &ApplicationWebhookTemplateField{}
	field.ID = randStringApplicationserverWeb(r)
	field.Name = randStringApplicationserverWeb(r)
	field.Description = randStringApplicationserverWeb(r)
	field.Secret = r.Intn(2) == 0
	field.DefaultValue = randStringApplicationserverWeb(r)
	if !easy && r.Intn(10) != 0 {
		// nothing extra to populate for this message
	}
	return field
}
// NewPopulatedApplicationWebhookTemplate generates a random template for
// use in tests. Optional fields are present with probability 4/5; the
// exact sequence of calls on r determines the result and must not change.
func NewPopulatedApplicationWebhookTemplate(r randyApplicationserverWeb, easy bool) *ApplicationWebhookTemplate {
	this := &ApplicationWebhookTemplate{}
	v2 := NewPopulatedApplicationWebhookTemplateIdentifiers(r, easy)
	this.ApplicationWebhookTemplateIdentifiers = *v2
	this.Name = randStringApplicationserverWeb(r)
	this.Description = randStringApplicationserverWeb(r)
	this.LogoURL = randStringApplicationserverWeb(r)
	this.InfoURL = randStringApplicationserverWeb(r)
	this.DocumentationURL = randStringApplicationserverWeb(r)
	this.BaseURL = randStringApplicationserverWeb(r)
	if r.Intn(5) != 0 {
		// up to 9 random header key/value pairs
		v3 := r.Intn(10)
		this.Headers = make(map[string]string)
		for i := 0; i < v3; i++ {
			this.Headers[randStringApplicationserverWeb(r)] = randStringApplicationserverWeb(r)
		}
	}
	this.Format = randStringApplicationserverWeb(r)
	if r.Intn(5) != 0 {
		// up to 4 random template fields
		v4 := r.Intn(5)
		this.Fields = make([]*ApplicationWebhookTemplateField, v4)
		for i := 0; i < v4; i++ {
			this.Fields[i] = NewPopulatedApplicationWebhookTemplateField(r, easy)
		}
	}
	// each per-event message path is independently present with probability 4/5
	if r.Intn(5) != 0 {
		this.UplinkMessage = NewPopulatedApplicationWebhookTemplate_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.JoinAccept = NewPopulatedApplicationWebhookTemplate_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.DownlinkAck = NewPopulatedApplicationWebhookTemplate_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.DownlinkNack = NewPopulatedApplicationWebhookTemplate_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.DownlinkSent = NewPopulatedApplicationWebhookTemplate_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.DownlinkFailed = NewPopulatedApplicationWebhookTemplate_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.DownlinkQueued = NewPopulatedApplicationWebhookTemplate_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.LocationSolved = NewPopulatedApplicationWebhookTemplate_Message(r, easy)
	}
	if !easy && r.Intn(10) != 0 {
	}
	return this
}
// NewPopulatedApplicationWebhookTemplate_Message generates a random
// template message (path) for use in tests.
func NewPopulatedApplicationWebhookTemplate_Message(r randyApplicationserverWeb, easy bool) *ApplicationWebhookTemplate_Message {
	msg := &ApplicationWebhookTemplate_Message{}
	msg.Path = randStringApplicationserverWeb(r)
	if !easy && r.Intn(10) != 0 {
		// nothing extra to populate for this message
	}
	return msg
}
// NewPopulatedApplicationWebhookTemplates generates a random template
// list for use in tests.
func NewPopulatedApplicationWebhookTemplates(r randyApplicationserverWeb, easy bool) *ApplicationWebhookTemplates {
	out := &ApplicationWebhookTemplates{}
	if r.Intn(5) != 0 {
		count := r.Intn(5)
		out.Templates = make([]*ApplicationWebhookTemplate, count)
		for idx := range out.Templates {
			out.Templates[idx] = NewPopulatedApplicationWebhookTemplate(r, easy)
		}
	}
	if !easy && r.Intn(10) != 0 {
		// nothing extra to populate for this message
	}
	return out
}
// NewPopulatedApplicationWebhook generates a random webhook for use in
// tests. Optional fields are present with probability 4/5; the exact
// sequence of calls on r determines the result and must not change.
func NewPopulatedApplicationWebhook(r randyApplicationserverWeb, easy bool) *ApplicationWebhook {
	this := &ApplicationWebhook{}
	v6 := NewPopulatedApplicationWebhookIdentifiers(r, easy)
	this.ApplicationWebhookIdentifiers = *v6
	v7 := github_com_gogo_protobuf_types.NewPopulatedStdTime(r, easy)
	this.CreatedAt = *v7
	v8 := github_com_gogo_protobuf_types.NewPopulatedStdTime(r, easy)
	this.UpdatedAt = *v8
	this.BaseURL = randStringApplicationserverWeb(r)
	if r.Intn(5) != 0 {
		// up to 9 random header key/value pairs
		v9 := r.Intn(10)
		this.Headers = make(map[string]string)
		for i := 0; i < v9; i++ {
			this.Headers[randStringApplicationserverWeb(r)] = randStringApplicationserverWeb(r)
		}
	}
	this.Format = randStringApplicationserverWeb(r)
	// each per-event message path is independently present with probability 4/5
	if r.Intn(5) != 0 {
		this.UplinkMessage = NewPopulatedApplicationWebhook_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.JoinAccept = NewPopulatedApplicationWebhook_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.DownlinkAck = NewPopulatedApplicationWebhook_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.DownlinkNack = NewPopulatedApplicationWebhook_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.DownlinkSent = NewPopulatedApplicationWebhook_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.DownlinkFailed = NewPopulatedApplicationWebhook_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.DownlinkQueued = NewPopulatedApplicationWebhook_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.LocationSolved = NewPopulatedApplicationWebhook_Message(r, easy)
	}
	if r.Intn(5) != 0 {
		this.ApplicationWebhookTemplateIdentifiers = NewPopulatedApplicationWebhookTemplateIdentifiers(r, easy)
	}
	if r.Intn(5) != 0 {
		// up to 9 random template field key/value pairs
		v10 := r.Intn(10)
		this.TemplateFields = make(map[string]string)
		for i := 0; i < v10; i++ {
			this.TemplateFields[randStringApplicationserverWeb(r)] = randStringApplicationserverWeb(r)
		}
	}
	if !easy && r.Intn(10) != 0 {
	}
	return this
}
// NewPopulatedApplicationWebhook_Message generates a random webhook
// message (path) for use in tests.
func NewPopulatedApplicationWebhook_Message(r randyApplicationserverWeb, easy bool) *ApplicationWebhook_Message {
	msg := &ApplicationWebhook_Message{}
	msg.Path = randStringApplicationserverWeb(r)
	if !easy && r.Intn(10) != 0 {
		// nothing extra to populate for this message
	}
	return msg
}
// NewPopulatedApplicationWebhooks generates a random webhook list for use
// in tests.
func NewPopulatedApplicationWebhooks(r randyApplicationserverWeb, easy bool) *ApplicationWebhooks {
	out := &ApplicationWebhooks{}
	if r.Intn(5) != 0 {
		count := r.Intn(5)
		out.Webhooks = make([]*ApplicationWebhook, count)
		for idx := range out.Webhooks {
			out.Webhooks[idx] = NewPopulatedApplicationWebhook(r, easy)
		}
	}
	if !easy && r.Intn(10) != 0 {
		// nothing extra to populate for this message
	}
	return out
}
// NewPopulatedApplicationWebhookFormats generates a random formats map
// for use in tests.
func NewPopulatedApplicationWebhookFormats(r randyApplicationserverWeb, easy bool) *ApplicationWebhookFormats {
	out := &ApplicationWebhookFormats{}
	if r.Intn(5) != 0 {
		count := r.Intn(10)
		out.Formats = make(map[string]string)
		for idx := 0; idx < count; idx++ {
			out.Formats[randStringApplicationserverWeb(r)] = randStringApplicationserverWeb(r)
		}
	}
	if !easy && r.Intn(10) != 0 {
		// nothing extra to populate for this message
	}
	return out
}
// NewPopulatedGetApplicationWebhookRequest generates a random request for
// use in tests.
func NewPopulatedGetApplicationWebhookRequest(r randyApplicationserverWeb, easy bool) *GetApplicationWebhookRequest {
	out := &GetApplicationWebhookRequest{}
	out.ApplicationWebhookIdentifiers = *NewPopulatedApplicationWebhookIdentifiers(r, easy)
	out.FieldMask = *types.NewPopulatedFieldMask(r, easy)
	if !easy && r.Intn(10) != 0 {
		// nothing extra to populate for this message
	}
	return out
}
// NewPopulatedListApplicationWebhooksRequest generates a random request
// for use in tests.
func NewPopulatedListApplicationWebhooksRequest(r randyApplicationserverWeb, easy bool) *ListApplicationWebhooksRequest {
	out := &ListApplicationWebhooksRequest{}
	out.ApplicationIdentifiers = *NewPopulatedApplicationIdentifiers(r, easy)
	out.FieldMask = *types.NewPopulatedFieldMask(r, easy)
	if !easy && r.Intn(10) != 0 {
		// nothing extra to populate for this message
	}
	return out
}
// NewPopulatedSetApplicationWebhookRequest generates a random request for
// use in tests.
func NewPopulatedSetApplicationWebhookRequest(r randyApplicationserverWeb, easy bool) *SetApplicationWebhookRequest {
	out := &SetApplicationWebhookRequest{}
	out.ApplicationWebhook = *NewPopulatedApplicationWebhook(r, easy)
	out.FieldMask = *types.NewPopulatedFieldMask(r, easy)
	if !easy && r.Intn(10) != 0 {
		// nothing extra to populate for this message
	}
	return out
}
// NewPopulatedGetApplicationWebhookTemplateRequest generates a random
// request for use in tests.
func NewPopulatedGetApplicationWebhookTemplateRequest(r randyApplicationserverWeb, easy bool) *GetApplicationWebhookTemplateRequest {
	out := &GetApplicationWebhookTemplateRequest{}
	out.ApplicationWebhookTemplateIdentifiers = *NewPopulatedApplicationWebhookTemplateIdentifiers(r, easy)
	out.FieldMask = *types.NewPopulatedFieldMask(r, easy)
	if !easy && r.Intn(10) != 0 {
		// nothing extra to populate for this message
	}
	return out
}
// NewPopulatedListApplicationWebhookTemplatesRequest generates a random
// request for use in tests.
func NewPopulatedListApplicationWebhookTemplatesRequest(r randyApplicationserverWeb, easy bool) *ListApplicationWebhookTemplatesRequest {
	out := &ListApplicationWebhookTemplatesRequest{}
	out.FieldMask = *types.NewPopulatedFieldMask(r, easy)
	if !easy && r.Intn(10) != 0 {
		// nothing extra to populate for this message
	}
	return out
}
// randyApplicationserverWeb is the source of randomness required by the
// NewPopulated* helpers in this file; *math/rand.Rand satisfies it.
type randyApplicationserverWeb interface {
	Float32() float32
	Float64() float64
	Int63() int64
	Int31() int32
	Uint32() uint32
	Intn(n int) int
}
// randUTF8RuneApplicationserverWeb returns a random alphanumeric rune
// drawn uniformly from 0-9, A-Z and a-z.
func randUTF8RuneApplicationserverWeb(r randyApplicationserverWeb) rune {
	pick := r.Intn(62)
	switch {
	case pick < 10:
		return rune(pick + 48) // '0'..'9'
	case pick < 36:
		return rune(pick + 55) // 'A'..'Z'
	default:
		return rune(pick + 61) // 'a'..'z'
	}
}
// randStringApplicationserverWeb returns a random alphanumeric string of
// length 0..99.
func randStringApplicationserverWeb(r randyApplicationserverWeb) string {
	length := r.Intn(100)
	runes := make([]rune, length)
	for idx := range runes {
		runes[idx] = randUTF8RuneApplicationserverWeb(r)
	}
	return string(runes)
}
// randUnrecognizedApplicationserverWeb appends 0..4 random wire-format
// fields with numbers above maxFieldNumber, simulating unknown fields.
func randUnrecognizedApplicationserverWeb(r randyApplicationserverWeb, maxFieldNumber int) (dAtA []byte) {
	count := r.Intn(5)
	for n := 0; n < count; n++ {
		wire := r.Intn(4)
		if wire == 3 {
			// wire type 3 (start group) is not emitted; use 5 (fixed32) instead.
			wire = 5
		}
		dAtA = randFieldApplicationserverWeb(dAtA, r, maxFieldNumber+r.Intn(100), wire)
	}
	return dAtA
}
// randFieldApplicationserverWeb appends one randomly valued wire-format
// field with the given field number and wire type to dAtA and returns the
// extended slice.
func randFieldApplicationserverWeb(dAtA []byte, r randyApplicationserverWeb, fieldNumber int, wire int) []byte {
	// protobuf field key: field number shifted left by 3, OR'd with wire type.
	key := uint32(fieldNumber)<<3 | uint32(wire)
	switch wire {
	case 0: // varint, randomly negated
		dAtA = encodeVarintPopulateApplicationserverWeb(dAtA, uint64(key))
		v23 := r.Int63()
		if r.Intn(2) == 0 {
			v23 *= -1
		}
		dAtA = encodeVarintPopulateApplicationserverWeb(dAtA, uint64(v23))
	case 1: // fixed64: eight random bytes
		dAtA = encodeVarintPopulateApplicationserverWeb(dAtA, uint64(key))
		dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)))
	case 2: // length-delimited: random length 0..99, then that many random bytes
		dAtA = encodeVarintPopulateApplicationserverWeb(dAtA, uint64(key))
		ll := r.Intn(100)
		dAtA = encodeVarintPopulateApplicationserverWeb(dAtA, uint64(ll))
		for j := 0; j < ll; j++ {
			dAtA = append(dAtA, byte(r.Intn(256)))
		}
	default: // fixed32: four random bytes
		dAtA = encodeVarintPopulateApplicationserverWeb(dAtA, uint64(key))
		dAtA = append(dAtA, byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)), byte(r.Intn(256)))
	}
	return dAtA
}
// encodeVarintPopulateApplicationserverWeb appends v to dAtA as a
// protobuf base-128 varint and returns the extended slice.
func encodeVarintPopulateApplicationserverWeb(dAtA []byte, v uint64) []byte {
	for v >= 0x80 {
		// continuation bit set on every byte except the last.
		dAtA = append(dAtA, uint8(v)|0x80)
		v >>= 7
	}
	return append(dAtA, uint8(v))
}
// Size returns the encoded wire size of the message in bytes.
func (m *ApplicationWebhookIdentifiers) Size() (n int) {
	if m == nil {
		return 0
	}
	idsLen := m.ApplicationIdentifiers.Size()
	n = 1 + idsLen + sovApplicationserverWeb(uint64(idsLen))
	if idLen := len(m.WebhookID); idLen > 0 {
		n += 1 + idLen + sovApplicationserverWeb(uint64(idLen))
	}
	return n
}
// Size returns the encoded wire size of the message in bytes.
func (m *ApplicationWebhookTemplateIdentifiers) Size() (n int) {
	if m == nil {
		return 0
	}
	if idLen := len(m.TemplateID); idLen > 0 {
		n += 1 + idLen + sovApplicationserverWeb(uint64(idLen))
	}
	return n
}
// Size returns the encoded wire size of the message in bytes. All string
// fields (1, 2, 3 and 5) carry a one-byte tag; the bool field 4 costs two
// bytes when set.
func (m *ApplicationWebhookTemplateField) Size() (n int) {
	if m == nil {
		return 0
	}
	for _, s := range []string{m.ID, m.Name, m.Description} {
		if l := len(s); l > 0 {
			n += 1 + l + sovApplicationserverWeb(uint64(l))
		}
	}
	if m.Secret {
		n += 2
	}
	if l := len(m.DefaultValue); l > 0 {
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	return n
}
// Size returns the encoded wire size of the message in bytes. Fields with
// numbers 16 and above (DownlinkFailed onward) carry a two-byte tag,
// hence the "2 +" terms near the end.
func (m *ApplicationWebhookTemplate) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = m.ApplicationWebhookTemplateIdentifiers.Size()
	n += 1 + l + sovApplicationserverWeb(uint64(l))
	l = len(m.Name)
	if l > 0 {
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	l = len(m.Description)
	if l > 0 {
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	l = len(m.LogoURL)
	if l > 0 {
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	l = len(m.InfoURL)
	if l > 0 {
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	l = len(m.DocumentationURL)
	if l > 0 {
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	l = len(m.BaseURL)
	if l > 0 {
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if len(m.Headers) > 0 {
		// each map entry is encoded as an embedded message: key + value + header.
		for k, v := range m.Headers {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovApplicationserverWeb(uint64(len(k))) + 1 + len(v) + sovApplicationserverWeb(uint64(len(v)))
			n += mapEntrySize + 1 + sovApplicationserverWeb(uint64(mapEntrySize))
		}
	}
	l = len(m.Format)
	if l > 0 {
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if len(m.Fields) > 0 {
		for _, e := range m.Fields {
			l = e.Size()
			n += 1 + l + sovApplicationserverWeb(uint64(l))
		}
	}
	if m.UplinkMessage != nil {
		l = m.UplinkMessage.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.JoinAccept != nil {
		l = m.JoinAccept.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.DownlinkAck != nil {
		l = m.DownlinkAck.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.DownlinkNack != nil {
		l = m.DownlinkNack.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.DownlinkSent != nil {
		l = m.DownlinkSent.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.DownlinkFailed != nil {
		l = m.DownlinkFailed.Size()
		n += 2 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.DownlinkQueued != nil {
		l = m.DownlinkQueued.Size()
		n += 2 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.LocationSolved != nil {
		l = m.LocationSolved.Size()
		n += 2 + l + sovApplicationserverWeb(uint64(l))
	}
	return n
}
// Size returns the encoded wire size of the message in bytes.
func (m *ApplicationWebhookTemplate_Message) Size() (n int) {
	if m == nil {
		return 0
	}
	if pathLen := len(m.Path); pathLen > 0 {
		n += 1 + pathLen + sovApplicationserverWeb(uint64(pathLen))
	}
	return n
}
// Size returns the encoded wire size of the message in bytes.
func (m *ApplicationWebhookTemplates) Size() (n int) {
	if m == nil {
		return 0
	}
	for _, tmpl := range m.Templates {
		sz := tmpl.Size()
		n += 1 + sz + sovApplicationserverWeb(uint64(sz))
	}
	return n
}
// Size returns the encoded wire size of the message in bytes. The
// TemplateFields map entries use a two-byte tag ("2 +"), so that field's
// number is 16 or above.
func (m *ApplicationWebhook) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = m.ApplicationWebhookIdentifiers.Size()
	n += 1 + l + sovApplicationserverWeb(uint64(l))
	l = github_com_gogo_protobuf_types.SizeOfStdTime(m.CreatedAt)
	n += 1 + l + sovApplicationserverWeb(uint64(l))
	l = github_com_gogo_protobuf_types.SizeOfStdTime(m.UpdatedAt)
	n += 1 + l + sovApplicationserverWeb(uint64(l))
	l = len(m.BaseURL)
	if l > 0 {
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if len(m.Headers) > 0 {
		// each map entry is encoded as an embedded message: key + value + header.
		for k, v := range m.Headers {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovApplicationserverWeb(uint64(len(k))) + 1 + len(v) + sovApplicationserverWeb(uint64(len(v)))
			n += mapEntrySize + 1 + sovApplicationserverWeb(uint64(mapEntrySize))
		}
	}
	l = len(m.Format)
	if l > 0 {
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.UplinkMessage != nil {
		l = m.UplinkMessage.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.JoinAccept != nil {
		l = m.JoinAccept.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.DownlinkAck != nil {
		l = m.DownlinkAck.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.DownlinkNack != nil {
		l = m.DownlinkNack.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.DownlinkSent != nil {
		l = m.DownlinkSent.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.DownlinkFailed != nil {
		l = m.DownlinkFailed.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.DownlinkQueued != nil {
		l = m.DownlinkQueued.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.LocationSolved != nil {
		l = m.LocationSolved.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if m.ApplicationWebhookTemplateIdentifiers != nil {
		l = m.ApplicationWebhookTemplateIdentifiers.Size()
		n += 1 + l + sovApplicationserverWeb(uint64(l))
	}
	if len(m.TemplateFields) > 0 {
		for k, v := range m.TemplateFields {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovApplicationserverWeb(uint64(len(k))) + 1 + len(v) + sovApplicationserverWeb(uint64(len(v)))
			n += mapEntrySize + 2 + sovApplicationserverWeb(uint64(mapEntrySize))
		}
	}
	return n
}
// Size returns the encoded wire size of the message in bytes.
func (m *ApplicationWebhook_Message) Size() (n int) {
	if m == nil {
		return 0
	}
	if pathLen := len(m.Path); pathLen > 0 {
		n += 1 + pathLen + sovApplicationserverWeb(uint64(pathLen))
	}
	return n
}
// Size returns the encoded wire size of the message in bytes.
func (m *ApplicationWebhooks) Size() (n int) {
	if m == nil {
		return 0
	}
	for _, hook := range m.Webhooks {
		sz := hook.Size()
		n += 1 + sz + sovApplicationserverWeb(uint64(sz))
	}
	return n
}
// Size returns the encoded wire size of the message in bytes. Each map
// entry is counted as an embedded message: key + value + entry header.
func (m *ApplicationWebhookFormats) Size() (n int) {
	if m == nil {
		return 0
	}
	for k, v := range m.Formats {
		entrySize := 1 + len(k) + sovApplicationserverWeb(uint64(len(k))) + 1 + len(v) + sovApplicationserverWeb(uint64(len(v)))
		n += entrySize + 1 + sovApplicationserverWeb(uint64(entrySize))
	}
	return n
}
// Size returns the encoded wire size of the request in bytes.
func (m *GetApplicationWebhookRequest) Size() (n int) {
	if m == nil {
		return 0
	}
	idsLen := m.ApplicationWebhookIdentifiers.Size()
	n = 1 + idsLen + sovApplicationserverWeb(uint64(idsLen))
	maskLen := m.FieldMask.Size()
	n += 1 + maskLen + sovApplicationserverWeb(uint64(maskLen))
	return n
}
// Size returns the encoded wire size of the request in bytes.
func (m *ListApplicationWebhooksRequest) Size() (n int) {
	if m == nil {
		return 0
	}
	idsLen := m.ApplicationIdentifiers.Size()
	n = 1 + idsLen + sovApplicationserverWeb(uint64(idsLen))
	maskLen := m.FieldMask.Size()
	n += 1 + maskLen + sovApplicationserverWeb(uint64(maskLen))
	return n
}
// Size returns the encoded wire size of the request in bytes.
func (m *SetApplicationWebhookRequest) Size() (n int) {
	if m == nil {
		return 0
	}
	hookLen := m.ApplicationWebhook.Size()
	n = 1 + hookLen + sovApplicationserverWeb(uint64(hookLen))
	maskLen := m.FieldMask.Size()
	n += 1 + maskLen + sovApplicationserverWeb(uint64(maskLen))
	return n
}
// Size returns the encoded wire size of the request in bytes.
func (m *GetApplicationWebhookTemplateRequest) Size() (n int) {
	if m == nil {
		return 0
	}
	idsLen := m.ApplicationWebhookTemplateIdentifiers.Size()
	n = 1 + idsLen + sovApplicationserverWeb(uint64(idsLen))
	maskLen := m.FieldMask.Size()
	n += 1 + maskLen + sovApplicationserverWeb(uint64(maskLen))
	return n
}
// Size returns the encoded wire size of the request in bytes.
func (m *ListApplicationWebhookTemplatesRequest) Size() (n int) {
	if m == nil {
		return 0
	}
	maskLen := m.FieldMask.Size()
	return 1 + maskLen + sovApplicationserverWeb(uint64(maskLen))
}
// sovApplicationserverWeb returns the number of bytes (1..10) needed to
// encode x as a protobuf base-128 varint.
func sovApplicationserverWeb(x uint64) (n int) {
	// x|1 makes Len64 return at least 1, so zero still costs one byte.
	significant := math_bits.Len64(x | 1)
	return (significant + 6) / 7
}
// sozApplicationserverWeb returns the varint size of x after zig-zag
// encoding (used for sint fields).
func sozApplicationserverWeb(x uint64) (n int) {
	zigzag := (x << 1) ^ uint64(int64(x)>>63)
	return sovApplicationserverWeb(zigzag)
}
// String returns a compact human-readable representation of the
// identifiers for debugging/logging.
func (this *ApplicationWebhookIdentifiers) String() string {
	if this == nil {
		return "nil"
	}
	// NOTE: replacing a name with itself is a no-op; these Replace calls are
	// artifacts of the generator.
	s := strings.Join([]string{`&ApplicationWebhookIdentifiers{`,
		`ApplicationIdentifiers:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.ApplicationIdentifiers), "ApplicationIdentifiers", "ApplicationIdentifiers", 1), `&`, ``, 1) + `,`,
		`WebhookID:` + fmt.Sprintf("%v", this.WebhookID) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a compact human-readable representation of the template
// identifiers for debugging/logging.
func (this *ApplicationWebhookTemplateIdentifiers) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&ApplicationWebhookTemplateIdentifiers{`,
		`TemplateID:` + fmt.Sprintf("%v", this.TemplateID) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a compact human-readable representation of the template
// field for debugging/logging.
func (this *ApplicationWebhookTemplateField) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&ApplicationWebhookTemplateField{`,
		`ID:` + fmt.Sprintf("%v", this.ID) + `,`,
		`Name:` + fmt.Sprintf("%v", this.Name) + `,`,
		`Description:` + fmt.Sprintf("%v", this.Description) + `,`,
		`Secret:` + fmt.Sprintf("%v", this.Secret) + `,`,
		`DefaultValue:` + fmt.Sprintf("%v", this.DefaultValue) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a compact human-readable representation of the template
// for debugging/logging. Header map keys are sorted so that the output is
// deterministic; the self-replacing strings.Replace calls are generator
// artifacts and are no-ops.
func (this *ApplicationWebhookTemplate) String() string {
	if this == nil {
		return "nil"
	}
	repeatedStringForFields := "[]*ApplicationWebhookTemplateField{"
	for _, f := range this.Fields {
		repeatedStringForFields += strings.Replace(f.String(), "ApplicationWebhookTemplateField", "ApplicationWebhookTemplateField", 1) + ","
	}
	repeatedStringForFields += "}"
	keysForHeaders := make([]string, 0, len(this.Headers))
	for k := range this.Headers {
		keysForHeaders = append(keysForHeaders, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForHeaders)
	mapStringForHeaders := "map[string]string{"
	for _, k := range keysForHeaders {
		mapStringForHeaders += fmt.Sprintf("%v: %v,", k, this.Headers[k])
	}
	mapStringForHeaders += "}"
	s := strings.Join([]string{`&ApplicationWebhookTemplate{`,
		`ApplicationWebhookTemplateIdentifiers:` + strings.Replace(strings.Replace(this.ApplicationWebhookTemplateIdentifiers.String(), "ApplicationWebhookTemplateIdentifiers", "ApplicationWebhookTemplateIdentifiers", 1), `&`, ``, 1) + `,`,
		`Name:` + fmt.Sprintf("%v", this.Name) + `,`,
		`Description:` + fmt.Sprintf("%v", this.Description) + `,`,
		`LogoURL:` + fmt.Sprintf("%v", this.LogoURL) + `,`,
		`InfoURL:` + fmt.Sprintf("%v", this.InfoURL) + `,`,
		`DocumentationURL:` + fmt.Sprintf("%v", this.DocumentationURL) + `,`,
		`BaseURL:` + fmt.Sprintf("%v", this.BaseURL) + `,`,
		`Headers:` + mapStringForHeaders + `,`,
		`Format:` + fmt.Sprintf("%v", this.Format) + `,`,
		`Fields:` + repeatedStringForFields + `,`,
		`UplinkMessage:` + strings.Replace(fmt.Sprintf("%v", this.UplinkMessage), "ApplicationWebhookTemplate_Message", "ApplicationWebhookTemplate_Message", 1) + `,`,
		`JoinAccept:` + strings.Replace(fmt.Sprintf("%v", this.JoinAccept), "ApplicationWebhookTemplate_Message", "ApplicationWebhookTemplate_Message", 1) + `,`,
		`DownlinkAck:` + strings.Replace(fmt.Sprintf("%v", this.DownlinkAck), "ApplicationWebhookTemplate_Message", "ApplicationWebhookTemplate_Message", 1) + `,`,
		`DownlinkNack:` + strings.Replace(fmt.Sprintf("%v", this.DownlinkNack), "ApplicationWebhookTemplate_Message", "ApplicationWebhookTemplate_Message", 1) + `,`,
		`DownlinkSent:` + strings.Replace(fmt.Sprintf("%v", this.DownlinkSent), "ApplicationWebhookTemplate_Message", "ApplicationWebhookTemplate_Message", 1) + `,`,
		`DownlinkFailed:` + strings.Replace(fmt.Sprintf("%v", this.DownlinkFailed), "ApplicationWebhookTemplate_Message", "ApplicationWebhookTemplate_Message", 1) + `,`,
		`DownlinkQueued:` + strings.Replace(fmt.Sprintf("%v", this.DownlinkQueued), "ApplicationWebhookTemplate_Message", "ApplicationWebhookTemplate_Message", 1) + `,`,
		`LocationSolved:` + strings.Replace(fmt.Sprintf("%v", this.LocationSolved), "ApplicationWebhookTemplate_Message", "ApplicationWebhookTemplate_Message", 1) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a compact human-readable representation of the template
// message for debugging/logging.
func (this *ApplicationWebhookTemplate_Message) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&ApplicationWebhookTemplate_Message{`,
		`Path:` + fmt.Sprintf("%v", this.Path) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a compact human-readable representation of the template
// list for debugging/logging.
func (this *ApplicationWebhookTemplates) String() string {
	if this == nil {
		return "nil"
	}
	repeatedStringForTemplates := "[]*ApplicationWebhookTemplate{"
	for _, f := range this.Templates {
		repeatedStringForTemplates += strings.Replace(f.String(), "ApplicationWebhookTemplate", "ApplicationWebhookTemplate", 1) + ","
	}
	repeatedStringForTemplates += "}"
	s := strings.Join([]string{`&ApplicationWebhookTemplates{`,
		`Templates:` + repeatedStringForTemplates + `,`,
		`}`,
	}, "")
	return s
}
// String returns a compact human-readable representation of the webhook
// for debugging/logging. Map keys (Headers, TemplateFields) are sorted so
// that the output is deterministic; the self-replacing strings.Replace
// calls are generator artifacts and are no-ops.
func (this *ApplicationWebhook) String() string {
	if this == nil {
		return "nil"
	}
	keysForHeaders := make([]string, 0, len(this.Headers))
	for k := range this.Headers {
		keysForHeaders = append(keysForHeaders, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForHeaders)
	mapStringForHeaders := "map[string]string{"
	for _, k := range keysForHeaders {
		mapStringForHeaders += fmt.Sprintf("%v: %v,", k, this.Headers[k])
	}
	mapStringForHeaders += "}"
	keysForTemplateFields := make([]string, 0, len(this.TemplateFields))
	for k := range this.TemplateFields {
		keysForTemplateFields = append(keysForTemplateFields, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForTemplateFields)
	mapStringForTemplateFields := "map[string]string{"
	for _, k := range keysForTemplateFields {
		mapStringForTemplateFields += fmt.Sprintf("%v: %v,", k, this.TemplateFields[k])
	}
	mapStringForTemplateFields += "}"
	s := strings.Join([]string{`&ApplicationWebhook{`,
		`ApplicationWebhookIdentifiers:` + strings.Replace(strings.Replace(this.ApplicationWebhookIdentifiers.String(), "ApplicationWebhookIdentifiers", "ApplicationWebhookIdentifiers", 1), `&`, ``, 1) + `,`,
		`CreatedAt:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.CreatedAt), "Timestamp", "types.Timestamp", 1), `&`, ``, 1) + `,`,
		`UpdatedAt:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.UpdatedAt), "Timestamp", "types.Timestamp", 1), `&`, ``, 1) + `,`,
		`BaseURL:` + fmt.Sprintf("%v", this.BaseURL) + `,`,
		`Headers:` + mapStringForHeaders + `,`,
		`Format:` + fmt.Sprintf("%v", this.Format) + `,`,
		`UplinkMessage:` + strings.Replace(fmt.Sprintf("%v", this.UplinkMessage), "ApplicationWebhook_Message", "ApplicationWebhook_Message", 1) + `,`,
		`JoinAccept:` + strings.Replace(fmt.Sprintf("%v", this.JoinAccept), "ApplicationWebhook_Message", "ApplicationWebhook_Message", 1) + `,`,
		`DownlinkAck:` + strings.Replace(fmt.Sprintf("%v", this.DownlinkAck), "ApplicationWebhook_Message", "ApplicationWebhook_Message", 1) + `,`,
		`DownlinkNack:` + strings.Replace(fmt.Sprintf("%v", this.DownlinkNack), "ApplicationWebhook_Message", "ApplicationWebhook_Message", 1) + `,`,
		`DownlinkSent:` + strings.Replace(fmt.Sprintf("%v", this.DownlinkSent), "ApplicationWebhook_Message", "ApplicationWebhook_Message", 1) + `,`,
		`DownlinkFailed:` + strings.Replace(fmt.Sprintf("%v", this.DownlinkFailed), "ApplicationWebhook_Message", "ApplicationWebhook_Message", 1) + `,`,
		`DownlinkQueued:` + strings.Replace(fmt.Sprintf("%v", this.DownlinkQueued), "ApplicationWebhook_Message", "ApplicationWebhook_Message", 1) + `,`,
		`LocationSolved:` + strings.Replace(fmt.Sprintf("%v", this.LocationSolved), "ApplicationWebhook_Message", "ApplicationWebhook_Message", 1) + `,`,
		`ApplicationWebhookTemplateIdentifiers:` + strings.Replace(this.ApplicationWebhookTemplateIdentifiers.String(), "ApplicationWebhookTemplateIdentifiers", "ApplicationWebhookTemplateIdentifiers", 1) + `,`,
		`TemplateFields:` + mapStringForTemplateFields + `,`,
		`}`,
	}, "")
	return s
}
// String renders the message in the single-line gogoproto debug form,
// e.g. &ApplicationWebhook_Message{Path:...,}. A nil receiver prints "nil".
func (this *ApplicationWebhook_Message) String() string {
	if this == nil {
		return "nil"
	}
	return "&ApplicationWebhook_Message{" +
		"Path:" + fmt.Sprintf("%v", this.Path) + "," +
		"}"
}
// String renders the webhook list in the gogoproto debug form. A nil
// receiver prints "nil". (The generated original wrapped each element in a
// strings.Replace whose old and new arguments were identical — a no-op —
// so the element strings are used directly here.)
func (this *ApplicationWebhooks) String() string {
	if this == nil {
		return "nil"
	}
	webhooks := "[]*ApplicationWebhook{"
	for _, w := range this.Webhooks {
		webhooks += w.String() + ","
	}
	webhooks += "}"
	return "&ApplicationWebhooks{" +
		"Webhooks:" + webhooks + "," +
		"}"
}
// String renders the formats map in the gogoproto debug form. Map keys are
// sorted first so the output is deterministic regardless of map iteration
// order. A nil receiver prints "nil".
func (this *ApplicationWebhookFormats) String() string {
	if this == nil {
		return "nil"
	}
	keys := make([]string, 0, len(this.Formats))
	for k := range this.Formats {
		keys = append(keys, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keys)
	formats := "map[string]string{"
	for _, k := range keys {
		formats += fmt.Sprintf("%v: %v,", k, this.Formats[k])
	}
	formats += "}"
	return "&ApplicationWebhookFormats{" +
		"Formats:" + formats + "," +
		"}"
}
func (this *GetApplicationWebhookRequest) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&GetApplicationWebhookRequest{`,
`ApplicationWebhookIdentifiers:` + strings.Replace(strings.Replace(this.ApplicationWebhookIdentifiers.String(), "ApplicationWebhookIdentifiers", "ApplicationWebhookIdentifiers", 1), `&`, ``, 1) + `,`,
`FieldMask:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.FieldMask), "FieldMask", "types.FieldMask", 1), `&`, ``, 1) + `,`,
`}`,
}, "")
return s
}
func (this *ListApplicationWebhooksRequest) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&ListApplicationWebhooksRequest{`,
`ApplicationIdentifiers:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.ApplicationIdentifiers), "ApplicationIdentifiers", "ApplicationIdentifiers", 1), `&`, ``, 1) + `,`,
`FieldMask:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.FieldMask), "FieldMask", "types.FieldMask", 1), `&`, ``, 1) + `,`,
`}`,
}, "")
return s
}
func (this *SetApplicationWebhookRequest) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&SetApplicationWebhookRequest{`,
`ApplicationWebhook:` + strings.Replace(strings.Replace(this.ApplicationWebhook.String(), "ApplicationWebhook", "ApplicationWebhook", 1), `&`, ``, 1) + `,`,
`FieldMask:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.FieldMask), "FieldMask", "types.FieldMask", 1), `&`, ``, 1) + `,`,
`}`,
}, "")
return s
}
func (this *GetApplicationWebhookTemplateRequest) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&GetApplicationWebhookTemplateRequest{`,
`ApplicationWebhookTemplateIdentifiers:` + strings.Replace(strings.Replace(this.ApplicationWebhookTemplateIdentifiers.String(), "ApplicationWebhookTemplateIdentifiers", "ApplicationWebhookTemplateIdentifiers", 1), `&`, ``, 1) + `,`,
`FieldMask:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.FieldMask), "FieldMask", "types.FieldMask", 1), `&`, ``, 1) + `,`,
`}`,
}, "")
return s
}
func (this *ListApplicationWebhookTemplatesRequest) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&ListApplicationWebhookTemplatesRequest{`,
`FieldMask:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.FieldMask), "FieldMask", "types.FieldMask", 1), `&`, ``, 1) + `,`,
`}`,
}, "")
return s
}
// valueToStringApplicationserverWeb formats a pointer value for debug
// output: "nil" for a nil pointer, otherwise "*" followed by the
// dereferenced value. The argument is expected to be a pointer (or other
// nil-able reference); reflect.Value.IsNil panics on non-nil-able kinds.
func valueToStringApplicationserverWeb(v interface{}) string {
	rv := reflect.ValueOf(v)
	if rv.IsNil() {
		return "nil"
	}
	return fmt.Sprintf("*%v", reflect.Indirect(rv).Interface())
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// It reads one varint-encoded field key at a time (fieldNum<<3 | wireType),
// dispatches on the field number, and skips unknown fields. Every length
// and offset is bounds-checked before use; malformed input yields
// ErrIntOverflow/ErrInvalidLength/io.ErrUnexpectedEOF rather than a panic.
func (m *ApplicationWebhookIdentifiers) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint field key.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ApplicationWebhookIdentifiers: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ApplicationWebhookIdentifiers: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1: embedded ApplicationIdentifiers message (length-delimited).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ApplicationIdentifiers", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ApplicationIdentifiers.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// Field 2: WebhookID (string, length-delimited).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field WebhookID", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.WebhookID = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the key and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Same structure as the sibling decoders: varint field key, dispatch on
// field number, skip unknown fields, bounds-check every offset.
func (m *ApplicationWebhookTemplateIdentifiers) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint field key.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ApplicationWebhookTemplateIdentifiers: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ApplicationWebhookTemplateIdentifiers: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1: TemplateID (string, length-delimited).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field TemplateID", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.TemplateID = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the key and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Fields: 1=ID, 2=Name, 3=Description (strings), 4=Secret (bool varint),
// 5=DefaultValue (string). Unknown fields are skipped; all offsets are
// bounds-checked.
func (m *ApplicationWebhookTemplateField) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint field key.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ApplicationWebhookTemplateField: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ApplicationWebhookTemplateField: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1: ID (string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ID", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.ID = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2:
			// Field 2: Name (string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Name = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 3:
			// Field 3: Description (string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Description", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Description = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 4:
			// Field 4: Secret (bool, varint wire type; any non-zero value is true).
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Secret", wireType)
			}
			var v int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				v |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			m.Secret = bool(v != 0)
		case 5:
			// Field 5: DefaultValue (string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DefaultValue", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.DefaultValue = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the key and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Fields: 1=ApplicationWebhookTemplateIdentifiers (embedded message),
// 2=Name, 3=Description, 4=LogoURL, 5=InfoURL, 6=DocumentationURL,
// 7=BaseURL, 9=Format (strings), 8=Headers (map<string,string>),
// 10=Fields (repeated ApplicationWebhookTemplateField), and 11–18 the
// optional per-event message paths (UplinkMessage … LocationSolved).
// Unknown fields are skipped; every length/offset is bounds-checked.
func (m *ApplicationWebhookTemplate) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint field key (fieldNum<<3 | wireType).
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ApplicationWebhookTemplate: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ApplicationWebhookTemplate: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1: embedded template identifiers.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ApplicationWebhookTemplateIdentifiers", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ApplicationWebhookTemplateIdentifiers.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// Field 2: Name (string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Name = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 3:
			// Field 3: Description (string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Description", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Description = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 4:
			// Field 4: LogoURL (string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field LogoURL", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.LogoURL = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 5:
			// Field 5: InfoURL (string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field InfoURL", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.InfoURL = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 6:
			// Field 6: DocumentationURL (string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DocumentationURL", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.DocumentationURL = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 7:
			// Field 7: BaseURL (string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field BaseURL", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.BaseURL = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 8:
			// Field 8: Headers (map<string,string>); each map entry is itself a
			// small message with key=1 and value=2. Re-setting a key keeps the
			// last entry seen, matching protobuf map semantics.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Headers == nil {
				m.Headers = make(map[string]string)
			}
			var mapkey string
			var mapvalue string
			for iNdEx < postIndex {
				entryPreIndex := iNdEx
				var wire uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowApplicationserverWeb
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					wire |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				fieldNum := int32(wire >> 3)
				if fieldNum == 1 {
					// Map entry key (string).
					var stringLenmapkey uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowApplicationserverWeb
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						stringLenmapkey |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					intStringLenmapkey := int(stringLenmapkey)
					if intStringLenmapkey < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					postStringIndexmapkey := iNdEx + intStringLenmapkey
					if postStringIndexmapkey < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					if postStringIndexmapkey > l {
						return io.ErrUnexpectedEOF
					}
					mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
					iNdEx = postStringIndexmapkey
				} else if fieldNum == 2 {
					// Map entry value (string).
					var stringLenmapvalue uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowApplicationserverWeb
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						stringLenmapvalue |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					intStringLenmapvalue := int(stringLenmapvalue)
					if intStringLenmapvalue < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					postStringIndexmapvalue := iNdEx + intStringLenmapvalue
					if postStringIndexmapvalue < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					if postStringIndexmapvalue > l {
						return io.ErrUnexpectedEOF
					}
					mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue])
					iNdEx = postStringIndexmapvalue
				} else {
					// Unknown field inside the map entry: skip, bounded by the entry.
					iNdEx = entryPreIndex
					skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
					if err != nil {
						return err
					}
					if skippy < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					if (iNdEx + skippy) > postIndex {
						return io.ErrUnexpectedEOF
					}
					iNdEx += skippy
				}
			}
			m.Headers[mapkey] = mapvalue
			iNdEx = postIndex
		case 9:
			// Field 9: Format (string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Format", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Format = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 10:
			// Field 10: Fields (repeated message); each occurrence appends one element.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Fields", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Fields = append(m.Fields, &ApplicationWebhookTemplateField{})
			if err := m.Fields[len(m.Fields)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 11:
			// Field 11: UplinkMessage (optional message; lazily allocated).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field UplinkMessage", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.UplinkMessage == nil {
				m.UplinkMessage = &ApplicationWebhookTemplate_Message{}
			}
			if err := m.UplinkMessage.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 12:
			// Field 12: JoinAccept (optional message; lazily allocated).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field JoinAccept", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.JoinAccept == nil {
				m.JoinAccept = &ApplicationWebhookTemplate_Message{}
			}
			if err := m.JoinAccept.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 13:
			// Field 13: DownlinkAck (optional message; lazily allocated).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DownlinkAck", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.DownlinkAck == nil {
				m.DownlinkAck = &ApplicationWebhookTemplate_Message{}
			}
			if err := m.DownlinkAck.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 14:
			// Field 14: DownlinkNack (optional message; lazily allocated).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DownlinkNack", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.DownlinkNack == nil {
				m.DownlinkNack = &ApplicationWebhookTemplate_Message{}
			}
			if err := m.DownlinkNack.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 15:
			// Field 15: DownlinkSent (optional message; lazily allocated).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DownlinkSent", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.DownlinkSent == nil {
				m.DownlinkSent = &ApplicationWebhookTemplate_Message{}
			}
			if err := m.DownlinkSent.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 16:
			// Field 16: DownlinkFailed (optional message; lazily allocated).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DownlinkFailed", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.DownlinkFailed == nil {
				m.DownlinkFailed = &ApplicationWebhookTemplate_Message{}
			}
			if err := m.DownlinkFailed.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 17:
			// Field 17: DownlinkQueued (optional message; lazily allocated).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DownlinkQueued", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.DownlinkQueued == nil {
				m.DownlinkQueued = &ApplicationWebhookTemplate_Message{}
			}
			if err := m.DownlinkQueued.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 18:
			// Field 18: LocationSolved (optional message; lazily allocated).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field LocationSolved", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.LocationSolved == nil {
				m.LocationSolved = &ApplicationWebhookTemplate_Message{}
			}
			if err := m.LocationSolved.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the key and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Single field: 1=Path (string). Unknown fields are skipped; all offsets
// are bounds-checked.
func (m *ApplicationWebhookTemplate_Message) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint field key.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: Message: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: Message: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1: Path (string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Path", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Path = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the key and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Single field: 1=Templates (repeated ApplicationWebhookTemplate); each
// occurrence appends one decoded element. Unknown fields are skipped; all
// offsets are bounds-checked.
func (m *ApplicationWebhookTemplates) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint field key.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ApplicationWebhookTemplates: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ApplicationWebhookTemplates: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1: Templates (repeated message).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Templates", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Templates = append(m.Templates, &ApplicationWebhookTemplate{})
			if err := m.Templates[len(m.Templates)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the key and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// It walks tag/value pairs until the buffer is consumed; unknown field
// numbers are skipped via skipApplicationserverWeb. NOTE(review): the
// github_com_gogo_protobuf_types alias indicates this is gogo/protobuf
// generated code — manual edits here will be lost on regeneration.
func (m *ApplicationWebhook) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field tag as a varint: fieldNum<<3 | wireType.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		// Wire type 4 (end-group) is invalid outside a group.
		if wireType == 4 {
			return fmt.Errorf("proto: ApplicationWebhook: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ApplicationWebhook: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Embedded (non-pointer) ApplicationWebhookIdentifiers, length-delimited.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ApplicationWebhookIdentifiers", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ApplicationWebhookIdentifiers.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// CreatedAt: google.protobuf.Timestamp decoded into time.Time
			// via gogo's StdTimeUnmarshal helper.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field CreatedAt", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(&m.CreatedAt, dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3:
			// UpdatedAt: same Timestamp handling as CreatedAt.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field UpdatedAt", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(&m.UpdatedAt, dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 4:
			// BaseURL: length-delimited string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field BaseURL", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.BaseURL = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 5:
			// Headers: map<string, string>. Each entry is itself a nested
			// message with key as field 1 and value as field 2.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Headers == nil {
				m.Headers = make(map[string]string)
			}
			var mapkey string
			var mapvalue string
			// Parse the key/value sub-fields of this single map entry;
			// unknown sub-fields are skipped within the entry bounds.
			for iNdEx < postIndex {
				entryPreIndex := iNdEx
				var wire uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowApplicationserverWeb
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					wire |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				fieldNum := int32(wire >> 3)
				if fieldNum == 1 {
					var stringLenmapkey uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowApplicationserverWeb
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						stringLenmapkey |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					intStringLenmapkey := int(stringLenmapkey)
					if intStringLenmapkey < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					postStringIndexmapkey := iNdEx + intStringLenmapkey
					if postStringIndexmapkey < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					if postStringIndexmapkey > l {
						return io.ErrUnexpectedEOF
					}
					mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
					iNdEx = postStringIndexmapkey
				} else if fieldNum == 2 {
					var stringLenmapvalue uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowApplicationserverWeb
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						stringLenmapvalue |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					intStringLenmapvalue := int(stringLenmapvalue)
					if intStringLenmapvalue < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					postStringIndexmapvalue := iNdEx + intStringLenmapvalue
					if postStringIndexmapvalue < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					if postStringIndexmapvalue > l {
						return io.ErrUnexpectedEOF
					}
					mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue])
					iNdEx = postStringIndexmapvalue
				} else {
					iNdEx = entryPreIndex
					skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
					if err != nil {
						return err
					}
					if skippy < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					if (iNdEx + skippy) > postIndex {
						return io.ErrUnexpectedEOF
					}
					iNdEx += skippy
				}
			}
			m.Headers[mapkey] = mapvalue
			iNdEx = postIndex
		case 6:
			// Format: length-delimited string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Format", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Format = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 7:
			// Fields 7-14 are pointer sub-messages of type
			// ApplicationWebhook_Message, lazily allocated before decoding.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field UplinkMessage", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.UplinkMessage == nil {
				m.UplinkMessage = &ApplicationWebhook_Message{}
			}
			if err := m.UplinkMessage.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 8:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field JoinAccept", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.JoinAccept == nil {
				m.JoinAccept = &ApplicationWebhook_Message{}
			}
			if err := m.JoinAccept.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 9:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DownlinkAck", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.DownlinkAck == nil {
				m.DownlinkAck = &ApplicationWebhook_Message{}
			}
			if err := m.DownlinkAck.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 10:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DownlinkNack", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.DownlinkNack == nil {
				m.DownlinkNack = &ApplicationWebhook_Message{}
			}
			if err := m.DownlinkNack.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 11:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DownlinkSent", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.DownlinkSent == nil {
				m.DownlinkSent = &ApplicationWebhook_Message{}
			}
			if err := m.DownlinkSent.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 12:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DownlinkFailed", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.DownlinkFailed == nil {
				m.DownlinkFailed = &ApplicationWebhook_Message{}
			}
			if err := m.DownlinkFailed.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 13:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DownlinkQueued", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.DownlinkQueued == nil {
				m.DownlinkQueued = &ApplicationWebhook_Message{}
			}
			if err := m.DownlinkQueued.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 14:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field LocationSolved", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.LocationSolved == nil {
				m.LocationSolved = &ApplicationWebhook_Message{}
			}
			if err := m.LocationSolved.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 15:
			// Optional pointer to ApplicationWebhookTemplateIdentifiers.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ApplicationWebhookTemplateIdentifiers", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.ApplicationWebhookTemplateIdentifiers == nil {
				m.ApplicationWebhookTemplateIdentifiers = &ApplicationWebhookTemplateIdentifiers{}
			}
			if err := m.ApplicationWebhookTemplateIdentifiers.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 16:
			// TemplateFields: map<string, string>, decoded like Headers above.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field TemplateFields", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.TemplateFields == nil {
				m.TemplateFields = make(map[string]string)
			}
			var mapkey string
			var mapvalue string
			for iNdEx < postIndex {
				entryPreIndex := iNdEx
				var wire uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowApplicationserverWeb
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					wire |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				fieldNum := int32(wire >> 3)
				if fieldNum == 1 {
					var stringLenmapkey uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowApplicationserverWeb
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						stringLenmapkey |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					intStringLenmapkey := int(stringLenmapkey)
					if intStringLenmapkey < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					postStringIndexmapkey := iNdEx + intStringLenmapkey
					if postStringIndexmapkey < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					if postStringIndexmapkey > l {
						return io.ErrUnexpectedEOF
					}
					mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
					iNdEx = postStringIndexmapkey
				} else if fieldNum == 2 {
					var stringLenmapvalue uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowApplicationserverWeb
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						stringLenmapvalue |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					intStringLenmapvalue := int(stringLenmapvalue)
					if intStringLenmapvalue < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					postStringIndexmapvalue := iNdEx + intStringLenmapvalue
					if postStringIndexmapvalue < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					if postStringIndexmapvalue > l {
						return io.ErrUnexpectedEOF
					}
					mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue])
					iNdEx = postStringIndexmapvalue
				} else {
					iNdEx = entryPreIndex
					skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
					if err != nil {
						return err
					}
					if skippy < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					if (iNdEx + skippy) > postIndex {
						return io.ErrUnexpectedEOF
					}
					iNdEx += skippy
				}
			}
			m.TemplateFields[mapkey] = mapvalue
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip it wholesale.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// The message has a single known field, Path (field 1, string);
// everything else is skipped as unknown. Generated-style code.
func (m *ApplicationWebhook_Message) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field tag as a varint: fieldNum<<3 | wireType.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: Message: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: Message: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Path: length-delimited string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Path", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Path = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip it wholesale.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Field 1 (Webhooks) is a repeated ApplicationWebhook; each occurrence
// appends a new element. Generated-style code.
func (m *ApplicationWebhooks) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field tag as a varint: fieldNum<<3 | wireType.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ApplicationWebhooks: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ApplicationWebhooks: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Repeated sub-message: append then decode in place.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Webhooks", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Webhooks = append(m.Webhooks, &ApplicationWebhook{})
			if err := m.Webhooks[len(m.Webhooks)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip it wholesale.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Field 1 (Formats) is a map<string, string>; each wire occurrence is a
// nested entry message with key = field 1 and value = field 2.
// Generated-style code.
func (m *ApplicationWebhookFormats) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field tag as a varint: fieldNum<<3 | wireType.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ApplicationWebhookFormats: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ApplicationWebhookFormats: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Formats", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Formats == nil {
				m.Formats = make(map[string]string)
			}
			var mapkey string
			var mapvalue string
			// Parse key/value sub-fields of this map entry; unknown
			// sub-fields are skipped within the entry bounds.
			for iNdEx < postIndex {
				entryPreIndex := iNdEx
				var wire uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowApplicationserverWeb
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					wire |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				fieldNum := int32(wire >> 3)
				if fieldNum == 1 {
					var stringLenmapkey uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowApplicationserverWeb
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						stringLenmapkey |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					intStringLenmapkey := int(stringLenmapkey)
					if intStringLenmapkey < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					postStringIndexmapkey := iNdEx + intStringLenmapkey
					if postStringIndexmapkey < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					if postStringIndexmapkey > l {
						return io.ErrUnexpectedEOF
					}
					mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
					iNdEx = postStringIndexmapkey
				} else if fieldNum == 2 {
					var stringLenmapvalue uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowApplicationserverWeb
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						stringLenmapvalue |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					intStringLenmapvalue := int(stringLenmapvalue)
					if intStringLenmapvalue < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					postStringIndexmapvalue := iNdEx + intStringLenmapvalue
					if postStringIndexmapvalue < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					if postStringIndexmapvalue > l {
						return io.ErrUnexpectedEOF
					}
					mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue])
					iNdEx = postStringIndexmapvalue
				} else {
					iNdEx = entryPreIndex
					skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
					if err != nil {
						return err
					}
					if skippy < 0 {
						return ErrInvalidLengthApplicationserverWeb
					}
					if (iNdEx + skippy) > postIndex {
						return io.ErrUnexpectedEOF
					}
					iNdEx += skippy
				}
			}
			m.Formats[mapkey] = mapvalue
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip it wholesale.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Fields: 1 = embedded ApplicationWebhookIdentifiers, 2 = FieldMask.
// Generated-style code.
func (m *GetApplicationWebhookRequest) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field tag as a varint: fieldNum<<3 | wireType.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: GetApplicationWebhookRequest: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: GetApplicationWebhookRequest: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ApplicationWebhookIdentifiers", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ApplicationWebhookIdentifiers.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field FieldMask", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.FieldMask.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip it wholesale.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Fields: 1 = embedded ApplicationIdentifiers, 2 = FieldMask.
// Generated-style code.
func (m *ListApplicationWebhooksRequest) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field tag as a varint: fieldNum<<3 | wireType.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ListApplicationWebhooksRequest: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ListApplicationWebhooksRequest: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ApplicationIdentifiers", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ApplicationIdentifiers.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field FieldMask", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.FieldMask.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip it wholesale.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Fields: 1 = embedded ApplicationWebhook, 2 = FieldMask.
// Generated-style code.
func (m *SetApplicationWebhookRequest) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field tag as a varint: fieldNum<<3 | wireType.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: SetApplicationWebhookRequest: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: SetApplicationWebhookRequest: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ApplicationWebhook", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ApplicationWebhook.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field FieldMask", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.FieldMask.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip it wholesale.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Fields: 1 = embedded ApplicationWebhookTemplateIdentifiers, 2 = FieldMask.
// Generated-style code.
func (m *GetApplicationWebhookTemplateRequest) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field tag as a varint: fieldNum<<3 | wireType.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: GetApplicationWebhookTemplateRequest: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: GetApplicationWebhookTemplateRequest: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ApplicationWebhookTemplateIdentifiers", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ApplicationWebhookTemplateIdentifiers.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field FieldMask", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.FieldMask.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip it wholesale.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// This is machine-generated (gogo/protobuf-style) decoder code: it walks the
// buffer field by field, dispatching on field number and validating wire types.
// Do not hand-edit; regenerate from the .proto source instead.
func (m *ListApplicationWebhookTemplatesRequest) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx // remember tag start so unknown fields can be skipped from here
		var wire uint64
		// Decode the field tag as a base-128 varint: (fieldNum << 3) | wireType.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				// More than 10 continuation bytes: malformed varint.
				return ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			// Wire type 4 (end-group) is never valid at message top level.
			return fmt.Errorf("proto: ListApplicationWebhookTemplatesRequest: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ListApplicationWebhookTemplatesRequest: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1: FieldMask — embedded message (wire type 2, length-delimited).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field FieldMask", wireType)
			}
			var msglen int
			// Decode the varint length prefix of the embedded message.
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				// Addition overflowed int: treat as invalid length.
				return ErrInvalidLengthApplicationserverWeb
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			// Delegate to the nested message's own Unmarshal over its sub-slice.
			if err := m.FieldMask.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag start and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipApplicationserverWeb(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) < 0 {
				// Advancing past the skipped field overflowed int.
				return ErrInvalidLengthApplicationserverWeb
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// skipApplicationserverWeb returns the number of bytes occupied by the next
// complete field (tag plus payload) at the start of dAtA, so callers can skip
// over fields they do not recognize. It returns an error on malformed input.
// Machine-generated helper; do not hand-edit.
func skipApplicationserverWeb(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		var wire uint64
		// Decode the field tag varint: (fieldNum << 3) | wireType.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflowApplicationserverWeb
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// Varint payload: advance past continuation bytes until MSB is clear.
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
			return iNdEx, nil
		case 1:
			// Fixed 64-bit payload.
			iNdEx += 8
			return iNdEx, nil
		case 2:
			// Length-delimited payload: decode the varint length, then jump past it.
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowApplicationserverWeb
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if length < 0 {
				return 0, ErrInvalidLengthApplicationserverWeb
			}
			iNdEx += length
			if iNdEx < 0 {
				// Addition overflowed int: invalid length.
				return 0, ErrInvalidLengthApplicationserverWeb
			}
			return iNdEx, nil
		case 3:
			// Start-group: recursively skip nested fields until the matching
			// end-group tag (wire type 4) is encountered.
			for {
				var innerWire uint64
				var start int = iNdEx
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return 0, ErrIntOverflowApplicationserverWeb
					}
					if iNdEx >= l {
						return 0, io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					innerWire |= (uint64(b) & 0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				innerWireType := int(innerWire & 0x7)
				if innerWireType == 4 {
					// Matching end-group found; the group is fully consumed.
					break
				}
				// Skip the nested field starting at its own tag.
				next, err := skipApplicationserverWeb(dAtA[start:])
				if err != nil {
					return 0, err
				}
				iNdEx = start + next
				if iNdEx < 0 {
					return 0, ErrInvalidLengthApplicationserverWeb
				}
			}
			return iNdEx, nil
		case 4:
			// Bare end-group tag: nothing further to consume.
			return iNdEx, nil
		case 5:
			// Fixed 32-bit payload.
			iNdEx += 4
			return iNdEx, nil
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
	}
	// Every switch arm returns, so the loop can only be exited via return.
	panic("unreachable")
}
// Sentinel errors shared by the generated Unmarshal/skip helpers in this file.
var (
	// ErrInvalidLengthApplicationserverWeb reports a negative or overflowing
	// length prefix encountered while unmarshaling.
	ErrInvalidLengthApplicationserverWeb = fmt.Errorf("proto: negative length found during unmarshaling")
	// ErrIntOverflowApplicationserverWeb reports a varint whose encoding
	// exceeds 64 bits.
	ErrIntOverflowApplicationserverWeb = fmt.Errorf("proto: integer overflow")
)
| {
this := &ApplicationWebhookIdentifiers{}
v1 := NewPopulatedApplicationIdentifiers(r, easy)
this.ApplicationIdentifiers = *v1
this.WebhookID = randStringApplicationserverWeb(r)
if !easy && r.Intn(10) != 0 {
}
return this
} |
tfs.d.ts | // Type definitions for Microsoft Visual Studio Services v95.20160208.1028
// Project: http://www.visualstudio.com/integrate/extensions/overview
// Definitions by: Microsoft <[email protected]>
/// <reference path='vss.d.ts' />
declare module "TFS/Build/Contracts" {
import TFS_Core_Contracts = require("TFS/Core/Contracts");
import VSS_Common_Contracts = require("VSS/WebApi/Contracts");
export interface AgentPoolQueue extends ShallowReference {
_links: any;
/**
* The pool used by this queue.
*/
pool: TaskAgentPoolReference;
}
export enum AgentStatus {
/**
* Indicates that the build agent cannot be contacted.
*/
Unavailable = 0,
/**
* Indicates that the build agent is currently available.
*/
Available = 1,
/**
* Indicates that the build agent has taken itself offline.
*/
Offline = 2,
}
export interface ArtifactResource {
_links: any;
/**
* The type-specific resource data. For example, "#/10002/5/drop", "$/drops/5", "\\myshare\myfolder\mydrops\5"
*/
data: string;
/**
* Link to the resource. This might include things like query parameters to download as a zip file
*/
downloadUrl: string;
/**
* Properties of Artifact Resource
*/
properties: {
[key: string]: string;
};
/**
* The type of the resource: File container, version control folder, UNC path, etc.
*/
type: string;
/**
* Link to the resource
*/
url: string;
}
/**
 * The kind of change recorded for an audited entity
 * (used by BuildDefinitionChangedEvent and BuildDefinitionRevision).
 */
export enum AuditAction {
    Add = 1,
    Update = 2,
    Delete = 3,
}
/**
* Data representation of a build
*/
export interface Build {
_links: any;
/**
* Build number/name of the build
*/
buildNumber: string;
/**
* Build number revision
*/
buildNumberRevision: number;
/**
* The build controller. This should only be set if the definition type is Xaml.
*/
controller: BuildController;
/**
* The definition associated with the build
*/
definition: DefinitionReference;
/**
* Indicates whether the build has been deleted.
*/
deleted: boolean;
/**
* Demands
*/
demands: any[];
/**
* Time that the build was completed
*/
finishTime: Date;
/**
* Id of the build
*/
id: number;
keepForever: boolean;
/**
* Process or person that last changed the build
*/
lastChangedBy: VSS_Common_Contracts.IdentityRef;
/**
* Date the build was last changed
*/
lastChangedDate: Date;
/**
* Log location of the build
*/
logs: BuildLogReference;
/**
* Orchestration plan for the build
*/
orchestrationPlan: TaskOrchestrationPlanReference;
/**
* Parameters for the build
*/
parameters: string;
/**
* The build's priority
*/
priority: QueuePriority;
/**
* The team project
*/
project: TFS_Core_Contracts.TeamProjectReference;
properties: any;
/**
* Quality of the xaml build (good, bad, etc.)
*/
quality: string;
/**
* The queue. This should only be set if the definition type is Build.
*/
queue: AgentPoolQueue;
/**
* Queue option of the build.
*/
queueOptions: QueueOptions;
/**
* The current position of the build in the queue
*/
queuePosition: number;
/**
* Time that the build was queued
*/
queueTime: Date;
/**
* Reason that the build was created
*/
reason: BuildReason;
/**
* The repository
*/
repository: BuildRepository;
/**
* The identity that queued the build
*/
requestedBy: VSS_Common_Contracts.IdentityRef;
/**
* The identity on whose behalf the build was queued
*/
requestedFor: VSS_Common_Contracts.IdentityRef;
/**
* The build result
*/
result: BuildResult;
/**
* Source branch
*/
sourceBranch: string;
/**
* Source version
*/
sourceVersion: string;
/**
* Time that the build was started
*/
startTime: Date;
/**
* Status of the build
*/
status: BuildStatus;
tags: string[];
/**
* Uri of the build
*/
uri: string;
/**
* REST url of the build
*/
url: string;
validationResults: BuildRequestValidationResult[];
}
/**
 * A build agent record. NOTE(review): the controller/messageQueueUrl fields
 * suggest this describes a legacy XAML build agent — confirm against the
 * server contract.
 */
export interface BuildAgent {
    /** Local directory the agent builds in — TODO confirm semantics */
    buildDirectory: string;
    /** The controller this agent is registered with */
    controller: ShallowReference;
    /** The date the agent was created */
    createdDate: Date;
    /** Description of the agent */
    description: string;
    /** Whether the agent is enabled */
    enabled: boolean;
    /** Id of the agent */
    id: number;
    /** Message queue endpoint used by the agent — TODO confirm */
    messageQueueUrl: string;
    /** Name of the agent */
    name: string;
    /** Build the agent is reserved for, if any — TODO confirm */
    reservedForBuild: string;
    /** The server hosting the agent */
    server: ShallowReference;
    /** Current status (see AgentStatus) */
    status: AgentStatus;
    /** Human-readable status details */
    statusMessage: string;
    /** The date the agent was last updated */
    updatedDate: Date;
    /** Uri of the agent */
    uri: string;
    /** REST url of the agent */
    url: string;
}
export interface BuildArtifact {
/**
* The artifact id
*/
id: number;
/**
* The name of the artifact
*/
name: string;
/**
* The actual resource
*/
resource: ArtifactResource;
}
export interface BuildArtifactAddedEvent extends BuildUpdatedEvent {
artifact: BuildArtifact;
}
export enum BuildAuthorizationScope {
/**
* The identity used should have build service account permissions scoped to the project collection. This is useful when resources for a single build are spread across multiple projects.
*/
ProjectCollection = 1,
/**
* The identity used should have build service account permissions scoped to the project in which the build definition resides. This is useful for isolation of build jobs to a particular team project to avoid any unintentional escalation of privilege attacks during a build.
*/
Project = 2,
}
/**
* Data representation of a build badge
*/
export interface BuildBadge {
/**
* Build id, if exists that this badge corresponds to
*/
buildId: number;
/**
* Self Url that generates SVG
*/
imageUrl: string;
}
export interface BuildChangesCalculatedEvent extends BuildUpdatedEvent {
changes: Change[];
}
export interface BuildCompletedEvent extends BuildUpdatedEvent {
}
export interface BuildController extends ShallowReference {
_links: any;
/**
* The date the controller was created.
*/
createdDate: Date;
/**
* The description of the controller.
*/
description: string;
/**
* Indicates whether the controller is enabled.
*/
enabled: boolean;
/**
* The status of the controller.
*/
status: ControllerStatus;
/**
* The date the controller was last updated.
*/
updatedDate: Date;
/**
* The controller's URI.
*/
uri: string;
}
export interface BuildDefinition extends BuildDefinitionReference {
_links: any;
/**
* Indicates whether badges are enabled for this definition
*/
badgeEnabled: boolean;
build: BuildDefinitionStep[];
/**
* The build number format
*/
buildNumberFormat: string;
/**
* The comment entered when saving the definition
*/
comment: string;
demands: any[];
/**
* The description
*/
description: string;
/**
* The drop location for the definition
*/
dropLocation: string;
/**
* Gets or sets the job authorization scope for builds which are queued against this definition
*/
jobAuthorizationScope: BuildAuthorizationScope;
/**
* Gets or sets the job execution timeout in minutes for builds which are queued against this definition
*/
jobTimeoutInMinutes: number;
options: BuildOption[];
properties: any;
/**
* The repository
*/
repository: BuildRepository;
retentionRules: RetentionPolicy[];
triggers: BuildTrigger[];
variables: {
[key: string]: BuildDefinitionVariable;
};
}
export interface BuildDefinitionChangedEvent {
changeType: AuditAction;
definition: BuildDefinition;
}
export interface BuildDefinitionChangingEvent {
changeType: AuditAction;
newDefinition: BuildDefinition;
originalDefinition: BuildDefinition;
}
export interface BuildDefinitionReference extends DefinitionReference {
/**
* The author of the definition.
*/
authoredBy: VSS_Common_Contracts.IdentityRef;
/**
* If this is a draft definition, it might have a parent
*/
draftOf: DefinitionReference;
/**
* The quality of the definition document (draft, etc.)
*/
quality: DefinitionQuality;
/**
* The default queue which should be used for requests.
*/
queue: AgentPoolQueue;
}
export interface BuildDefinitionRevision {
changedBy: VSS_Common_Contracts.IdentityRef;
changedDate: Date;
changeType: AuditAction;
comment: string;
definitionUrl: string;
name: string;
revision: number;
}
export interface BuildDefinitionSourceProvider {
/**
* Uri of the associated definition
*/
definitionUri: string;
/**
* fields associated with this build definition
*/
fields: {
[key: string]: string;
};
/**
* Id of this source provider
*/
id: number;
/**
     * The last time this source provider was modified
*/
lastModified: Date;
/**
* Name of the source provider
*/
name: string;
/**
* Which trigger types are supported by this definition source provider
*/
supportedTriggerTypes: DefinitionTriggerType;
}
/**
 * A single step (task invocation) in a build definition's build list.
 */
export interface BuildDefinitionStep {
    /** Presumably: run this step even if earlier steps failed — confirm */
    alwaysRun: boolean;
    /** Presumably: continue with later steps if this step errors — confirm */
    continueOnError: boolean;
    /** Display name shown for the step */
    displayName: string;
    /** Whether the step is enabled */
    enabled: boolean;
    /** Task input name/value pairs */
    inputs: {
        [key: string]: string;
    };
    /** Reference to the task definition this step runs */
    task: TaskDefinitionReference;
}
export interface BuildDefinitionTemplate {
canDelete: boolean;
category: string;
description: string;
iconTaskId: string;
id: string;
name: string;
template: BuildDefinition;
}
/**
 * A variable defined on a build definition (see BuildDefinition.variables).
 */
export interface BuildDefinitionVariable {
    /** Whether the value may be overridden (e.g. at queue time) — TODO confirm */
    allowOverride: boolean;
    /** Whether the value is treated as a secret */
    isSecret: boolean;
    /** The variable's value */
    value: string;
}
export interface BuildDeletedEvent extends RealtimeBuildEvent {
build: Build;
}
export interface BuildDeployment {
deployment: BuildSummary;
sourceBuild: ShallowReference;
}
export interface BuildDestroyedEvent extends RealtimeBuildEvent {
build: Build;
}
/**
* Represents a build log.
*/
export interface BuildLog extends BuildLogReference {
/**
* The date the log was created.
*/
createdOn: Date;
/**
* The date the log was last changed.
*/
lastChangedOn: Date;
/**
* The number of lines in the log.
*/
lineCount: number;
}
/**
* Data representation of a build log reference
*/
export interface BuildLogReference {
/**
* The id of the log.
*/
id: number;
/**
* The type of the log location.
*/
type: string;
/**
* Full link to the log resource.
*/
url: string;
}
export interface BuildOption {
definition: BuildOptionDefinitionReference;
enabled: boolean;
inputs: {
[key: string]: string;
};
}
export interface BuildOptionDefinition extends BuildOptionDefinitionReference {
description: string;
groups: BuildOptionGroupDefinition[];
inputs: BuildOptionInputDefinition[];
name: string;
ordinal: number;
}
export interface BuildOptionDefinitionReference {
id: string;
}
export interface BuildOptionGroupDefinition {
displayName: string;
isExpanded: boolean;
name: string;
}
export interface BuildOptionInputDefinition {
defaultValue: string;
groupName: string;
help: {
[key: string]: string;
};
label: string;
name: string;
options: {
[key: string]: string;
};
required: boolean;
type: BuildOptionInputType;
visibleRule: string;
}
export enum BuildOptionInputType {
String = 0,
Boolean = 1,
StringList = 2,
Radio = 3,
PickList = 4,
MultiLine = 5,
}
export enum BuildPhaseStatus {
/**
* The state is not known.
*/
Unknown = 0,
/**
* The build phase completed unsuccessfully.
*/
Failed = 1,
/**
* The build phase completed successfully.
*/
Succeeded = 2,
}
export interface BuildPollingSummaryEvent {
}
export interface BuildProcessTemplate {
description: string;
fileExists: boolean;
id: number;
parameters: string;
serverPath: string;
supportedReasons: BuildReason;
teamProject: string;
templateType: ProcessTemplateType;
url: string;
version: string;
}
export enum BuildQueryOrder {
/**
* Order by finish time ascending.
*/
FinishTimeAscending = 2,
/**
* Order by finish time descending.
*/
FinishTimeDescending = 3,
}
export interface BuildQueuedEvent extends BuildUpdatedEvent {
}
export enum BuildReason {
/**
* No reason. This value should not be used.
*/
None = 0,
/**
* The build was started manually.
*/
Manual = 1,
/**
* The build was started for the trigger TriggerType.ContinuousIntegration.
*/
IndividualCI = 2,
/**
* The build was started for the trigger TriggerType.BatchedContinuousIntegration.
*/
BatchedCI = 4,
/**
* The build was started for the trigger TriggerType.Schedule.
*/
Schedule = 8,
/**
* The build was created by a user.
*/
UserCreated = 32,
/**
* The build was started manually for private validation.
*/
ValidateShelveset = 64,
/**
* The build was started for the trigger ContinuousIntegrationType.Gated.
*/
CheckInShelveset = 128,
/**
* The build was triggered for retention policy purposes.
*/
Triggered = 175,
/**
* All reasons.
*/
All = 239,
}
export interface BuildReference {
_links: any;
/**
* Build number/name of the build
*/
buildNumber: string;
/**
* Time that the build was completed
*/
finishTime: Date;
/**
* Id of the build
*/
id: number;
/**
* Time that the build was queued
*/
queueTime: Date;
/**
* The build result
*/
result: BuildResult;
/**
* Time that the build was started
*/
startTime: Date;
/**
* Status of the build
*/
status: BuildStatus;
}
export interface BuildReportMetadata {
buildId: number;
content: string;
type: string;
}
export interface BuildRepository {
checkoutSubmodules: boolean;
/**
* Indicates whether to clean the target folder when getting code from the repository. This is a String so that it can reference variables.
*/
clean: string;
/**
* Gets or sets the name of the default branch.
*/
defaultBranch: string;
id: string;
/**
* Gets or sets the friendly name of the repository.
*/
name: string;
properties: {
[key: string]: string;
};
/**
* Gets or sets the root folder.
*/
rootFolder: string;
/**
* Gets or sets the type of the repository.
*/
type: string;
/**
* Gets or sets the url of the repository.
*/
url: string;
}
export interface BuildRequestValidationResult {
message: string;
result: ValidationResult;
}
export interface BuildResourceUsage {
distributedTaskAgents: number;
totalUsage: number;
xamlControllers: number;
}
export enum BuildResult {
/**
* No result
*/
None = 0,
/**
* The build completed successfully.
*/
Succeeded = 2,
/**
* The build completed compilation successfully but had other errors.
*/
PartiallySucceeded = 4,
/**
* The build completed unsuccessfully.
*/
Failed = 8,
/**
* The build was canceled before starting.
*/
Canceled = 32,
}
export interface BuildServer {
agents: ShallowReference[];
controller: ShallowReference;
id: number;
isVirtual: boolean;
messageQueueUrl: string;
name: string;
requireClientCertificates: boolean;
status: ServiceHostStatus;
statusChangedDate: Date;
uri: string;
url: string;
version: number;
}
export interface BuildSettings {
daysToKeepDeletedBuildsBeforeDestroy: number;
defaultRetentionPolicy: RetentionPolicy;
maximumRetentionPolicy: RetentionPolicy;
}
export interface BuildStartedEvent extends BuildUpdatedEvent {
}
export enum BuildStatus {
/**
* No status.
*/
None = 0,
/**
* The build is currently in progress.
*/
InProgress = 1,
/**
* The build has completed.
*/
Completed = 2,
/**
* The build is cancelling
*/
Cancelling = 4,
/**
* The build is inactive in the queue.
*/
Postponed = 8,
/**
* The build has not yet started.
*/
NotStarted = 32,
/**
* All status.
*/
All = 47,
}
export interface BuildSummary {
build: ShallowReference;
finishTime: Date;
keepForever: boolean;
quality: string;
reason: BuildReason;
requestedFor: VSS_Common_Contracts.IdentityRef;
startTime: Date;
status: BuildStatus;
}
export interface BuildTrigger {
triggerType: DefinitionTriggerType;
}
export interface BuildUpdatedEvent extends RealtimeBuildEvent {
build: Build;
}
export interface BuildWorkspace {
mappings: MappingDetails[];
}
/**
* Represents a change associated with a build.
*/
export interface Change {
/**
* The author of the change.
*/
author: VSS_Common_Contracts.IdentityRef;
/**
* The location of a user-friendly representation of the resource.
*/
displayUri: string;
/**
* Something that identifies the change. For a commit, this would be the SHA1. For a TFVC changeset, this would be the changeset id.
*/
id: string;
/**
* The location of the full representation of the resource.
*/
location: string;
/**
* A description of the change. This might be a commit message or changeset description.
*/
message: string;
/**
* Indicates whether the message was truncated
*/
messageTruncated: boolean;
/**
* A timestamp for the change.
*/
timestamp: Date;
/**
* The type of change. "commit", "changeset", etc.
*/
type: string;
}
export interface ConsoleLogEvent extends RealtimeBuildEvent {
lines: string[];
timelineId: string;
timelineRecordId: string;
}
export interface ContinuousDeploymentDefinition {
/**
* The connected service associated with the continuous deployment
*/
connectedService: TFS_Core_Contracts.WebApiConnectedServiceRef;
/**
* The definition associated with the continuous deployment
*/
definition: ShallowReference;
gitBranch: string;
hostedServiceName: string;
project: TFS_Core_Contracts.TeamProjectReference;
repositoryId: string;
storageAccountName: string;
subscriptionId: string;
website: string;
webspace: string;
}
export interface ContinuousIntegrationTrigger extends BuildTrigger {
batchChanges: boolean;
branchFilters: string[];
maxConcurrentBuildsPerBranch: number;
/**
* The polling interval in seconds.
*/
pollingInterval: number;
/**
* This is the id of the polling job that polls the external repository. Once the build definition is saved/updated, this value is set.
*/
pollingJobId: string;
}
export enum ControllerStatus {
/**
* Indicates that the build controller cannot be contacted.
*/
Unavailable = 0,
/**
* Indicates that the build controller is currently available.
*/
Available = 1,
/**
* Indicates that the build controller has taken itself offline.
*/
Offline = 2,
}
export enum DefinitionQuality {
Definition = 1,
Draft = 2,
}
export enum DefinitionQueryOrder {
/**
* No order
*/
None = 0,
/**
* Order by created on/last modified time ascending.
*/
LastModifiedAscending = 1,
/**
* Order by created on/last modified time descending.
*/
LastModifiedDescending = 2,
}
export enum DefinitionQueueStatus {
/**
* When enabled the definition queue allows builds to be queued by users, the system will queue scheduled, gated and continuous integration builds, and the queued builds will be started by the system.
*/
Enabled = 0,
/**
* When paused the definition queue allows builds to be queued by users and the system will queue scheduled, gated and continuous integration builds. Builds in the queue will not be started by the system.
*/
Paused = 1,
/**
* When disabled the definition queue will not allow builds to be queued by users and the system will not queue scheduled, gated or continuous integration builds. Builds already in the queue will not be started by the system.
*/
Disabled = 2,
}
/**
* A reference to a definition.
*/
export interface DefinitionReference extends ShallowReference {
/**
* The date the definition was created
*/
createdDate: Date;
/**
* The project.
*/
project: TFS_Core_Contracts.TeamProjectReference;
/**
* If builds can be queued from this definition
*/
queueStatus: DefinitionQueueStatus;
/**
* The definition revision number.
*/
revision: number;
/**
* The type of the definition.
*/
type: DefinitionType;
/**
* The Uri of the definition
*/
uri: string;
}
export enum DefinitionTriggerType {
/**
* Manual builds only.
*/
None = 1,
/**
* A build should be started for each changeset.
*/
ContinuousIntegration = 2,
/**
* A build should be started for multiple changesets at a time at a specified interval.
*/
BatchedContinuousIntegration = 4,
/**
* A build should be started on a specified schedule whether or not changesets exist.
*/
Schedule = 8,
/**
* A validation build should be started for each check-in.
*/
GatedCheckIn = 16,
/**
* A validation build should be started for each batch of check-ins.
*/
BatchedGatedCheckIn = 32,
/**
* All types.
*/
All = 63,
}
export enum DefinitionType {
Xaml = 1,
Build = 2,
}
export enum DeleteOptions {
/**
* No data should be deleted. This value should not be used.
*/
None = 0,
/**
* The drop location should be deleted.
*/
DropLocation = 1,
/**
* The test results should be deleted.
*/
TestResults = 2,
/**
* The version control label should be deleted.
*/
Label = 4,
/**
* The build should be deleted.
*/
Details = 8,
/**
* Published symbols should be deleted.
*/
Symbols = 16,
/**
* All data should be deleted.
*/
All = 31,
}
/**
* Represents the data from the build information nodes for type "DeploymentInformation" for xaml builds
*/
export interface Deployment {
type: string;
}
/**
     * Deployment information for type "Build"
*/
export interface DeploymentBuild extends Deployment {
buildId: number;
}
/**
     * Deployment information for type "Deploy"
*/
export interface DeploymentDeploy extends Deployment {
message: string;
}
/**
     * Deployment information for type "Test"
*/
export interface DeploymentTest extends Deployment {
runId: number;
}
export interface GatedCheckInTrigger extends BuildTrigger {
pathFilters: string[];
runContinuousIntegration: boolean;
}
export enum GetOption {
/**
* Use the latest changeset at the time the build is queued.
*/
LatestOnQueue = 0,
/**
* Use the latest changeset at the time the build is started.
*/
LatestOnBuild = 1,
/**
* A user-specified version has been supplied.
*/
Custom = 2,
}
/**
* Data representation of an information node associated with a build
*/
export interface InformationNode {
/**
* Fields of the information node
*/
fields: {
[key: string]: string;
};
/**
* Process or person that last modified this node
*/
lastModifiedBy: string;
/**
* Date this node was last modified
*/
lastModifiedDate: Date;
/**
* Node Id of this information node
*/
nodeId: number;
/**
* Id of parent node (xml tree)
*/
parentId: number;
/**
* The type of the information node
*/
type: string;
}
/**
 * An error or warning reported during a build (see TimelineRecord.issues).
 */
export interface Issue {
    /** Category of the issue */
    category: string;
    /** Additional name/value data attached to the issue */
    data: {
        [key: string]: string;
    };
    /** Human-readable message */
    message: string;
    /** Whether this is an error or a warning (see IssueType) */
    type: IssueType;
}
export enum IssueType {
Error = 1,
Warning = 2,
}
export interface MappingDetails {
localPath: string;
mappingType: string;
serverPath: string;
}
export enum ProcessTemplateType {
/**
* Indicates a custom template.
*/
Custom = 0,
/**
* Indicates a default template.
*/
Default = 1,
/**
* Indicates an upgrade template.
*/
Upgrade = 2,
}
export interface PropertyValue {
/**
* Guid of identity that changed this property value
*/
changedBy: string;
/**
* The date this property value was changed
*/
changedDate: Date;
/**
* Name in the name value mapping
*/
propertyName: string;
/**
* Value in the name value mapping
*/
value: any;
}
export enum QueryDeletedOption {
/**
* Include only non-deleted builds.
*/
ExcludeDeleted = 0,
/**
* Include deleted and non-deleted builds.
*/
IncludeDeleted = 1,
/**
* Include only deleted builds.
*/
OnlyDeleted = 2,
}
export enum QueueOptions {
/**
* No queue options
*/
None = 0,
/**
* Create a plan Id for the build, do not run it
*/
DoNotRun = 1,
}
export enum QueuePriority {
/**
* Low priority.
*/
Low = 5,
/**
* Below normal priority.
*/
BelowNormal = 4,
/**
* Normal priority.
*/
Normal = 3,
/**
* Above normal priority.
*/
AboveNormal = 2,
/**
* High priority.
*/
High = 1,
}
export interface RealtimeBuildEvent {
buildId: number;
}
export interface RequestReference {
/**
* Id of the resource
*/
id: number;
/**
* Name of the requestor
*/
requestedFor: VSS_Common_Contracts.IdentityRef;
/**
* Full http link to the resource
*/
url: string;
}
/**
 * A retention rule controlling how long builds are kept (used by
 * BuildDefinition.retentionRules and BuildSettings).
 */
export interface RetentionPolicy {
    /** Artifact selectors the rule applies to — TODO confirm semantics */
    artifacts: string[];
    /** Branch filters the rule applies to */
    branches: string[];
    /** Number of days to keep matching builds */
    daysToKeep: number;
    /** Whether to delete the build record itself */
    deleteBuildRecord: boolean;
    /** Whether to delete associated test results */
    deleteTestResults: boolean;
    /** Minimum number of builds to keep regardless of age — TODO confirm */
    minimumToKeep: number;
}
export interface Schedule {
branchFilters: string[];
/**
* Days for a build (flags enum for days of the week)
*/
daysToBuild: ScheduleDays;
/**
* The Job Id of the Scheduled job that will queue the scheduled build. Since a single trigger can have multiple schedules and we want a single job to process a single schedule (since each schedule has a list of branches to build), the schedule itself needs to define the Job Id. This value will be filled in when a definition is added or updated. The UI does not provide it or use it.
*/
scheduleJobId: string;
/**
* Local timezone hour to start
*/
startHours: number;
/**
* Local timezone minute to start
*/
startMinutes: number;
/**
* Time zone of the build schedule (string representation of the time zone id)
*/
timeZoneId: string;
}
export enum ScheduleDays {
/**
* Do not run.
*/
None = 0,
/**
* Run on Monday.
*/
Monday = 1,
/**
* Run on Tuesday.
*/
Tuesday = 2,
/**
* Run on Wednesday.
*/
Wednesday = 4,
/**
* Run on Thursday.
*/
Thursday = 8,
/**
* Run on Friday.
*/
Friday = 16,
/**
* Run on Saturday.
*/
Saturday = 32,
/**
* Run on Sunday.
*/
Sunday = 64,
/**
* Run on all days of the week.
*/
All = 127,
}
export interface ScheduleTrigger extends BuildTrigger {
schedules: Schedule[];
}
export enum ServiceHostStatus {
/**
* The service host is currently connected and accepting commands.
*/
Online = 1,
/**
* The service host is currently disconnected and not accepting commands.
*/
Offline = 2,
}
/**
* An abstracted reference to some other resource. This class is used to provide the build data contracts with a uniform way to reference other resources in a way that provides easy traversal through links.
*/
export interface ShallowReference {
/**
* Id of the resource
*/
id: number;
/**
* Name of the linked resource (definition name, controller name, etc.)
*/
name: string;
/**
* Full http link to the resource
*/
url: string;
}
export interface SvnMappingDetails {
depth: number;
ignoreExternals: boolean;
localPath: string;
revision: string;
serverPath: string;
}
export interface SvnWorkspace {
mappings: SvnMappingDetails[];
}
export interface TaskAgentPoolReference {
id: number;
name: string;
}
export interface TaskDefinitionReference {
id: string;
versionSpec: string;
}
export interface TaskOrchestrationPlanReference {
planId: string;
}
/**
 * Result of an individual task/job, as recorded on a TimelineRecord.
 */
export enum TaskResult {
    Succeeded = 0,
    SucceededWithIssues = 1,
    Failed = 2,
    Canceled = 3,
    Skipped = 4,
    Abandoned = 5,
}
export interface Timeline extends TimelineReference {
lastChangedBy: string;
lastChangedOn: Date;
records: TimelineRecord[];
}
/**
 * A single entry (e.g. a task or phase) in a build timeline.
 */
export interface TimelineRecord {
    /** Related REST links */
    _links: any;
    /** Change counter, presumably incremented on update — confirm */
    changeId: number;
    /** Description of the operation currently executing */
    currentOperation: string;
    /** Reference to a sub-timeline with more detail, if any */
    details: TimelineReference;
    /** Number of errors reported by this record */
    errorCount: number;
    /** Time the record finished */
    finishTime: Date;
    /** Id of the record */
    id: string;
    /** Errors/warnings associated with this record */
    issues: Issue[];
    /** The time the record was last modified */
    lastModified: Date;
    /** Reference to the log produced by this record */
    log: BuildLogReference;
    /** Name of the record */
    name: string;
    /** Ordering hint among sibling records — TODO confirm */
    order: number;
    /** Id of the parent record, if nested */
    parentId: string;
    /** Progress, as a percentage */
    percentComplete: number;
    /** Result once completed (see TaskResult) */
    result: TaskResult;
    /** Result code string — TODO confirm semantics */
    resultCode: string;
    /** Time the record started */
    startTime: Date;
    /** Current state (see TimelineRecordState) */
    state: TimelineRecordState;
    /** The type of the record — TODO confirm value set */
    type: string;
    /** REST url of the record */
    url: string;
    /** Number of warnings reported by this record */
    warningCount: number;
    /** Name of the worker/agent that executed this record */
    workerName: string;
}
export enum TimelineRecordState {
Pending = 0,
InProgress = 1,
Completed = 2,
}
export interface TimelineRecordsUpdatedEvent extends RealtimeBuildEvent {
timelineRecords: TimelineRecord[];
}
export interface TimelineReference {
changeId: number;
id: string;
url: string;
}
export enum ValidationResult {
OK = 0,
Warning = 1,
Error = 2,
}
/**
* Mapping for a workspace
*/
export interface WorkspaceMapping {
/**
* Uri of the associated definition
*/
definitionUri: string;
/**
* Depth of this mapping
*/
depth: number;
/**
* local location of the definition
*/
localItem: string;
/**
* type of workspace mapping
*/
mappingType: WorkspaceMappingType;
/**
* Server location of the definition
*/
serverItem: string;
/**
* Id of the workspace
*/
workspaceId: number;
}
export enum WorkspaceMappingType {
/**
* The path is mapped in the workspace.
*/
Map = 0,
/**
* The path is cloaked in the workspace.
*/
Cloak = 1,
}
export interface WorkspaceTemplate {
/**
* Uri of the associated definition
*/
definitionUri: string;
/**
* The identity that last modified this template
*/
lastModifiedBy: string;
/**
* The last time this template was modified
*/
lastModifiedDate: Date;
/**
* List of workspace mappings
*/
mappings: WorkspaceMapping[];
/**
* Id of the workspace for this template
*/
workspaceId: number;
}
export interface XamlBuildDefinition extends DefinitionReference {
_links: any;
/**
* Batch size of the definition
*/
batchSize: number;
buildArgs: string;
/**
* The continuous integration quiet period
*/
continuousIntegrationQuietPeriod: number;
/**
* The build controller
*/
controller: BuildController;
/**
* The date this definition was created
*/
createdOn: Date;
/**
* Default drop location for builds from this definition
*/
defaultDropLocation: string;
/**
* Description of the definition
*/
description: string;
/**
* The last build on this definition
*/
lastBuild: ShallowReference;
/**
* The repository
*/
repository: BuildRepository;
/**
* The reasons supported by the template
*/
supportedReasons: BuildReason;
/**
* How builds are triggered from this definition
*/
triggerType: DefinitionTriggerType;
}
/**
 * Serialization metadata for the contract types in this module: `fields` entries
 * describe field-level (de)serialization info, `enumValues` entries map
 * camel-cased wire names to their numeric enum values.
 */
export var TypeInfo: {
    AgentPoolQueue: {
        fields: any;
    };
    AgentStatus: {
        enumValues: {
            "unavailable": number;
            "available": number;
            "offline": number;
        };
    };
    ArtifactResource: {
        fields: any;
    };
    AuditAction: {
        enumValues: {
            "add": number;
            "update": number;
            "delete": number;
        };
    };
    Build: {
        fields: any;
    };
    BuildAgent: {
        fields: any;
    };
    BuildArtifact: {
        fields: any;
    };
    BuildArtifactAddedEvent: {
        fields: any;
    };
    BuildAuthorizationScope: {
        enumValues: {
            "projectCollection": number;
            "project": number;
        };
    };
    BuildBadge: {
        fields: any;
    };
    BuildChangesCalculatedEvent: {
        fields: any;
    };
    BuildCompletedEvent: {
        fields: any;
    };
    BuildController: {
        fields: any;
    };
    BuildDefinition: {
        fields: any;
    };
    BuildDefinitionChangedEvent: {
        fields: any;
    };
    BuildDefinitionChangingEvent: {
        fields: any;
    };
    BuildDefinitionReference: {
        fields: any;
    };
    BuildDefinitionRevision: {
        fields: any;
    };
    BuildDefinitionSourceProvider: {
        fields: any;
    };
    BuildDefinitionStep: {
        fields: any;
    };
    BuildDefinitionTemplate: {
        fields: any;
    };
    BuildDefinitionVariable: {
        fields: any;
    };
    BuildDeletedEvent: {
        fields: any;
    };
    BuildDeployment: {
        fields: any;
    };
    BuildDestroyedEvent: {
        fields: any;
    };
    BuildLog: {
        fields: any;
    };
    BuildLogReference: {
        fields: any;
    };
    BuildOption: {
        fields: any;
    };
    BuildOptionDefinition: {
        fields: any;
    };
    BuildOptionDefinitionReference: {
        fields: any;
    };
    BuildOptionGroupDefinition: {
        fields: any;
    };
    BuildOptionInputDefinition: {
        fields: any;
    };
    BuildOptionInputType: {
        enumValues: {
            "string": number;
            "boolean": number;
            "stringList": number;
            "radio": number;
            "pickList": number;
            "multiLine": number;
        };
    };
    BuildPhaseStatus: {
        enumValues: {
            "unknown": number;
            "failed": number;
            "succeeded": number;
        };
    };
    BuildPollingSummaryEvent: {
        fields: any;
    };
    BuildProcessTemplate: {
        fields: any;
    };
    BuildQueryOrder: {
        enumValues: {
            "finishTimeAscending": number;
            "finishTimeDescending": number;
        };
    };
    BuildQueuedEvent: {
        fields: any;
    };
    BuildReason: {
        enumValues: {
            "none": number;
            "manual": number;
            "individualCI": number;
            "batchedCI": number;
            "schedule": number;
            "userCreated": number;
            "validateShelveset": number;
            "checkInShelveset": number;
            "triggered": number;
            "all": number;
        };
    };
    BuildReference: {
        fields: any;
    };
    BuildReportMetadata: {
        fields: any;
    };
    BuildRepository: {
        fields: any;
    };
    BuildRequestValidationResult: {
        fields: any;
    };
    BuildResourceUsage: {
        fields: any;
    };
    BuildResult: {
        enumValues: {
            "none": number;
            "succeeded": number;
            "partiallySucceeded": number;
            "failed": number;
            "canceled": number;
        };
    };
    BuildServer: {
        fields: any;
    };
    BuildSettings: {
        fields: any;
    };
    BuildStartedEvent: {
        fields: any;
    };
    BuildStatus: {
        enumValues: {
            "none": number;
            "inProgress": number;
            "completed": number;
            "cancelling": number;
            "postponed": number;
            "notStarted": number;
            "all": number;
        };
    };
    BuildSummary: {
        fields: any;
    };
    BuildTrigger: {
        fields: any;
    };
    BuildUpdatedEvent: {
        fields: any;
    };
    BuildWorkspace: {
        fields: any;
    };
    Change: {
        fields: any;
    };
    ConsoleLogEvent: {
        fields: any;
    };
    ContinuousDeploymentDefinition: {
        fields: any;
    };
    ContinuousIntegrationTrigger: {
        fields: any;
    };
    ControllerStatus: {
        enumValues: {
            "unavailable": number;
            "available": number;
            "offline": number;
        };
    };
    DefinitionQuality: {
        enumValues: {
            "definition": number;
            "draft": number;
        };
    };
    DefinitionQueryOrder: {
        enumValues: {
            "none": number;
            "lastModifiedAscending": number;
            "lastModifiedDescending": number;
        };
    };
    DefinitionQueueStatus: {
        enumValues: {
            "enabled": number;
            "paused": number;
            "disabled": number;
        };
    };
    DefinitionReference: {
        fields: any;
    };
    DefinitionTriggerType: {
        enumValues: {
            "none": number;
            "continuousIntegration": number;
            "batchedContinuousIntegration": number;
            "schedule": number;
            "gatedCheckIn": number;
            "batchedGatedCheckIn": number;
            "all": number;
        };
    };
    DefinitionType: {
        enumValues: {
            "xaml": number;
            "build": number;
        };
    };
    DeleteOptions: {
        enumValues: {
            "none": number;
            "dropLocation": number;
            "testResults": number;
            "label": number;
            "details": number;
            "symbols": number;
            "all": number;
        };
    };
    Deployment: {
        fields: any;
    };
    DeploymentBuild: {
        fields: any;
    };
    DeploymentDeploy: {
        fields: any;
    };
    DeploymentTest: {
        fields: any;
    };
    GatedCheckInTrigger: {
        fields: any;
    };
    GetOption: {
        enumValues: {
            "latestOnQueue": number;
            "latestOnBuild": number;
            "custom": number;
        };
    };
    InformationNode: {
        fields: any;
    };
    Issue: {
        fields: any;
    };
    IssueType: {
        enumValues: {
            "error": number;
            "warning": number;
        };
    };
    MappingDetails: {
        fields: any;
    };
    ProcessTemplateType: {
        enumValues: {
            "custom": number;
            "default": number;
            "upgrade": number;
        };
    };
    PropertyValue: {
        fields: any;
    };
    QueryDeletedOption: {
        enumValues: {
            "excludeDeleted": number;
            "includeDeleted": number;
            "onlyDeleted": number;
        };
    };
    QueueOptions: {
        enumValues: {
            "none": number;
            "doNotRun": number;
        };
    };
    QueuePriority: {
        enumValues: {
            "low": number;
            "belowNormal": number;
            "normal": number;
            "aboveNormal": number;
            "high": number;
        };
    };
    RealtimeBuildEvent: {
        fields: any;
    };
    RequestReference: {
        fields: any;
    };
    RetentionPolicy: {
        fields: any;
    };
    Schedule: {
        fields: any;
    };
    ScheduleDays: {
        enumValues: {
            "none": number;
            "monday": number;
            "tuesday": number;
            "wednesday": number;
            "thursday": number;
            "friday": number;
            "saturday": number;
            "sunday": number;
            "all": number;
        };
    };
    ScheduleTrigger: {
        fields: any;
    };
    ServiceHostStatus: {
        enumValues: {
            "online": number;
            "offline": number;
        };
    };
    ShallowReference: {
        fields: any;
    };
    SvnMappingDetails: {
        fields: any;
    };
    SvnWorkspace: {
        fields: any;
    };
    TaskAgentPoolReference: {
        fields: any;
    };
    TaskDefinitionReference: {
        fields: any;
    };
    TaskOrchestrationPlanReference: {
        fields: any;
    };
    TaskResult: {
        enumValues: {
            "succeeded": number;
            "succeededWithIssues": number;
            "failed": number;
            "canceled": number;
            "skipped": number;
            "abandoned": number;
        };
    };
    Timeline: {
        fields: any;
    };
    TimelineRecord: {
        fields: any;
    };
    TimelineRecordState: {
        enumValues: {
            "pending": number;
            "inProgress": number;
            "completed": number;
        };
    };
    TimelineRecordsUpdatedEvent: {
        fields: any;
    };
    TimelineReference: {
        fields: any;
    };
    ValidationResult: {
        enumValues: {
            "oK": number;
            "warning": number;
            "error": number;
        };
    };
    WorkspaceMapping: {
        fields: any;
    };
    WorkspaceMappingType: {
        enumValues: {
            "map": number;
            "cloak": number;
        };
    };
    WorkspaceTemplate: {
        fields: any;
    };
    XamlBuildDefinition: {
        fields: any;
    };
};
}
declare module "TFS/Build/ExtensionContracts" {
import Build_Contracts = require("TFS/Build/Contracts");
/**
 * Interface defining the configuration that is shared between extension targeted at "ms.vss-build-web.build-results-view" and the host
 */
export interface IBuildResultsViewExtensionConfig {
    /**
     * Required if reacting to the current build.
     * More than one callback can be added, and all will be called.
     * It is important to have at least one callback, since that's how an extension can get information about the current build.
     */
    onBuildChanged: (handler: (build: Build_Contracts.Build) => void) => void;
    /**
     * Optional, If needed, this callback will be called when this particular extension is selected/displayed
     */
    onViewDisplayed: (onDisplayedCallBack: () => void) => void;
    /**
     * Optional, for a given tab id, which can be contribution id for tab or a well known tab id,
     * the corresponding tab is selected if the tab is visible.
     */
    selectTab: (tabId: string) => void;
}
/**
 * Existing tab ids in build results view
 */
export var BuildResultsViewTabIds: {
    Summary: string;
    Console: string;
    Logs: string;
    Timeline: string;
    Artifacts: string;
    XamlLog: string;
    XamlDiagnostics: string;
};
/**
 * Existing section ids in build results view's summary tab
 */
export var BuildResultsSummaryTabSectionIds: {
    BuildDetails: string;
    BuildIssues: string;
    AssociatedChangeset: string;
    DeploymentInformation: string;
    BuildTags: string;
    TestSummary: string;
    CodeCoverageSummary: string;
    AssociatedWorkItem: string;
};
}
declare module "TFS/Build/RestClient" {
import Contracts = require("TFS/Build/Contracts");
import VSS_Common_Contracts = require("VSS/WebApi/Contracts");
import VSS_WebApi = require("VSS/WebApi/RestClient");
/**
 * HTTP client for the Build REST APIs (API version 2.2).
 */
export class BuildHttpClient2_2 extends VSS_WebApi.VssHttpClient {
    static serviceInstanceId: string;
    constructor(rootRequestPath: string);
    /**
     * Associates an artifact with a build
     *
     * @param {Contracts.BuildArtifact} artifact
     * @param {number} buildId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.BuildArtifact>
     */
    createArtifact(artifact: Contracts.BuildArtifact, buildId: number, project?: string): IPromise<Contracts.BuildArtifact>;
    /**
     * Gets a specific artifact for a build
     *
     * @param {number} buildId
     * @param {string} artifactName
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.BuildArtifact>
     */
    getArtifact(buildId: number, artifactName: string, project?: string): IPromise<Contracts.BuildArtifact>;
    /**
     * Gets a specific artifact for a build, as a zip stream
     *
     * @param {number} buildId
     * @param {string} artifactName
     * @param {string} project - Project ID or project name
     * @return IPromise<ArrayBuffer>
     */
    getArtifactContentZip(buildId: number, artifactName: string, project?: string): IPromise<ArrayBuffer>;
    /**
     * Gets all artifacts for a build
     *
     * @param {number} buildId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.BuildArtifact[]>
     */
    getArtifacts(buildId: number, project?: string): IPromise<Contracts.BuildArtifact[]>;
    /**
     * @param {string} project
     * @param {number} definitionId
     * @param {string} branchName
     * @return IPromise<string>
     */
    getBadge(project: string, definitionId: number, branchName?: string): IPromise<string>;
    /**
     * @exemptedapi
     * [Preview API]
     *
     * @param {string} project - Project ID or project name
     * @param {string} repoType
     * @param {string} repoId
     * @param {string} branchName
     * @return IPromise<Contracts.BuildBadge>
     */
    getBuildBadge(project: string, repoType: string, repoId?: string, branchName?: string): IPromise<Contracts.BuildBadge>;
    /**
     * @exemptedapi
     * [Preview API]
     *
     * @param {string} project - Project ID or project name
     * @param {string} repoType
     * @param {string} repoId
     * @param {string} branchName
     * @return IPromise<string>
     */
    getBuildBadgeData(project: string, repoType: string, repoId?: string, branchName?: string): IPromise<string>;
    /**
     * Deletes a build
     *
     * @param {number} buildId
     * @param {string} project - Project ID or project name
     * @return IPromise<void>
     */
    deleteBuild(buildId: number, project?: string): IPromise<void>;
    /**
     * Gets a build
     *
     * @param {number} buildId
     * @param {string} project - Project ID or project name
     * @param {string} propertyFilters - A comma-delimited list of properties to include in the results
     * @return IPromise<Contracts.Build>
     */
    getBuild(buildId: number, project?: string, propertyFilters?: string): IPromise<Contracts.Build>;
    /**
     * Gets builds
     *
     * @param {string} project - Project ID or project name
     * @param {number[]} definitions - A comma-delimited list of definition ids
     * @param {number[]} queues - A comma-delimited list of queue ids
     * @param {string} buildNumber
     * @param {Date} minFinishTime
     * @param {Date} maxFinishTime
     * @param {string} requestedFor
     * @param {Contracts.BuildReason} reasonFilter
     * @param {Contracts.BuildStatus} statusFilter
     * @param {Contracts.BuildResult} resultFilter
     * @param {string[]} tagFilters - A comma-delimited list of tags
     * @param {string[]} properties - A comma-delimited list of properties to include in the results
     * @param {Contracts.DefinitionType} type - The definition type
     * @param {number} top - The maximum number of builds to retrieve
     * @param {string} continuationToken
     * @param {number} maxBuildsPerDefinition
     * @param {Contracts.QueryDeletedOption} deletedFilter
     * @param {Contracts.BuildQueryOrder} queryOrder
     * @param {string} branchName
     * @return IPromise<Contracts.Build[]>
     */
    getBuilds(project?: string, definitions?: number[], queues?: number[], buildNumber?: string, minFinishTime?: Date, maxFinishTime?: Date, requestedFor?: string, reasonFilter?: Contracts.BuildReason, statusFilter?: Contracts.BuildStatus, resultFilter?: Contracts.BuildResult, tagFilters?: string[], properties?: string[], type?: Contracts.DefinitionType, top?: number, continuationToken?: string, maxBuildsPerDefinition?: number, deletedFilter?: Contracts.QueryDeletedOption, queryOrder?: Contracts.BuildQueryOrder, branchName?: string): IPromise<Contracts.Build[]>;
    /**
     * Queues a build
     *
     * @param {Contracts.Build} build
     * @param {string} project - Project ID or project name
     * @param {boolean} ignoreWarnings
     * @param {string} checkInTicket
     * @return IPromise<Contracts.Build>
     */
    queueBuild(build: Contracts.Build, project?: string, ignoreWarnings?: boolean, checkInTicket?: string): IPromise<Contracts.Build>;
    /**
     * Updates a build
     *
     * @param {Contracts.Build} build
     * @param {number} buildId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.Build>
     */
    updateBuild(build: Contracts.Build, buildId: number, project?: string): IPromise<Contracts.Build>;
    /**
     * Gets the changes associated with a build
     *
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @param {string} continuationToken
     * @param {number} top - The maximum number of changes to return
     * @param {boolean} includeSourceChange
     * @return IPromise<Contracts.Change[]>
     */
    getBuildChanges(project: string, buildId: number, continuationToken?: string, top?: number, includeSourceChange?: boolean): IPromise<Contracts.Change[]>;
    /**
     * @exemptedapi
     * [Preview API] Gets the changes associated between given builds
     *
     * @param {string} project - Project ID or project name
     * @param {number} fromBuildId
     * @param {number} toBuildId
     * @param {number} top - The maximum number of changes to return
     * @return IPromise<Contracts.Change[]>
     */
    getChangesBetweenBuilds(project: string, fromBuildId?: number, toBuildId?: number, top?: number): IPromise<Contracts.Change[]>;
    /**
     * Gets a controller
     *
     * @param {number} controllerId
     * @return IPromise<Contracts.BuildController>
     */
    getBuildController(controllerId: number): IPromise<Contracts.BuildController>;
    /**
     * Gets controllers, optionally filtered by name
     *
     * @param {string} name
     * @return IPromise<Contracts.BuildController[]>
     */
    getBuildControllers(name?: string): IPromise<Contracts.BuildController[]>;
    /**
     * Creates a new definition
     *
     * @param {Contracts.BuildDefinition} definition
     * @param {string} project - Project ID or project name
     * @param {number} definitionToCloneId
     * @param {number} definitionToCloneRevision
     * @return IPromise<Contracts.BuildDefinition>
     */
    createDefinition(definition: Contracts.BuildDefinition, project?: string, definitionToCloneId?: number, definitionToCloneRevision?: number): IPromise<Contracts.BuildDefinition>;
    /**
     * Deletes a definition and all associated builds
     *
     * @param {number} definitionId
     * @param {string} project - Project ID or project name
     * @return IPromise<void>
     */
    deleteDefinition(definitionId: number, project?: string): IPromise<void>;
    /**
     * Gets a definition, optionally at a specific revision
     *
     * @param {number} definitionId
     * @param {string} project - Project ID or project name
     * @param {number} revision
     * @param {string[]} propertyFilters
     * @return IPromise<Contracts.DefinitionReference>
     */
    getDefinition(definitionId: number, project?: string, revision?: number, propertyFilters?: string[]): IPromise<Contracts.DefinitionReference>;
    /**
     * Gets definitions, optionally filtered by name
     *
     * @param {string} project - Project ID or project name
     * @param {string} name
     * @param {Contracts.DefinitionType} type
     * @param {string} repositoryId
     * @param {string} repositoryType
     * @param {Contracts.DefinitionQueryOrder} queryOrder
     * @param {number} top
     * @return IPromise<Contracts.DefinitionReference[]>
     */
    getDefinitions(project?: string, name?: string, type?: Contracts.DefinitionType, repositoryId?: string, repositoryType?: string, queryOrder?: Contracts.DefinitionQueryOrder, top?: number): IPromise<Contracts.DefinitionReference[]>;
    /**
     * Updates an existing definition
     *
     * @param {Contracts.BuildDefinition} definition
     * @param {number} definitionId
     * @param {string} project - Project ID or project name
     * @param {number} secretsSourceDefinitionId
     * @param {number} secretsSourceDefinitionRevision
     * @return IPromise<Contracts.BuildDefinition>
     */
    updateDefinition(definition: Contracts.BuildDefinition, definitionId: number, project?: string, secretsSourceDefinitionId?: number, secretsSourceDefinitionRevision?: number): IPromise<Contracts.BuildDefinition>;
    /**
     * Gets the deployment information associated with a build
     *
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @return IPromise<Contracts.Deployment[]>
     */
    getBuildDeployments(project: string, buildId: number): IPromise<Contracts.Deployment[]>;
    /**
     * Gets a log
     *
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @param {number} logId
     * @param {number} startLine
     * @param {number} endLine
     * @return IPromise<ArrayBuffer>
     */
    getBuildLog(project: string, buildId: number, logId: number, startLine?: number, endLine?: number): IPromise<ArrayBuffer>;
    /**
     * Gets logs for a build
     *
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @return IPromise<Contracts.BuildLog[]>
     */
    getBuildLogs(project: string, buildId: number): IPromise<Contracts.BuildLog[]>;
    /**
     * Gets logs for a build, as a zip stream
     *
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @return IPromise<ArrayBuffer>
     */
    getBuildLogsZip(project: string, buildId: number): IPromise<ArrayBuffer>;
    /**
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.BuildOptionDefinition[]>
     */
    getBuildOptionDefinitions(project?: string): IPromise<Contracts.BuildOptionDefinition[]>;
    /**
     * Creates a build queue
     *
     * @param {Contracts.AgentPoolQueue} queue
     * @return IPromise<Contracts.AgentPoolQueue>
     */
    createQueue(queue: Contracts.AgentPoolQueue): IPromise<Contracts.AgentPoolQueue>;
    /**
     * Deletes a build queue
     *
     * @param {number} id
     * @return IPromise<void>
     */
    deleteQueue(id: number): IPromise<void>;
    /**
     * Gets a queue
     *
     * @param {number} controllerId
     * @return IPromise<Contracts.AgentPoolQueue>
     */
    getAgentPoolQueue(controllerId: number): IPromise<Contracts.AgentPoolQueue>;
    /**
     * Gets queues, optionally filtered by name
     *
     * @param {string} name
     * @return IPromise<Contracts.AgentPoolQueue[]>
     */
    getQueues(name?: string): IPromise<Contracts.AgentPoolQueue[]>;
    /**
     * @exemptedapi
     * [Preview API] Gets report for a build
     *
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @param {string} type
     * @return IPromise<Contracts.BuildReportMetadata>
     */
    getBuildReport(project: string, buildId: number, type?: string): IPromise<Contracts.BuildReportMetadata>;
    /**
     * @exemptedapi
     * [Preview API] Gets report for a build
     *
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @param {string} type
     * @return IPromise<any>
     */
    getBuildReportHtmlContent(project: string, buildId: number, type?: string): IPromise<any>;
    /**
     * @exemptedapi
     * [Preview API]
     *
     * @return IPromise<Contracts.BuildResourceUsage>
     */
    getResourceUsage(): IPromise<Contracts.BuildResourceUsage>;
    /**
     * Gets revisions of a definition
     *
     * @param {string} project - Project ID or project name
     * @param {number} definitionId
     * @return IPromise<Contracts.BuildDefinitionRevision[]>
     */
    getDefinitionRevisions(project: string, definitionId: number): IPromise<Contracts.BuildDefinitionRevision[]>;
    /**
     * Gets the build settings
     *
     * @return IPromise<Contracts.BuildSettings>
     */
    getBuildSettings(): IPromise<Contracts.BuildSettings>;
    /**
     * Updates the build settings
     *
     * @param {Contracts.BuildSettings} settings
     * @return IPromise<Contracts.BuildSettings>
     */
    updateBuildSettings(settings: Contracts.BuildSettings): IPromise<Contracts.BuildSettings>;
    /**
     * Adds a tag to a build
     *
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @param {string} tag
     * @return IPromise<string[]>
     */
    addBuildTag(project: string, buildId: number, tag: string): IPromise<string[]>;
    /**
     * Adds tags to a build
     *
     * @param {string[]} tags
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @return IPromise<string[]>
     */
    addBuildTags(tags: string[], project: string, buildId: number): IPromise<string[]>;
    /**
     * Deletes a tag from a build
     *
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @param {string} tag
     * @return IPromise<string[]>
     */
    deleteBuildTag(project: string, buildId: number, tag: string): IPromise<string[]>;
    /**
     * Gets the tags for a build
     *
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @return IPromise<string[]>
     */
    getBuildTags(project: string, buildId: number): IPromise<string[]>;
    /**
     * @param {string} project - Project ID or project name
     * @return IPromise<string[]>
     */
    getTags(project: string): IPromise<string[]>;
    /**
     * Deletes a definition template
     *
     * @param {string} project - Project ID or project name
     * @param {string} templateId
     * @return IPromise<void>
     */
    deleteTemplate(project: string, templateId: string): IPromise<void>;
    /**
     * Gets definition template filtered by id
     *
     * @param {string} project - Project ID or project name
     * @param {string} templateId
     * @return IPromise<Contracts.BuildDefinitionTemplate>
     */
    getTemplate(project: string, templateId: string): IPromise<Contracts.BuildDefinitionTemplate>;
    /**
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.BuildDefinitionTemplate[]>
     */
    getTemplates(project: string): IPromise<Contracts.BuildDefinitionTemplate[]>;
    /**
     * Saves a definition template
     *
     * @param {Contracts.BuildDefinitionTemplate} template
     * @param {string} project - Project ID or project name
     * @param {string} templateId
     * @return IPromise<Contracts.BuildDefinitionTemplate>
     */
    saveTemplate(template: Contracts.BuildDefinitionTemplate, project: string, templateId: string): IPromise<Contracts.BuildDefinitionTemplate>;
    /**
     * Gets details for a build
     *
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @param {string} timelineId
     * @param {number} changeId
     * @return IPromise<Contracts.Timeline>
     */
    getBuildTimeline(project: string, buildId: number, timelineId?: string, changeId?: number): IPromise<Contracts.Timeline>;
    /**
     * Gets the work item ids associated with a build
     *
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @param {number} top - The maximum number of work items to return
     * @return IPromise<VSS_Common_Contracts.ResourceRef[]>
     */
    getBuildWorkItemsRefs(project: string, buildId: number, top?: number): IPromise<VSS_Common_Contracts.ResourceRef[]>;
    /**
     * Gets the work item ids associated with build commits
     *
     * @param {string[]} commitIds
     * @param {string} project - Project ID or project name
     * @param {number} buildId
     * @param {number} top - The maximum number of work items to return, also number of commits to consider if commitIds are not sent
     * @return IPromise<VSS_Common_Contracts.ResourceRef[]>
     */
    getBuildWorkItemsRefsFromCommits(commitIds: string[], project: string, buildId: number, top?: number): IPromise<VSS_Common_Contracts.ResourceRef[]>;
    /**
     * @exemptedapi
     * [Preview API] Gets all the work item ids in between fromBuildId to toBuildId
     *
     * @param {string} project - Project ID or project name
     * @param {number} fromBuildId
     * @param {number} toBuildId
     * @param {number} top - The maximum number of work items to return
     * @return IPromise<VSS_Common_Contracts.ResourceRef[]>
     */
    getWorkItemsBetweenBuilds(project: string, fromBuildId: number, toBuildId: number, top?: number): IPromise<VSS_Common_Contracts.ResourceRef[]>;
}
/**
 * Alias for the latest released Build client version.
 */
export class BuildHttpClient extends BuildHttpClient2_2 {
    constructor(rootRequestPath: string);
}
/**
 * Gets an http client targeting the latest released version of the APIs.
 *
 * @return BuildHttpClient2_2
 */
export function getClient(): BuildHttpClient2_2;
}
declare module "TFS/Core/Contracts" {
import VSS_Common_Contracts = require("VSS/WebApi/Contracts");
/**
 * The kind of a connected service.
 */
export enum ConnectedServiceKind {
    /**
     * Custom or unknown service
     */
    Custom = 0,
    /**
     * Azure Subscription
     */
    AzureSubscription = 1,
    /**
     * Chef Connection
     */
    Chef = 2,
    /**
     * Generic Connection
     */
    Generic = 3,
}
/**
 * A set of identity ids.
 */
export interface IdentityData {
    identityIds: string[];
}
/**
 * A process, extending the shallow process reference.
 */
export interface Process extends ProcessReference {
    /**
     * The links to other objects related to this object
     */
    _links: any;
    description: string;
    id: string;
    isDefault: boolean;
    type: ProcessType;
}
/**
 * Shallow reference to a process.
 */
export interface ProcessReference {
    name: string;
    url: string;
}
/**
 * The type of a process.
 */
export enum ProcessType {
    System = 0,
    Custom = 1,
    Inherited = 2,
}
/**
 * The kind of change made to a project.
 */
export enum ProjectChangeType {
    Modified = 0,
    Deleted = 1,
    Added = 2,
}
/**
 * Contains information of the project
 */
export interface ProjectInfo {
    abbreviation: string;
    description: string;
    id: string;
    lastUpdateTime: Date;
    name: string;
    properties: ProjectProperty[];
    /**
     * Current revision of the project
     */
    revision: number;
    state: any;
    uri: string;
    version: number;
}
/**
 * A message describing a change to a project.
 */
export interface ProjectMessage {
    project: ProjectInfo;
    projectChangeType: ProjectChangeType;
}
/**
 * A name/value property of a project.
 */
export interface ProjectProperty {
    name: string;
    value: string;
}
/**
 * Represents a proxy server.
 */
export interface Proxy {
    /**
     * This is a description string
     */
    description: string;
    /**
     * The friendly name of the server
     */
    friendlyName: string;
    globalDefault: boolean;
    /**
     * This is a string representation of the site that the proxy server is located in (e.g. "NA-WA-RED")
     */
    site: string;
    siteDefault: boolean;
    /**
     * The URL of the proxy server
     */
    url: string;
}
/**
 * Supported source control types.
 */
export enum SourceControlTypes {
    Tfvc = 1,
    Git = 2,
}
/**
 * The Team Context for an operation.
 */
export interface TeamContext {
    /**
     * The team project Id or name. Ignored if ProjectId is set.
     */
    project: string;
    /**
     * The Team Project ID. Required if Project is not set.
     */
    projectId: string;
    /**
     * The Team Id or name. Ignored if TeamId is set.
     */
    team: string;
    /**
     * The Team Id
     */
    teamId: string;
}
/**
 * Represents a Team Project object.
 */
export interface TeamProject extends TeamProjectReference {
    /**
     * The links to other objects related to this object.
     */
    _links: any;
    /**
     * Set of capabilities this project has (such as process template & version control).
     */
    capabilities: {
        [key: string]: {
            [key: string]: string;
        };
    };
    /**
     * The shallow ref to the default team.
     */
    defaultTeam: WebApiTeamRef;
}
/**
 * Data contract for a TeamProjectCollection.
 */
export interface TeamProjectCollection extends TeamProjectCollectionReference {
    /**
     * The links to other objects related to this object.
     */
    _links: any;
    /**
     * Project collection description.
     */
    description: string;
    /**
     * Project collection state.
     */
    state: string;
}
/**
 * Reference object for a TeamProjectCollection.
 */
export interface TeamProjectCollectionReference {
    /**
     * Collection Id.
     */
    id: string;
    /**
     * Collection Name.
     */
    name: string;
    /**
     * Collection REST Url.
     */
    url: string;
}
/**
 * Represents a shallow reference to a TeamProject.
 */
export interface TeamProjectReference {
    /**
     * Project abbreviation.
     */
    abbreviation: string;
    /**
     * The project's description (if any).
     */
    description: string;
    /**
     * Project identifier.
     */
    id: string;
    /**
     * Project name.
     */
    name: string;
    /**
     * Project revision.
     */
    revision: number;
    /**
     * Project state.
     */
    state: any;
    /**
     * Url to the full version of the object.
     */
    url: string;
}
/**
 * A connected service, extending the shallow connected service reference.
 */
export interface WebApiConnectedService extends WebApiConnectedServiceRef {
    /**
     * The user who did the OAuth authentication to created this service
     */
    authenticatedBy: VSS_Common_Contracts.IdentityRef;
    /**
     * Extra description on the service.
     */
    description: string;
    /**
     * Friendly Name of service connection
     */
    friendlyName: string;
    /**
     * Id/Name of the connection service. For Ex: Subscription Id for Azure Connection
     */
    id: string;
    /**
     * The kind of service.
     */
    kind: string;
    /**
     * The project associated with this service
     */
    project: TeamProjectReference;
    /**
     * Optional uri to connect directly to the service such as https://windows.azure.com
     */
    serviceUri: string;
}
/**
 * Details (including credentials) of a connected service.
 */
export interface WebApiConnectedServiceDetails extends WebApiConnectedServiceRef {
    /**
     * Meta data for service connection
     */
    connectedServiceMetaData: WebApiConnectedService;
    /**
     * Credential info
     */
    credentialsXml: string;
    /**
     * Optional uri to connect directly to the service such as https://windows.azure.com
     */
    endPoint: string;
}
/**
 * Shallow reference to a connected service.
 */
export interface WebApiConnectedServiceRef {
    id: string;
    url: string;
}
/**
 * The representation of data needed to create a tag definition which is sent across the wire.
 */
export interface WebApiCreateTagRequestData {
    name: string;
}
/**
 * A project, extending the shallow project reference.
 */
export interface WebApiProject extends TeamProjectReference {
    /**
     * Set of capabilities this project has
     */
    capabilities: {
        [key: string]: {
            [key: string]: string;
        };
    };
    /**
     * Reference to collection which contains this project
     */
    collection: WebApiProjectCollectionRef;
    /**
     * Default team for this project
     */
    defaultTeam: WebApiTeamRef;
}
/**
 * A project collection, extending the shallow collection reference.
 */
export interface WebApiProjectCollection extends WebApiProjectCollectionRef {
    /**
     * Project collection description
     */
    description: string;
    /**
     * Project collection state
     */
    state: string;
}
/**
 * Shallow reference to a project collection.
 */
export interface WebApiProjectCollectionRef {
    /**
     * Collection Tfs Url (Host Url)
     */
    collectionUrl: string;
    /**
     * Collection Guid
     */
    id: string;
    /**
     * Collection Name
     */
    name: string;
    /**
     * Collection REST Url
     */
    url: string;
}
/**
 * The representation of a tag definition which is sent across the wire.
 */
export interface WebApiTagDefinition {
    active: boolean;
    id: string;
    name: string;
    url: string;
}
/**
 * A team, extending the shallow team reference.
 */
export interface WebApiTeam extends WebApiTeamRef {
    /**
     * Team description
     */
    description: string;
    /**
     * Identity REST API Url to this team
     */
    identityUrl: string;
}
/**
 * Shallow reference to a team.
 */
export interface WebApiTeamRef {
    /**
     * Team (Identity) Guid. A Team Foundation ID.
     */
    id: string;
    /**
     * Team name
     */
    name: string;
    /**
     * Team REST API Url
     */
    url: string;
}
/**
 * Serialization metadata for the contract types in this module: `fields` entries
 * describe field-level (de)serialization info, `enumValues` entries map
 * camel-cased wire names to their numeric enum values.
 */
export var TypeInfo: {
    ConnectedServiceKind: {
        enumValues: {
            "custom": number;
            "azureSubscription": number;
            "chef": number;
            "generic": number;
        };
    };
    IdentityData: {
        fields: any;
    };
    Process: {
        fields: any;
    };
    ProcessReference: {
        fields: any;
    };
    ProcessType: {
        enumValues: {
            "system": number;
            "custom": number;
            "inherited": number;
        };
    };
    ProjectChangeType: {
        enumValues: {
            "modified": number;
            "deleted": number;
            "added": number;
        };
    };
    ProjectInfo: {
        fields: any;
    };
    ProjectMessage: {
        fields: any;
    };
    ProjectProperty: {
        fields: any;
    };
    Proxy: {
        fields: any;
    };
    SourceControlTypes: {
        enumValues: {
            "tfvc": number;
            "git": number;
        };
    };
    TeamContext: {
        fields: any;
    };
    TeamProject: {
        fields: any;
    };
    TeamProjectCollection: {
        fields: any;
    };
    TeamProjectCollectionReference: {
        fields: any;
    };
    TeamProjectReference: {
        fields: any;
    };
    WebApiConnectedService: {
        fields: any;
    };
    WebApiConnectedServiceDetails: {
        fields: any;
    };
    WebApiConnectedServiceRef: {
        fields: any;
    };
    WebApiCreateTagRequestData: {
        fields: any;
    };
    WebApiProject: {
        fields: any;
    };
    WebApiProjectCollection: {
        fields: any;
    };
    WebApiProjectCollectionRef: {
        fields: any;
    };
    WebApiTagDefinition: {
        fields: any;
    };
    WebApiTeam: {
        fields: any;
    };
    WebApiTeamRef: {
        fields: any;
    };
};
}
declare module "TFS/Core/RestClient" {
    import Contracts = require("TFS/Core/Contracts");
    import VSS_Common_Contracts = require("VSS/WebApi/Contracts");
    import VSS_Operations_Contracts = require("VSS/Operations/Contracts");
    import VSS_WebApi = require("VSS/WebApi/RestClient");
    /**
     * HTTP client for the Core-area REST APIs: connected services, identity
     * MRUs, team members, processes, project collections, projects, proxies
     * and teams.
     */
    export class CoreHttpClient2_2 extends VSS_WebApi.VssHttpClient {
        // NOTE(review): presumably the GUID identifying the service instance
        // this client targets (used for request routing) — confirm.
        static serviceInstanceId: string;
        constructor(rootRequestPath: string);
        /**
         * @exemptedapi
         * [Preview API]
         *
         * @param {Contracts.WebApiConnectedServiceDetails} connectedServiceCreationData
         * @param {string} projectId
         * @return IPromise<Contracts.WebApiConnectedService>
         */
        createConnectedService(connectedServiceCreationData: Contracts.WebApiConnectedServiceDetails, projectId: string): IPromise<Contracts.WebApiConnectedService>;
        /**
         * @exemptedapi
         * [Preview API]
         *
         * @param {string} projectId
         * @param {string} name
         * @return IPromise<Contracts.WebApiConnectedServiceDetails>
         */
        getConnectedServiceDetails(projectId: string, name: string): IPromise<Contracts.WebApiConnectedServiceDetails>;
        /**
         * @exemptedapi
         * [Preview API]
         *
         * @param {string} projectId
         * @param {Contracts.ConnectedServiceKind} kind
         * @return IPromise<Contracts.WebApiConnectedService[]>
         */
        getConnectedServices(projectId: string, kind?: Contracts.ConnectedServiceKind): IPromise<Contracts.WebApiConnectedService[]>;
        /**
         * @exemptedapi
         * [Preview API]
         *
         * @param {Contracts.IdentityData} mruData
         * @param {string} mruName
         * @return IPromise<void>
         */
        createIdentityMru(mruData: Contracts.IdentityData, mruName: string): IPromise<void>;
        /**
         * @exemptedapi
         * [Preview API]
         *
         * @param {Contracts.IdentityData} mruData
         * @param {string} mruName
         * @return IPromise<void>
         */
        deleteIdentityMru(mruData: Contracts.IdentityData, mruName: string): IPromise<void>;
        /**
         * @exemptedapi
         * [Preview API]
         *
         * @param {string} mruName
         * @return IPromise<VSS_Common_Contracts.IdentityRef[]>
         */
        getIdentityMru(mruName: string): IPromise<VSS_Common_Contracts.IdentityRef[]>;
        /**
         * @exemptedapi
         * [Preview API]
         *
         * @param {Contracts.IdentityData} mruData
         * @param {string} mruName
         * @return IPromise<void>
         */
        updateIdentityMru(mruData: Contracts.IdentityData, mruName: string): IPromise<void>;
        /**
         * @param {string} projectId
         * @param {string} teamId
         * @param {number} top
         * @param {number} skip
         * @return IPromise<VSS_Common_Contracts.IdentityRef[]>
         */
        getTeamMembers(projectId: string, teamId: string, top?: number, skip?: number): IPromise<VSS_Common_Contracts.IdentityRef[]>;
        /**
         * Retrieve process by id
         *
         * @param {string} processId
         * @return IPromise<Contracts.Process>
         */
        getProcessById(processId: string): IPromise<Contracts.Process>;
        /**
         * @return IPromise<Contracts.Process[]>
         */
        getProcesses(): IPromise<Contracts.Process[]>;
        /**
         * Get project collection with the specified id or name.
         *
         * @param {string} collectionId
         * @return IPromise<Contracts.TeamProjectCollection>
         */
        getProjectCollection(collectionId: string): IPromise<Contracts.TeamProjectCollection>;
        /**
         * Get project collection references for this application.
         *
         * @param {number} top
         * @param {number} skip
         * @return IPromise<Contracts.TeamProjectCollectionReference[]>
         */
        getProjectCollections(top?: number, skip?: number): IPromise<Contracts.TeamProjectCollectionReference[]>;
        /**
         * @exemptedapi
         * [Preview API]
         *
         * @param {number} minRevision
         * @return IPromise<Contracts.TeamProjectReference[]>
         */
        getProjectHistory(minRevision?: number): IPromise<Contracts.TeamProjectReference[]>;
        /**
         * Get project with the specified id or name, optionally including capabilities.
         *
         * @param {string} projectId
         * @param {boolean} includeCapabilities - Include capabilities (such as source control) in the team project result (default: false).
         * @param {boolean} includeHistory - Search within renamed projects (that had such name in the past).
         * @return IPromise<Contracts.TeamProject>
         */
        getProject(projectId: string, includeCapabilities?: boolean, includeHistory?: boolean): IPromise<Contracts.TeamProject>;
        /**
         * Get project references with the specified state
         *
         * @param {any} stateFilter - Filter on team projects in a specific team project state (default: WellFormed).
         * @param {number} top
         * @param {number} skip
         * @return IPromise<Contracts.TeamProjectReference[]>
         */
        getProjects(stateFilter?: any, top?: number, skip?: number): IPromise<Contracts.TeamProjectReference[]>;
        /**
         * Queue a project creation.
         *
         * @param {Contracts.TeamProject} projectToCreate - The project to create.
         * @return IPromise<VSS_Operations_Contracts.OperationReference>
         */
        queueCreateProject(projectToCreate: Contracts.TeamProject): IPromise<VSS_Operations_Contracts.OperationReference>;
        /**
         * Queue a project deletion.
         *
         * @param {string} projectId - The project id of the project to delete.
         * @return IPromise<VSS_Operations_Contracts.OperationReference>
         */
        queueDeleteProject(projectId: string): IPromise<VSS_Operations_Contracts.OperationReference>;
        /**
         * Update an existing project's name, abbreviation, or description.
         *
         * @param {Contracts.TeamProject} projectUpdate - The updates for the project.
         * @param {string} projectId - The project id of the project to update.
         * @return IPromise<VSS_Operations_Contracts.OperationReference>
         */
        updateProject(projectUpdate: Contracts.TeamProject, projectId: string): IPromise<VSS_Operations_Contracts.OperationReference>;
        /**
         * @exemptedapi
         * [Preview API]
         *
         * @param {string} proxyUrl
         * @return IPromise<Contracts.Proxy[]>
         */
        getProxies(proxyUrl?: string): IPromise<Contracts.Proxy[]>;
        /**
         * Creates a team
         *
         * @param {Contracts.WebApiTeam} team - The team data used to create the team.
         * @param {string} projectId - The name or id (GUID) of the team project in which to create the team.
         * @return IPromise<Contracts.WebApiTeam>
         */
        createTeam(team: Contracts.WebApiTeam, projectId: string): IPromise<Contracts.WebApiTeam>;
        /**
         * Deletes a team
         *
         * @param {string} projectId - The name or id (GUID) of the team project containing the team to delete.
         * @param {string} teamId - The name of id of the team to delete.
         * @return IPromise<void>
         */
        deleteTeam(projectId: string, teamId: string): IPromise<void>;
        /**
         * Gets a team
         *
         * @param {string} projectId
         * @param {string} teamId
         * @return IPromise<Contracts.WebApiTeam>
         */
        getTeam(projectId: string, teamId: string): IPromise<Contracts.WebApiTeam>;
        /**
         * @param {string} projectId
         * @param {number} top
         * @param {number} skip
         * @return IPromise<Contracts.WebApiTeam[]>
         */
        getTeams(projectId: string, top?: number, skip?: number): IPromise<Contracts.WebApiTeam[]>;
        /**
         * Updates a team's name and/or description
         *
         * @param {Contracts.WebApiTeam} teamData
         * @param {string} projectId - The name or id (GUID) of the team project containing the team to update.
         * @param {string} teamId - The name of id of the team to update.
         * @return IPromise<Contracts.WebApiTeam>
         */
        updateTeam(teamData: Contracts.WebApiTeam, projectId: string, teamId: string): IPromise<Contracts.WebApiTeam>;
    }
    /**
     * Unversioned alias for the newest client; currently derives from
     * CoreHttpClient2_2 without adding members.
     */
    export class CoreHttpClient extends CoreHttpClient2_2 {
        constructor(rootRequestPath: string);
    }
    /**
     * Gets an http client targeting the latest released version of the APIs.
     *
     * @return CoreHttpClient2_2
     */
    export function getClient(): CoreHttpClient2_2;
}
declare module "TFS/DistributedTask/Contracts" {
    import VSS_Common_Contracts = require("VSS/WebApi/Contracts");
    import VSS_FormInput_Contracts = require("VSS/Common/Contracts/FormInput");
    /**
     * Event payload carrying a TaskAgentPool and an event type string.
     */
    export interface AgentPoolEvent {
        eventType: string;
        pool: TaskAgentPool;
    }
    /**
     * Event payload carrying a TaskAgentQueue and an event type string.
     */
    export interface AgentQueueEvent {
        eventType: string;
        queue: TaskAgentQueue;
    }
    /**
     * Message addressed to one agent (by id) with a timeout value.
     */
    export interface AgentRefreshMessage {
        agentId: number;
        timeout: any;
    }
    /**
     * A simple name/value header pair.
     */
    export interface AuthorizationHeader {
        name: string;
        value: string;
    }
    export enum ConnectedServiceKind {
        /**
         * Custom or unknown service
         */
        Custom = 0,
        /**
         * Azure Subscription
         */
        AzureSubscription = 1,
        /**
         * Chef Connection
         */
        Chef = 2,
        /**
         * Generic Connection
         */
        Generic = 3,
        /**
         * GitHub Connection
         */
        GitHub = 4,
    }
    /**
     * A named data source: an endpoint URL plus a result selector.
     */
    export interface DataSource {
        endpointUrl: string;
        name: string;
        resultSelector: string;
    }
    /**
     * Binds a named data source on a service endpoint to a target, with
     * request parameters and a template for shaping results.
     */
    export interface DataSourceBinding {
        dataSourceName: string;
        endpointId: string;
        parameters: {
            [key: string]: string;
        };
        resultTemplate: string;
        target: string;
    }
    /**
     * An authorization scheme name plus its scheme-specific parameters.
     */
    export interface EndpointAuthorization {
        parameters: {
            [key: string]: string;
        };
        scheme: string;
    }
    /**
     * An endpoint URL value with its display name and help text.
     */
    export interface EndpointUrl {
        displayName: string;
        helpText: string;
        value: string;
    }
    /**
     * A help link: display text and target URL.
     */
    export interface HelpLink {
        text: string;
        url: string;
    }
    /**
     * An issue (error or warning): category, message, severity and data.
     */
    export interface Issue {
        category: string;
        data: {
            [key: string]: string;
        };
        message: string;
        type: IssueType;
    }
    /**
     * Severity of an Issue.
     */
    export enum IssueType {
        Error = 1,
        Warning = 2,
    }
    /**
     * Job event carrying the request that was assigned.
     */
    export interface JobAssignedEvent extends JobEvent {
        request: TaskAgentJobRequest;
    }
    /**
     * Message requesting cancellation of a job, with a timeout value.
     */
    export interface JobCancelMessage {
        jobId: string;
        timeout: any;
    }
    /**
     * Job event carrying the completed request id and its result.
     */
    export interface JobCompletedEvent extends JobEvent {
        requestId: number;
        result: TaskResult;
    }
    /**
     * Represents the context of variables and vectors for a job request.
     */
    export interface JobEnvironment {
        endpoints: ServiceEndpoint[];
        mask: MaskHint[];
        options: {
            [key: string]: JobOption;
        };
        /**
         * Gets or sets the endpoint used for communicating back to the calling service.
         */
        systemConnection: ServiceEndpoint;
        variables: {
            [key: string]: string;
        };
    }
    /**
     * Base shape shared by job events: the job id and the event name.
     */
    export interface JobEvent {
        jobId: string;
        name: string;
    }
    /**
     * Represents an option that may affect the way an agent runs the job.
     */
    export interface JobOption {
        data: {
            [key: string]: string;
        };
        /**
         * Gets the id of the option.
         */
        id: string;
    }
    /**
     * Message describing a job to run: its environment, plan reference,
     * tasks, timeline and lock information.
     */
    export interface JobRequestMessage {
        environment: JobEnvironment;
        jobId: string;
        jobName: string;
        lockedUntil: Date;
        lockToken: string;
        plan: TaskOrchestrationPlanReference;
        requestId: number;
        tasks: TaskInstance[];
        timeline: TimelineReference;
    }
    /**
     * A masking hint: the mask type and the value to mask.
     */
    export interface MaskHint {
        type: MaskType;
        value: string;
    }
    /**
     * How a MaskHint value is interpreted.
     */
    export enum MaskType {
        Variable = 1,
        Regex = 2,
    }
    /**
     * Environment for an orchestration plan: masks, options and variables.
     */
    export interface PlanEnvironment {
        mask: MaskHint[];
        options: {
            [key: string]: JobOption;
        };
        variables: {
            [key: string]: string;
        };
    }
    /**
     * Represents an endpoint which may be used by an orchestration job.
     */
    export interface ServiceEndpoint {
        administratorsGroup: VSS_Common_Contracts.IdentityRef;
        /**
         * Gets or sets the authorization data for talking to the endpoint.
         */
        authorization: EndpointAuthorization;
        /**
         * The Gets or sets Identity reference for the user who created the Service endpoint
         */
        createdBy: VSS_Common_Contracts.IdentityRef;
        data: {
            [key: string]: string;
        };
        /**
         * Gets or Sets description of endpoint
         */
        description: string;
        groupScopeId: string;
        /**
         * Gets or sets the identifier of this endpoint.
         */
        id: string;
        /**
         * Gets or sets the friendly name of the endpoint.
         */
        name: string;
        readersGroup: VSS_Common_Contracts.IdentityRef;
        /**
         * Gets or sets the type of the endpoint.
         */
        type: string;
        /**
         * Gets or sets the url of the endpoint.
         */
        url: string;
    }
    /**
     * An authentication scheme supported by a service endpoint type:
     * headers, input descriptors and the scheme name.
     */
    export interface ServiceEndpointAuthenticationScheme {
        authorizationHeaders: AuthorizationHeader[];
        displayName: string;
        endpointHeaders: AuthorizationHeader[];
        inputDescriptors: VSS_FormInput_Contracts.InputDescriptor[];
        scheme: string;
    }
    /**
     * Describes a type of service endpoint: its authentication schemes,
     * data sources, URL shape and help content.
     */
    export interface ServiceEndpointType {
        authenticationSchemes: ServiceEndpointAuthenticationScheme[];
        dataSources: DataSource[];
        description: string;
        displayName: string;
        endpointUrl: EndpointUrl;
        helpLink: HelpLink;
        helpMarkDown: string;
    }
declare module "TFS/DistributedTask/TaskAgentRestClient" {
import Contracts = require("TFS/DistributedTask/Contracts");
import VSS_Common_Contracts = require("VSS/WebApi/Contracts");
import VSS_WebApi = require("VSS/WebApi/RestClient");
export class TaskAgentHttpClient2_2 extends VSS_WebApi.VssHttpClient {
constructor(rootRequestPath: string);
/**
* @param {Contracts.TaskAgent} agent
* @param {number} poolId
* @return IPromise<Contracts.TaskAgent>
*/
addAgent(agent: Contracts.TaskAgent, poolId: number): IPromise<Contracts.TaskAgent>;
/**
* @param {number} poolId
* @param {number} agentId
* @return IPromise<void>
*/
deleteAgent(poolId: number, agentId: number): IPromise<void>;
/**
* @param {number} poolId
* @param {number} agentId
* @param {boolean} includeCapabilities
* @param {boolean} includeAssignedRequest
* @param {string[]} propertyFilters
* @return IPromise<Contracts.TaskAgent>
*/
getAgent(poolId: number, agentId: number, includeCapabilities?: boolean, includeAssignedRequest?: boolean, propertyFilters?: string[]): IPromise<Contracts.TaskAgent>;
/**
* @param {number} poolId
* @param {string} agentName
* @param {boolean} includeCapabilities
* @param {boolean} includeAssignedRequest
* @param {string[]} propertyFilters
* @param {string[]} demands
* @return IPromise<Contracts.TaskAgent[]>
*/
getAgents(poolId: number, agentName?: string, includeCapabilities?: boolean, includeAssignedRequest?: boolean, propertyFilters?: string[], demands?: string[]): IPromise<Contracts.TaskAgent[]>;
/**
* @param {Contracts.TaskAgent} agent
* @param {number} poolId
* @param {number} agentId
* @return IPromise<Contracts.TaskAgent>
*/
replaceAgent(agent: Contracts.TaskAgent, poolId: number, agentId: number): IPromise<Contracts.TaskAgent>;
/**
* @param {Contracts.TaskAgent} agent
* @param {number} poolId
* @param {number} agentId
* @return IPromise<Contracts.TaskAgent>
*/
updateAgent(agent: Contracts.TaskAgent, poolId: number, agentId: number): IPromise<Contracts.TaskAgent>;
/**
* Proxy for a GET request defined by an 'endpoint'. The request is authorized using a service connection. The response is filtered using an XPath/Json based selector.
*
* @param {Contracts.TaskDefinitionEndpoint} endpoint - Describes the URL to fetch.
* @return IPromise<string[]>
*/
queryEndpoint(endpoint: Contracts.TaskDefinitionEndpoint): IPromise<string[]>;
/**
* @param {number} poolId
* @param {number} requestId
* @param {string} lockToken
* @return IPromise<void>
*/
deleteAgentRequest(poolId: number, requestId: number, lockToken: string): IPromise<void>;
/**
* @param {number} poolId
* @param {number} requestId
* @return IPromise<Contracts.TaskAgentJobRequest>
*/
getAgentRequest(poolId: number, requestId: number): IPromise<Contracts.TaskAgentJobRequest>;
/**
* @param {number} poolId
* @param {number} agentId
* @param {number} completedRequestCount
* @return IPromise<Contracts.TaskAgentJobRequest[]>
*/
getAgentRequestsForAgent(poolId: number, agentId: number, completedRequestCount?: number): IPromise<Contracts.TaskAgentJobRequest[]>;
/**
* @param {number} poolId
* @param {number[]} agentIds
* @param {number} completedRequestCount
* @return IPromise<Contracts.TaskAgentJobRequest[]>
*/
getAgentRequestsForAgents(poolId: number, agentIds: number[], completedRequestCount?: number): IPromise<Contracts.TaskAgentJobRequest[]>;
/**
* @param {number} poolId
* @param {string} planId
* @param {string} jobId
* @return IPromise<Contracts.TaskAgentJobRequest[]>
*/
getAgentRequestsForPlan(poolId: number, planId: string, jobId?: string): IPromise<Contracts.TaskAgentJobRequest[]>;
/**
* @param {Contracts.TaskAgentJobRequest} request
* @param {number} poolId
* @return IPromise<Contracts.TaskAgentJobRequest>
*/
queueAgentRequest(request: Contracts.TaskAgentJobRequest, poolId: number): IPromise<Contracts.TaskAgentJobRequest>;
/**
* @param {Contracts.TaskAgentJobRequest} request
* @param {number} poolId
* @param {number} requestId
* @param {string} lockToken
* @return IPromise<Contracts.TaskAgentJobRequest>
*/
updateAgentRequest(request: Contracts.TaskAgentJobRequest, poolId: number, requestId: number, lockToken: string): IPromise<Contracts.TaskAgentJobRequest>;
/**
* @param {number} poolId
* @param {number} messageId
* @param {string} sessionId
* @return IPromise<void>
*/
deleteMessage(poolId: number, messageId: number, sessionId: string): IPromise<void>;
/**
* @param {number} poolId
* @param {string} sessionId
* @param {number} lastMessageId
* @return IPromise<Contracts.TaskAgentMessage>
*/
getMessage(poolId: number, sessionId: string, lastMessageId?: number): IPromise<Contracts.TaskAgentMessage>;
/**
* @param {number} poolId
* @param {number} agentId
* @return IPromise<void>
*/
refreshAgent(poolId: number, agentId: number): IPromise<void>;
/**
* @param {number} poolId
* @return IPromise<void>
*/
refreshAgents(poolId: number): IPromise<void>;
/**
* @param {Contracts.TaskAgentMessage} message
* @param {number} poolId
* @param {number} requestId
* @return IPromise<void>
*/
sendMessage(message: Contracts.TaskAgentMessage, poolId: number, requestId: number): IPromise<void>;
/**
* This method can return packages/{packageType} -- package stream OR TaskPackageMetadata if requested for json
*
* @param {string} packageType
* @return IPromise<Contracts.TaskPackageMetadata>
*/
getPackage(packageType: string): IPromise<Contracts.TaskPackageMetadata>;
/**
* @return IPromise<Contracts.TaskPackageMetadata[]>
*/
getPackages(): IPromise<Contracts.TaskPackageMetadata[]>;
/**
* This method can return packages/{packageType} -- package stream OR TaskPackageMetadata if requested for json
*
* @param {string} packageType
* @return IPromise<ArrayBuffer>
*/
getPackageZip(packageType: string): IPromise<ArrayBuffer>;
/**
* @exemptedapi
* [Preview API]
*
* @param {number} poolId
* @return IPromise<VSS_Common_Contracts.IdentityRef[]>
*/
getAgentPoolRoles(poolId?: number): IPromise<VSS_Common_Contracts.IdentityRef[]>;
/**
* @param {Contracts.TaskAgentPool} pool
* @return IPromise<Contracts.TaskAgentPool>
*/
addAgentPool(pool: Contracts.TaskAgentPool): IPromise<Contracts.TaskAgentPool>;
/**
* @param {number} poolId
* @return IPromise<void>
*/
deleteAgentPool(poolId: number): IPromise<void>;
/**
* @param {number} poolId
* @param {string[]} properties
* @return IPromise<Contracts.TaskAgentPool>
*/
getAgentPool(poolId: number, properties?: string[]): IPromise<Contracts.TaskAgentPool>;
/**
* @param {string} poolName
* @param {string[]} properties
* @return IPromise<Contracts.TaskAgentPool[]>
*/
getAgentPools(poolName?: string, properties?: string[]): IPromise<Contracts.TaskAgentPool[]>;
/**
* @param {Contracts.TaskAgentPool} pool
* @param {number} poolId
* @return IPromise<Contracts.TaskAgentPool>
*/
updateAgentPool(pool: Contracts.TaskAgentPool, poolId: number): IPromise<Contracts.TaskAgentPool>;
/**
* @exemptedapi
* [Preview API]
*
* @param {number} queueId
* @return IPromise<VSS_Common_Contracts.IdentityRef[]>
*/
getAgentQueueRoles(queueId?: number): IPromise<VSS_Common_Contracts.IdentityRef[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TaskAgentQueue} queue
* @return IPromise<Contracts.TaskAgentQueue>
*/
addAgentQueue(queue: Contracts.TaskAgentQueue): IPromise<Contracts.TaskAgentQueue>;
/**
* @exemptedapi
* [Preview API]
*
* @param {number} queueId
* @return IPromise<void>
*/
deleteAgentQueue(queueId: number): IPromise<void>;
/**
* @exemptedapi
* [Preview API]
*
* @param {number} queueId
* @param {Contracts.TaskAgentQueueActionFilter} actionFilter
* @return IPromise<Contracts.TaskAgentQueue>
*/
getAgentQueue(queueId: number, actionFilter?: Contracts.TaskAgentQueueActionFilter): IPromise<Contracts.TaskAgentQueue>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} queueName
* @param {Contracts.TaskAgentQueueActionFilter} actionFilter
* @return IPromise<Contracts.TaskAgentQueue[]>
*/
getAgentQueues(queueName?: string, actionFilter?: Contracts.TaskAgentQueueActionFilter): IPromise<Contracts.TaskAgentQueue[]>;
/**
* @exemptedapi
* [Preview API] Proxy for a GET request defined by an service endpoint. The request is authorized using a data source in service endpoint. The response is filtered using an XPath/Json based selector.
*
* @param {Contracts.DataSourceBinding} binding - Describes the data source to fetch.
* @param {string} scopeIdentifier - The project GUID to scope the request
* @return IPromise<string[]>
*/
queryServiceEndpoint(binding: Contracts.DataSourceBinding, scopeIdentifier: string): IPromise<string[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.ServiceEndpoint} endpoint
* @param {string} scopeIdentifier - The project GUID to scope the request
* @return IPromise<Contracts.ServiceEndpoint>
*/
createServiceEndpoint(endpoint: Contracts.ServiceEndpoint, scopeIdentifier: string): IPromise<Contracts.ServiceEndpoint>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} endpointId
* @return IPromise<void>
*/
deleteServiceEndpoint(scopeIdentifier: string, endpointId: string): IPromise<void>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} endpointId
* @return IPromise<Contracts.ServiceEndpoint>
*/
getServiceEndpointDetails(scopeIdentifier: string, endpointId: string): IPromise<Contracts.ServiceEndpoint>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} type
* @param {string[]} authSchemes
* @param {string[]} endpointIds
* @return IPromise<Contracts.ServiceEndpoint[]>
*/
getServiceEndpoints(scopeIdentifier: string, type?: string, authSchemes?: string[], endpointIds?: string[]): IPromise<Contracts.ServiceEndpoint[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.ServiceEndpoint} endpoint
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} endpointId
* @return IPromise<Contracts.ServiceEndpoint>
*/
updateServiceEndpoint(endpoint: Contracts.ServiceEndpoint, scopeIdentifier: string, endpointId: string): IPromise<Contracts.ServiceEndpoint>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} type
* @param {string} scheme
* @return IPromise<Contracts.ServiceEndpointType[]>
*/
getServiceEndpointTypes(scopeIdentifier: string, type?: string, scheme?: string): IPromise<Contracts.ServiceEndpointType[]>;
/**
* @param {Contracts.TaskAgentSession} session
* @param {number} poolId
* @return IPromise<Contracts.TaskAgentSession>
*/
createAgentSession(session: Contracts.TaskAgentSession, poolId: number): IPromise<Contracts.TaskAgentSession>;
/**
* @param {number} poolId
* @param {string} sessionId
* @return IPromise<void>
*/
deleteAgentSession(poolId: number, sessionId: string): IPromise<void>;
/**
* @param {string} taskId
* @return IPromise<void>
*/
deleteTaskDefinition(taskId: string): IPromise<void>;
/**
* @param {string} taskId
* @param {string} versionString
* @param {string[]} visibility
* @param {boolean} scopeLocal
* @return IPromise<ArrayBuffer>
*/
getTaskContentZip(taskId: string, versionString: string, visibility?: string[], scopeLocal?: boolean): IPromise<ArrayBuffer>;
/**
* @param {string} taskId
* @param {string} versionString
* @param {string[]} visibility
* @param {boolean} scopeLocal
* @return IPromise<Contracts.TaskDefinition>
*/
getTaskDefinition(taskId: string, versionString: string, visibility?: string[], scopeLocal?: boolean): IPromise<Contracts.TaskDefinition>;
/**
* @param {string} taskId
* @param {string[]} visibility
* @param {boolean} scopeLocal
* @return IPromise<Contracts.TaskDefinition[]>
*/
getTaskDefinitions(taskId?: string, visibility?: string[], scopeLocal?: boolean): IPromise<Contracts.TaskDefinition[]>;
/**
* @param {{ [key: string] : string; }} userCapabilities
* @param {number} poolId
* @param {number} agentId
* @return IPromise<Contracts.TaskAgent>
*/
updateAgentUserCapabilities(userCapabilities: {
[key: string]: string;
}, poolId: number, agentId: number): IPromise<Contracts.TaskAgent>;
}
/**
 * Alias client targeting the latest released version of the APIs; adds no members of its own — see TaskAgentHttpClient2_2 for the available operations.
 */
export class TaskAgentHttpClient extends TaskAgentHttpClient2_2 {
constructor(rootRequestPath: string);
}
/**
* Gets an http client targeting the latest released version of the APIs.
*
* @return TaskAgentHttpClient2_2
*/
export function getClient(): TaskAgentHttpClient2_2;
}
declare module "TFS/DistributedTask/TaskRestClient" {
import TFS_DistributedTask_Contracts = require("TFS/DistributedTask/Contracts");
import VSS_Common_Contracts = require("VSS/WebApi/Contracts");
import VSS_WebApi = require("VSS/WebApi/RestClient");
export class TaskHttpClient2_2 extends VSS_WebApi.VssHttpClient {
constructor(rootRequestPath: string);
/**
* @exemptedapi
* [Preview API]
*
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @param {string} type
* @return IPromise<TFS_DistributedTask_Contracts.TaskAttachment[]>
*/
getPlanAttachments(scopeIdentifier: string, hubName: string, planId: string, type: string): IPromise<TFS_DistributedTask_Contracts.TaskAttachment[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} content - Content to upload
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @param {string} timelineId
* @param {string} recordId
* @param {string} type
* @param {string} name
* @return IPromise<TFS_DistributedTask_Contracts.TaskAttachment>
*/
createAttachment(content: string, scopeIdentifier: string, hubName: string, planId: string, timelineId: string, recordId: string, type: string, name: string): IPromise<TFS_DistributedTask_Contracts.TaskAttachment>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @param {string} timelineId
* @param {string} recordId
* @param {string} type
* @param {string} name
* @return IPromise<TFS_DistributedTask_Contracts.TaskAttachment>
*/
getAttachment(scopeIdentifier: string, hubName: string, planId: string, timelineId: string, recordId: string, type: string, name: string): IPromise<TFS_DistributedTask_Contracts.TaskAttachment>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @param {string} timelineId
* @param {string} recordId
* @param {string} type
* @param {string} name
* @return IPromise<ArrayBuffer>
*/
getAttachmentContent(scopeIdentifier: string, hubName: string, planId: string, timelineId: string, recordId: string, type: string, name: string): IPromise<ArrayBuffer>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @param {string} timelineId
* @param {string} recordId
* @param {string} type
* @return IPromise<TFS_DistributedTask_Contracts.TaskAttachment[]>
*/
getAttachments(scopeIdentifier: string, hubName: string, planId: string, timelineId: string, recordId: string, type: string): IPromise<TFS_DistributedTask_Contracts.TaskAttachment[]>;
/**
* @param {VSS_Common_Contracts.VssJsonCollectionWrapperV<string[]>} lines
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @param {string} timelineId
* @param {string} recordId
* @return IPromise<void>
*/
appendTimelineRecordFeed(lines: VSS_Common_Contracts.VssJsonCollectionWrapperV<string[]>, scopeIdentifier: string, hubName: string, planId: string, timelineId: string, recordId: string): IPromise<void>;
/**
* @param {string} content - Content to upload
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @param {number} logId
* @return IPromise<TFS_DistributedTask_Contracts.TaskLog>
*/
appendLogContent(content: string, scopeIdentifier: string, hubName: string, planId: string, logId: number): IPromise<TFS_DistributedTask_Contracts.TaskLog>;
/**
* @param {TFS_DistributedTask_Contracts.TaskLog} log
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @return IPromise<TFS_DistributedTask_Contracts.TaskLog>
*/
createLog(log: TFS_DistributedTask_Contracts.TaskLog, scopeIdentifier: string, hubName: string, planId: string): IPromise<TFS_DistributedTask_Contracts.TaskLog>;
/**
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @param {number} logId
* @param {number} startLine
* @param {number} endLine
* @return IPromise<string[]>
*/
getLog(scopeIdentifier: string, hubName: string, planId: string, logId: number, startLine?: number, endLine?: number): IPromise<string[]>;
/**
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @return IPromise<TFS_DistributedTask_Contracts.TaskLog[]>
*/
getLogs(scopeIdentifier: string, hubName: string, planId: string): IPromise<TFS_DistributedTask_Contracts.TaskLog[]>;
/**
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @return IPromise<TFS_DistributedTask_Contracts.TaskOrchestrationPlan>
*/
getPlan(scopeIdentifier: string, hubName: string, planId: string): IPromise<TFS_DistributedTask_Contracts.TaskOrchestrationPlan>;
/**
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @param {string} timelineId
* @param {number} changeId
* @return IPromise<TFS_DistributedTask_Contracts.TimelineRecord[]>
*/
getRecords(scopeIdentifier: string, hubName: string, planId: string, timelineId: string, changeId?: number): IPromise<TFS_DistributedTask_Contracts.TimelineRecord[]>;
/**
* @param {VSS_Common_Contracts.VssJsonCollectionWrapperV<TFS_DistributedTask_Contracts.TimelineRecord[]>} records
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @param {string} timelineId
* @return IPromise<TFS_DistributedTask_Contracts.TimelineRecord[]>
*/
updateRecords(records: VSS_Common_Contracts.VssJsonCollectionWrapperV<TFS_DistributedTask_Contracts.TimelineRecord[]>, scopeIdentifier: string, hubName: string, planId: string, timelineId: string): IPromise<TFS_DistributedTask_Contracts.TimelineRecord[]>;
/**
* @param {TFS_DistributedTask_Contracts.Timeline} timeline
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @return IPromise<TFS_DistributedTask_Contracts.Timeline>
*/
createTimeline(timeline: TFS_DistributedTask_Contracts.Timeline, scopeIdentifier: string, hubName: string, planId: string): IPromise<TFS_DistributedTask_Contracts.Timeline>;
/**
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @param {string} timelineId
* @return IPromise<void>
*/
deleteTimeline(scopeIdentifier: string, hubName: string, planId: string, timelineId: string): IPromise<void>;
/**
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @param {string} timelineId
* @param {number} changeId
* @param {boolean} includeRecords
* @return IPromise<TFS_DistributedTask_Contracts.Timeline>
*/
getTimeline(scopeIdentifier: string, hubName: string, planId: string, timelineId: string, changeId?: number, includeRecords?: boolean): IPromise<TFS_DistributedTask_Contracts.Timeline>;
/**
* @param {string} scopeIdentifier - The project GUID to scope the request
* @param {string} hubName - The name of the server hub: "build" for the Build server or "rm" for the Release Management server
* @param {string} planId
* @return IPromise<TFS_DistributedTask_Contracts.Timeline[]>
*/
getTimelines(scopeIdentifier: string, hubName: string, planId: string): IPromise<TFS_DistributedTask_Contracts.Timeline[]>;
}
/**
 * Alias client targeting the latest released version of the APIs; adds no members of its own — see TaskHttpClient2_2 for the available operations.
 */
export class TaskHttpClient extends TaskHttpClient2_2 {
constructor(rootRequestPath: string);
}
/**
* Gets an http client targeting the latest released version of the APIs.
*
* @return TaskHttpClient2_2
*/
export function getClient(): TaskHttpClient2_2;
}
declare module "TFS/TestManagement/Contracts" {
import TFS_Core_Contracts = require("TFS/Core/Contracts");
import VSS_Common_Contracts = require("VSS/WebApi/Contracts");
export interface AggregatedResultsAnalysis {
duration: any;
previousContext: TestResultsContext;
resultsByOutcome: {
[key: string]: AggregatedResultsByOutcome;
};
resultsDifference: AggregatedResultsDifference;
totalTests: number;
}
export interface AggregatedResultsByOutcome {
count: number;
duration: any;
outcome: TestOutcome;
}
export interface AggregatedResultsDifference {
increaseInDuration: any;
increaseInFailures: number;
increaseInPassedTests: number;
increaseInTotalTests: number;
}
export interface AggregatedResultsForBuild {
build: BuildReference;
/**
* This is tests execution duration in a build.
*/
duration: any;
resultsByOutcome: {
[key: string]: AggregatedResultsByOutcome;
};
}
export interface AggregatedResultsWithDetails {
groupByField: string;
resultsForGroup: TestResultsDetailsForGroup[];
}
export enum AttachmentType {
GeneralAttachment = 0,
AfnStrip = 1,
BugFilingData = 2,
CodeCoverage = 3,
IntermediateCollectorData = 4,
RunConfig = 5,
TestImpactDetails = 6,
TmiTestRunDeploymentFiles = 7,
TmiTestRunReverseDeploymentFiles = 8,
TmiTestResultDetail = 9,
TmiTestRunSummary = 10,
}
export interface BatchResponse {
error: string;
responses: Response[];
status: string;
}
export interface BuildConfiguration {
branchName: string;
buildDefinitionId: number;
flavor: string;
id: number;
number: string;
platform: string;
project: ShallowReference;
repositoryId: number;
sourceVersion: string;
uri: string;
}
export interface BuildCoverage {
codeCoverageFileUrl: string;
configuration: BuildConfiguration;
lastError: string;
modules: ModuleCoverage[];
state: string;
}
export interface BuildReference {
branchName: string;
buildSystem: string;
definitionId: number;
id: number;
number: string;
uri: string;
}
export interface CloneOperationInformation {
cloneStatistics: CloneStatistics;
/**
* If the operation is complete, the DateTime of completion. If operation is not complete, this is DateTime.MaxValue
*/
completionDate: Date;
/**
* DateTime when the operation was started
*/
creationDate: Date;
/**
* Shallow reference of the destination
*/
destinationObject: ShallowReference;
/**
* Shallow reference of the destination
*/
destinationPlan: ShallowReference;
/**
* Shallow reference of the destination
*/
destinationProject: ShallowReference;
/**
* If the operation has Failed, Message contains the reason for failure. Null otherwise.
*/
message: string;
/**
* The ID of the operation
*/
opId: number;
/**
* The type of the object generated as a result of the Clone operation
*/
resultObjectType: ResultObjectType;
/**
* Shallow reference of the source
*/
sourceObject: ShallowReference;
/**
* Shallow reference of the source
*/
sourcePlan: ShallowReference;
/**
* Shallow reference of the source
*/
sourceProject: ShallowReference;
/**
* Current state of the operation. When State reaches Succeeded or Failed, the operation is complete
*/
state: CloneOperationState;
/**
* Url for getting the clone information
*/
url: string;
}
export enum CloneOperationState {
Failed = 2,
InProgress = 1,
Queued = 0,
Succeeded = 3,
}
export interface CloneOptions {
/**
* If set to true requirements will be cloned
*/
cloneRequirements: boolean;
/**
* Copy all suites from a source plan
*/
copyAllSuites: boolean;
/**
* Copy ancestor hierarchy
*/
copyAncestorHierarchy: boolean;
/**
* Name of the workitem type of the clone
*/
destinationWorkItemType: string;
/**
* Key value pairs where the key value is overridden by the value.
*/
overrideParameters: {
[key: string]: string;
};
/**
* Comment on the link that will link the new clone test case to the original. Set null for no comment
*/
relatedLinkComment: string;
}
export interface CloneStatistics {
/**
* Number of requirements cloned so far.
*/
clonedRequirementsCount: number;
/**
* Number of shared steps cloned so far.
*/
clonedSharedStepsCount: number;
/**
* Number of test cases cloned so far.
*/
clonedTestCasesCount: number;
/**
* Total number of requirements to be cloned
*/
totalRequirementsCount: number;
/**
* Total number of test cases to be cloned
*/
totalTestCasesCount: number;
}
/**
* Represents the build configuration (platform, flavor) and coverage data for the build
*/
export interface CodeCoverageData {
/**
* Flavor of build for which data is retrieved/published
*/
buildFlavor: string;
/**
* Platform of build for which data is retrieved/published
*/
buildPlatform: string;
/**
* List of coverage data for the build
*/
coverageStats: CodeCoverageStatistics[];
}
/**
* Represents the code coverage statistics for a particular coverage label (modules, statements, blocks, etc.)
*/
export interface CodeCoverageStatistics {
/**
* Covered units
*/
covered: number;
/**
* Delta of coverage
*/
delta: number;
/**
* Is delta valid
*/
isDeltaAvailable: boolean;
/**
* Label of coverage data ("Blocks", "Statements", "Modules", etc.)
*/
label: string;
/**
* Position of label
*/
position: number;
/**
* Total units
*/
total: number;
}
/**
* Represents the code coverage summary results Used to publish or retrieve code coverage summary against a build
*/
export interface CodeCoverageSummary {
/**
* Uri of build for which data is retrieved/published
*/
build: ShallowReference;
/**
* List of coverage data and details for the build
*/
coverageData: CodeCoverageData[];
/**
* Uri of build against which difference in coverage is computed
*/
deltaBuild: ShallowReference;
}
export enum CoverageQueryFlags {
/**
* If set, the Coverage.Modules property will be populated.
*/
Modules = 1,
/**
* If set, the ModuleCoverage.Functions properties will be populated.
*/
Functions = 2,
/**
* If set, the ModuleCoverage.CoverageData field will be populated.
*/
BlockData = 4,
}
export interface CoverageStatistics {
blocksCovered: number;
blocksNotCovered: number;
linesCovered: number;
linesNotCovered: number;
linesPartiallyCovered: number;
}
export interface CustomTestField {
fieldName: string;
value: any;
}
export interface CustomTestFieldDefinition {
fieldId: number;
fieldName: string;
fieldType: CustomTestFieldType;
scope: CustomTestFieldScope;
}
export enum CustomTestFieldScope {
None = 0,
TestRun = 1,
TestResult = 2,
System = 4,
All = 7,
}
export enum CustomTestFieldType {
Bit = 2,
DateTime = 4,
Int = 8,
Float = 6,
String = 12,
Guid = 14,
}
/**
* This is a temporary class to provide the details for the test run environment.
*/
export interface DtlEnvironmentDetails {
csmContent: string;
csmParameters: string;
subscriptionName: string;
}
export interface FailingSince {
build: BuildReference;
date: Date;
release: ReleaseReference;
}
export interface FunctionCoverage {
class: string;
name: string;
namespace: string;
sourceFile: string;
statistics: CoverageStatistics;
}
export enum GroupTestResultsBy {
None = 0,
AutomatedTestStorage = 1,
}
export interface LastResultDetails {
dateCompleted: Date;
duration: number;
runBy: VSS_Common_Contracts.IdentityRef;
}
export interface ModuleCoverage {
blockCount: number;
blockData: number[];
functions: FunctionCoverage[];
name: string;
signature: string;
signatureAge: number;
statistics: CoverageStatistics;
}
export interface PlanUpdateModel {
area: ShallowReference;
automatedTestEnvironment: TestEnvironment;
automatedTestSettings: TestSettings;
build: ShallowReference;
configurationIds: number[];
description: string;
endDate: string;
iteration: string;
manualTestEnvironment: TestEnvironment;
manualTestSettings: TestSettings;
name: string;
owner: VSS_Common_Contracts.IdentityRef;
startDate: string;
state: string;
status: string;
}
export interface PointAssignment {
configuration: ShallowReference;
tester: VSS_Common_Contracts.IdentityRef;
}
export interface PointUpdateModel {
}
export interface PointWorkItemProperty {
workItem: {
key: string;
value: any;
};
}
export interface QueryModel {
query: string;
}
export interface ReleaseReference {
definitionId: number;
environmentDefinitionId: number;
environmentId: number;
id: number;
}
export interface Response {
error: string;
id: string;
status: string;
url: string;
}
export enum ResultDetails {
None = 0,
Iterations = 1,
WorkItems = 2,
}
export enum ResultObjectType {
TestSuite = 0,
TestPlan = 1,
}
export enum ResultOutcome {
Pass = 1,
Fail = 2,
Pending = 3,
}
export interface ResultRetentionSettings {
automatedResultsRetentionDuration: number;
lastUpdatedBy: VSS_Common_Contracts.IdentityRef;
lastUpdatedDate: Date;
manualResultsRetentionDuration: number;
}
export interface ResultUpdateRequestModel {
actionResultDeletes: TestActionResultModel[];
actionResults: TestActionResultModel[];
parameterDeletes: TestResultParameterModel[];
parameters: TestResultParameterModel[];
testCaseResult: TestCaseResultUpdateModel;
}
export interface ResultUpdateResponseModel {
revision: number;
}
export interface RunCreateModel {
automated: boolean;
build: ShallowReference;
buildDropLocation: string;
buildFlavor: string;
buildPlatform: string;
comment: string;
completeDate: string;
configurationIds: number[];
controller: string;
customTestFields: CustomTestField[];
dtlAutEnvironment: ShallowReference;
dtlTestEnvironment: ShallowReference;
dueDate: string;
environmentDetails: DtlEnvironmentDetails;
errorMessage: string;
filter: RunFilter;
iteration: string;
name: string;
owner: VSS_Common_Contracts.IdentityRef;
plan: ShallowReference;
pointIds: number[];
releaseEnvironmentUri: string;
releaseUri: string;
runTimeout: any;
sourceWorkflow: string;
startDate: string;
state: string;
testConfigurationsMapping: string;
testEnvironmentId: string;
testSettings: ShallowReference;
type: string;
}
/**
* This class is used to provide the filters used for discovery
*/
export interface RunFilter {
/**
* filter for the test case sources (test containers)
*/
sourceFilter: string;
/**
* filter for the test cases
*/
testCaseFilter: string;
}
export interface RunStatistic {
count: number;
outcome: string;
resolutionState: TestResolutionState;
state: string;
}
export interface RunUpdateModel {
build: ShallowReference;
comment: string;
completedDate: string;
controller: string;
deleteInProgressResults: boolean;
dtlAutEnvironment: ShallowReference;
dtlEnvironment: ShallowReference;
dtlEnvironmentDetails: DtlEnvironmentDetails;
dueDate: string;
errorMessage: string;
iteration: string;
logEntries: TestMessageLogDetails[];
name: string;
startedDate: string;
state: string;
substate: TestRunSubstate;
testEnvironmentId: string;
testSettings: ShallowReference;
}
/**
* An abstracted reference to some other resource. This class is used to provide the build data contracts with a uniform way to reference other resources in a way that provides easy traversal through links.
*/
export interface ShallowReference {
/**
* Id of the resource
*/
id: string;
/**
* Name of the linked resource (definition name, controller name, etc.)
*/
name: string;
/**
* Full http link to the resource
*/
url: string;
}
export interface SharedStepModel {
id: number;
revision: number;
}
export interface SuiteCreateModel {
}
export interface SuiteTestCase {
pointAssignments: PointAssignment[];
testCase: WorkItemReference;
}
export interface SuiteUpdateModel {
}
export interface TestActionResultModel extends TestResultModelBase {
actionPath: string;
iterationId: number;
sharedStepModel: SharedStepModel;
url: string;
}
export interface TestAttachmentReference {
id: number;
url: string;
}
export interface TestAttachmentRequestModel {
attachmentType: string;
comment: string;
fileName: string;
stream: string;
}
export interface TestCaseResult {
afnStripId: number;
area: ShallowReference;
associatedBugs: ShallowReference[];
automatedTestId: string;
automatedTestName: string;
automatedTestStorage: string;
automatedTestType: string;
automatedTestTypeId: string;
build: ShallowReference;
buildReference: BuildReference;
comment: string;
completedDate: Date;
computerName: string;
configuration: ShallowReference;
createdDate: Date;
customFields: CustomTestField[];
durationInMs: number;
errorMessage: string;
failingSince: FailingSince;
failureType: string;
id: number;
iterationDetails: TestIterationDetailsModel[];
lastUpdatedBy: VSS_Common_Contracts.IdentityRef;
lastUpdatedDate: Date;
outcome: string;
owner: VSS_Common_Contracts.IdentityRef;
priority: number;
project: ShallowReference;
releaseReference: ReleaseReference;
resetCount: number;
resolutionState: string;
resolutionStateId: number;
revision: number;
runBy: VSS_Common_Contracts.IdentityRef;
stackTrace: string;
startedDate: Date;
state: string;
testCase: ShallowReference;
testCaseTitle: string;
testPoint: ShallowReference;
testRun: ShallowReference;
url: string;
}
export interface TestCaseResult2 {
componentId: string;
custom: any;
endTime: Date;
exceptionStack: string;
externalArtifacts: string[];
externalRunId: string;
externalSystem: string;
externalTestId: string;
failureReasons: string[];
failureSummary: string;
investigationNotes: string;
isSuperseded: boolean;
isValid: boolean;
outcome: ResultOutcome;
resultCustomPropertiesTypeName: string;
resultId: string;
resultName: string;
runId: string;
startTime: Date;
testId: string;
tfsSecurityKey: string;
}
export interface TestCaseResultAttachmentModel {
id: number;
iterationId: number;
name: string;
size: number;
url: string;
}
export interface TestCaseResultIdentifier {
testResultId: number;
testRunId: number;
}
export interface TestCaseResultUpdateModel {
associatedWorkItems: number[];
automatedTestTypeId: string;
comment: string;
completedDate: string;
computerName: string;
customFields: CustomTestField[];
durationInMs: string;
errorMessage: string;
failureType: string;
outcome: string;
owner: VSS_Common_Contracts.IdentityRef;
resolutionState: string;
runBy: VSS_Common_Contracts.IdentityRef;
stackTrace: string;
startedDate: string;
state: string;
testCasePriority: string;
testResult: ShallowReference;
}
export interface TestConfiguration {
/**
* Area of the configuration
*/
area: ShallowReference;
/**
* Description of the configuration
*/
description: string;
/**
* Id of the configuration
*/
id: number;
/**
* Is the configuration a default for the test plans
*/
isDefault: boolean;
/**
* Last Updated By Reference
*/
lastUpdatedBy: VSS_Common_Contracts.IdentityRef;
/**
* Last updated date
*/
lastUpdatedDate: Date;
/**
* Name of the configuration
*/
name: string;
/**
* Project to which the configuration belongs
*/
project: ShallowReference;
/**
* Revision of the configuration
*/
revision: number;
/**
* State of the configuration
*/
state: TestConfigurationState;
/**
* Url of Configuration Resource
*/
url: string;
/**
* Dictionary of Test Variable, Selected Value
*/
values: {
[key: string]: string;
};
}
export enum TestConfigurationState {
/**
* The configuration can be used for new test runs.
*/
Active = 1,
/**
* The configuration has been retired and should not be used for new test runs.
*/
Inactive = 2,
}
export interface TestEnvironment {
environmentId: string;
environmentName: string;
}
export interface TestFailureDetails {
count: number;
testResults: ShallowReference[];
}
export interface TestFailuresAnalysis {
existingFailures: TestFailureDetails;
fixedTests: TestFailureDetails;
newFailures: TestFailureDetails;
previousContext: TestResultsContext;
}
export interface TestIterationDetailsModel {
actionResults: TestActionResultModel[];
attachments: TestCaseResultAttachmentModel[];
comment: string;
completedDate: Date;
durationInMs: number;
errorMessage: string;
id: number;
outcome: string;
parameters: TestResultParameterModel[];
startedDate: Date;
url: string;
}
/**
* A single entry of a test message log: when it was created, its entry id, and the message text. (Replaces a copy-pasted description that belonged to ShallowReference.)
*/
export interface TestMessageLogDetails {
/**
* Date when the log entry was created
*/
dateCreated: Date;
/**
* Id of the log entry
*/
entryId: number;
/**
* Message text of the log entry
*/
message: string;
}
export enum TestOutcome {
/**
* Only used during an update to preserve the existing value.
*/
Unspecified = 0,
/**
* Test has not been completed, or the test type does not report pass/failure.
*/
None = 1,
/**
* Test was executed without any issues.
*/
Passed = 2,
/**
* Test was executed, but there were issues. Issues may involve exceptions or failed assertions.
*/
Failed = 3,
/**
* Test has completed, but we can't say if it passed or failed. May be used for aborted tests...
*/
Inconclusive = 4,
/**
* The test timed out
*/
Timeout = 5,
/**
* Test was aborted. This was not caused by a user gesture, but rather by a framework decision.
*/
Aborted = 6,
/**
* Test had its chance to be executed but was not, as ITestElement.IsRunnable == false.
*/
Blocked = 7,
/**
* Test was not executed. This was caused by a user gesture - e.g. user hit stop button.
*/
NotExecuted = 8,
/**
* To be used by Run level results. This is not a failure.
*/
Warning = 9,
/**
* There was a system error while we were trying to execute a test.
*/
Error = 10,
/**
* Test is Not Applicable for execution.
*/
NotApplicable = 11,
/**
* Test is paused.
*/
Paused = 12,
/**
* Test is currently executing. Added this for TCM charts
*/
InProgress = 13,
MaxValue = 13,
}
    /**
     * A test plan, including its area/iteration scoping, automated and manual
     * environments/settings, root suite and ownership/audit information.
     */
    export interface TestPlan {
        area: ShallowReference;
        automatedTestEnvironment: TestEnvironment;
        automatedTestSettings: TestSettings;
        build: ShallowReference;
        clientUrl: string;
        description: string;
        endDate: Date;
        id: number;
        iteration: string;
        manualTestEnvironment: TestEnvironment;
        manualTestSettings: TestSettings;
        name: string;
        owner: VSS_Common_Contracts.IdentityRef;
        previousBuild: ShallowReference;
        project: ShallowReference;
        revision: number;
        rootSuite: ShallowReference;
        startDate: Date;
        state: string;
        updatedBy: VSS_Common_Contracts.IdentityRef;
        updatedDate: Date;
        url: string;
    }
    /**
     * Request body for cloning a test plan: clone options, the destination plan,
     * and the ids of the suites to clone.
     */
    export interface TestPlanCloneRequest {
        cloneOptions: CloneOptions;
        destinationTestPlan: TestPlan;
        suiteIds: number[];
    }
    /**
     * A set of test plans together with the ids of the last selected plan and suite.
     */
    export interface TestPlansWithSelection {
        lastSelectedPlan: number;
        lastSelectedSuite: number;
        plans: TestPlan[];
    }
    /**
     * A test point: a test case paired with a configuration within a suite, plus
     * details about its most recent run/result and update history.
     */
    export interface TestPoint {
        assignedTo: VSS_Common_Contracts.IdentityRef;
        automated: boolean;
        comment: string;
        configuration: ShallowReference;
        failureType: string;
        id: number;
        lastResolutionStateId: number;
        lastResult: ShallowReference;
        lastResultDetails: LastResultDetails;
        lastRunBuildNumber: string;
        lastTestRun: ShallowReference;
        lastUpdatedBy: VSS_Common_Contracts.IdentityRef;
        lastUpdatedDate: Date;
        outcome: string;
        revision: number;
        state: string;
        suite: ShallowReference;
        testCase: WorkItemReference;
        testPlan: ShallowReference;
        url: string;
        workItemProperties: any[];
    }
    /**
     * Aggregated test report for a results context (build or release): analysis,
     * failures and the owning team project.
     */
    export interface TestReport {
        aggregatedResultsAnalysis: AggregatedResultsAnalysis;
        teamProject: TFS_Core_Contracts.TeamProjectReference;
        testFailures: TestFailuresAnalysis;
        testResultsContext: TestResultsContext;
    }
    /**
     * A test resolution state (id and name) defined for a project.
     */
    export interface TestResolutionState {
        id: number;
        name: string;
        project: ShallowReference;
    }
    /**
     * Payload for creating a test result on a run. Note that dates and duration
     * are transported as strings here, unlike the Date/number fields on the
     * read-side result models.
     */
    export interface TestResultCreateModel {
        area: ShallowReference;
        associatedWorkItems: number[];
        automatedTestId: string;
        automatedTestName: string;
        automatedTestStorage: string;
        automatedTestType: string;
        automatedTestTypeId: string;
        comment: string;
        completedDate: string;
        computerName: string;
        configuration: ShallowReference;
        customFields: CustomTestField[];
        durationInMs: string;
        errorMessage: string;
        failureType: string;
        outcome: string;
        owner: VSS_Common_Contracts.IdentityRef;
        resolutionState: string;
        runBy: VSS_Common_Contracts.IdentityRef;
        stackTrace: string;
        startedDate: string;
        state: string;
        testCase: ShallowReference;
        testCasePriority: string;
        testCaseTitle: string;
        testPoint: ShallowReference;
    }
    /**
     * Common fields shared by test result models: outcome, timing and messages.
     */
    export interface TestResultModelBase {
        comment: string;
        completedDate: Date;
        // Duration in milliseconds.
        durationInMs: number;
        errorMessage: string;
        outcome: string;
        startedDate: Date;
    }
    /**
     * A parameter name/value recorded for one iteration of a test result.
     */
    export interface TestResultParameterModel {
        actionPath: string;
        iterationId: number;
        parameterName: string;
        url: string;
        value: string;
    }
    /**
     * The context a set of test results belongs to; contextType discriminates
     * between the build and release references.
     */
    export interface TestResultsContext {
        build: BuildReference;
        contextType: TestResultsContextType;
        release: ReleaseReference;
    }
    /**
     * Discriminator for TestResultsContext: whether results belong to a build or a release.
     */
    export enum TestResultsContextType {
        Build = 1,
        Release = 2,
    }
    /**
     * Result identifiers and per-outcome aggregate counts for a single
     * group-by value.
     */
    export interface TestResultsDetailsForGroup {
        groupByValue: any;
        ids: TestCaseResultIdentifier[];
        // Keyed by outcome name.
        resultsCountByOutcome: {
            [key: string]: AggregatedResultsByOutcome;
        };
    }
    /**
     * Filter criteria for querying test result trends (see queryResultTrendForBuild).
     */
    export interface TestResultTrendFilter {
        branchNames: string[];
        definitionIds: number[];
        sourceWorkflow: string;
        testRunTitles: string[];
    }
    /**
     * A test run: its association to build/plan/project, execution environment,
     * per-outcome test counts, statistics and state/substate.
     */
    export interface TestRun {
        build: ShallowReference;
        buildConfiguration: BuildConfiguration;
        comment: string;
        completedDate: Date;
        controller: string;
        createdDate: Date;
        customFields: CustomTestField[];
        dropLocation: string;
        dtlAutEnvironment: ShallowReference;
        dtlEnvironment: ShallowReference;
        dtlEnvironmentCreationDetails: DtlEnvironmentDetails;
        dueDate: Date;
        errorMessage: string;
        filter: RunFilter;
        id: number;
        incompleteTests: number;
        isAutomated: boolean;
        iteration: string;
        lastUpdatedBy: VSS_Common_Contracts.IdentityRef;
        lastUpdatedDate: Date;
        name: string;
        notApplicableTests: number;
        owner: VSS_Common_Contracts.IdentityRef;
        passedTests: number;
        phase: string;
        plan: ShallowReference;
        postProcessState: string;
        project: ShallowReference;
        releaseEnvironmentUri: string;
        releaseUri: string;
        revision: number;
        runStatistics: RunStatistic[];
        startedDate: Date;
        state: string;
        // Finer-grained state; see TestRunSubstate.
        substate: TestRunSubstate;
        testEnvironment: TestEnvironment;
        testMessageLogId: number;
        testSettings: ShallowReference;
        totalTests: number;
        unanalyzedTests: number;
        url: string;
        webAccessUrl: string;
    }
    /**
     * Code coverage data gathered for a test run, broken down by module.
     */
    export interface TestRunCoverage {
        lastError: string;
        modules: ModuleCoverage[];
        state: string;
        testRun: ShallowReference;
    }
    /**
     * Lifecycle state of a test run.
     */
    export enum TestRunState {
        /**
         * Only used during an update to preserve the existing value.
         */
        Unspecified = 0,
        /**
         * The run is still being created. No tests have started yet.
         */
        NotStarted = 1,
        /**
         * Tests are running.
         */
        InProgress = 2,
        /**
         * All tests have completed or been skipped.
         */
        Completed = 3,
        /**
         * Run is stopped and remaining tests have been aborted
         */
        Aborted = 4,
        /**
         * Run is currently initializing. This is a legacy state and should not be used any more
         */
        Waiting = 5,
        /**
         * Run requires investigation because of a test point failure. This is a legacy state and should not be used any more
         */
        NeedsInvestigation = 6,
    }
    /**
     * Statistics for a single test run: the run reference plus its RunStatistic entries.
     */
    export interface TestRunStatistic {
        run: ShallowReference;
        runStatistics: RunStatistic[];
    }
    /**
     * Finer-grained state of a test run (see TestRun.substate), covering
     * environment creation, execution, analysis and cancellation phases.
     */
    export enum TestRunSubstate {
        None = 0,
        CreatingEnvironment = 1,
        RunningTests = 2,
        CanceledByUser = 3,
        AbortedBySystem = 4,
        TimedOut = 5,
        PendingAnalysis = 6,
        Analyzed = 7,
        CancellationInProgress = 8,
    }
    /**
     * Represents the test settings of the run. Used to create test settings and fetch test settings
     */
    export interface TestSettings {
        /**
         * Area path required to create test settings
         */
        areaPath: string;
        /**
         * Description of the test settings. Used in create test settings.
         */
        description: string;
        /**
         * Indicates if the tests settings is public or private. Used in create test settings.
         */
        isPublic: boolean;
        /**
         * Xml string of machine roles. Used in create test settings.
         */
        machineRoles: string;
        /**
         * Test settings content.
         */
        testSettingsContent: string;
        /**
         * Test settings id.
         */
        testSettingsId: number;
        /**
         * Test settings name.
         */
        testSettingsName: string;
    }
    /**
     * A test suite within a plan: its hierarchy (parent/suites), configurations,
     * query (for query-based suites), requirement link and test case counts.
     */
    export interface TestSuite {
        areaUri: string;
        defaultConfigurations: ShallowReference[];
        id: number;
        inheritDefaultConfigurations: boolean;
        lastError: string;
        lastPopulatedDate: Date;
        lastUpdatedBy: VSS_Common_Contracts.IdentityRef;
        lastUpdatedDate: Date;
        name: string;
        parent: ShallowReference;
        plan: ShallowReference;
        project: ShallowReference;
        queryString: string;
        requirementId: number;
        revision: number;
        state: string;
        suites: ShallowReference[];
        suiteType: string;
        testCaseCount: number;
        testCasesUrl: string;
        url: string;
    }
    /**
     * Request body for cloning a test suite into a destination suite/project.
     */
    export interface TestSuiteCloneRequest {
        cloneOptions: CloneOptions;
        destinationSuiteId: number;
        destinationSuiteProjectName: string;
    }
    /**
     * A named test variable, scoped to a project, with its list of allowed values.
     */
    export interface TestVariable {
        /**
         * Description of the test variable
         */
        description: string;
        /**
         * Id of the test variable
         */
        id: number;
        /**
         * Name of the test variable
         */
        name: string;
        /**
         * Project to which the test variable belongs
         */
        project: ShallowReference;
        /**
         * Revision
         */
        revision: number;
        /**
         * Url of the test variable
         */
        url: string;
        /**
         * List of allowed values
         */
        values: string[];
    }
    /**
     * Shallow reference to a work item. Note the id is a string here, unlike the
     * numeric ids used by most other contracts in this module.
     */
    export interface WorkItemReference {
        id: string;
        name: string;
        url: string;
        webUrl: string;
    }
    /**
     * Runtime type metadata for the contracts declared in this module:
     * `fields` describes members needing typed handling, and `enumValues` maps
     * camelCased enum member names to their numeric values.
     * NOTE(review): presumably consumed by the REST client layer for
     * (de)serialization of dates/enums — confirm against VSS/WebApi/RestClient.
     */
    export var TypeInfo: {
        AggregatedResultsAnalysis: {
            fields: any;
        };
        AggregatedResultsByOutcome: {
            fields: any;
        };
        AggregatedResultsDifference: {
            fields: any;
        };
        AggregatedResultsForBuild: {
            fields: any;
        };
        AggregatedResultsWithDetails: {
            fields: any;
        };
        AttachmentType: {
            enumValues: {
                "generalAttachment": number;
                "afnStrip": number;
                "bugFilingData": number;
                "codeCoverage": number;
                "intermediateCollectorData": number;
                "runConfig": number;
                "testImpactDetails": number;
                "tmiTestRunDeploymentFiles": number;
                "tmiTestRunReverseDeploymentFiles": number;
                "tmiTestResultDetail": number;
                "tmiTestRunSummary": number;
            };
        };
        BatchResponse: {
            fields: any;
        };
        BuildConfiguration: {
            fields: any;
        };
        BuildCoverage: {
            fields: any;
        };
        BuildReference: {
            fields: any;
        };
        CloneOperationInformation: {
            fields: any;
        };
        CloneOperationState: {
            enumValues: {
                "failed": number;
                "inProgress": number;
                "queued": number;
                "succeeded": number;
            };
        };
        CloneOptions: {
            fields: any;
        };
        CloneStatistics: {
            fields: any;
        };
        CodeCoverageData: {
            fields: any;
        };
        CodeCoverageStatistics: {
            fields: any;
        };
        CodeCoverageSummary: {
            fields: any;
        };
        CoverageQueryFlags: {
            enumValues: {
                "modules": number;
                "functions": number;
                "blockData": number;
            };
        };
        CoverageStatistics: {
            fields: any;
        };
        CustomTestField: {
            fields: any;
        };
        CustomTestFieldDefinition: {
            fields: any;
        };
        CustomTestFieldScope: {
            enumValues: {
                "none": number;
                "testRun": number;
                "testResult": number;
                "system": number;
                "all": number;
            };
        };
        CustomTestFieldType: {
            enumValues: {
                "bit": number;
                "dateTime": number;
                "int": number;
                "float": number;
                "string": number;
                "guid": number;
            };
        };
        DtlEnvironmentDetails: {
            fields: any;
        };
        FailingSince: {
            fields: any;
        };
        FunctionCoverage: {
            fields: any;
        };
        GroupTestResultsBy: {
            enumValues: {
                "none": number;
                "automatedTestStorage": number;
            };
        };
        LastResultDetails: {
            fields: any;
        };
        ModuleCoverage: {
            fields: any;
        };
        PlanUpdateModel: {
            fields: any;
        };
        PointAssignment: {
            fields: any;
        };
        PointUpdateModel: {
            fields: any;
        };
        PointWorkItemProperty: {
            fields: any;
        };
        QueryModel: {
            fields: any;
        };
        ReleaseReference: {
            fields: any;
        };
        Response: {
            fields: any;
        };
        ResultDetails: {
            enumValues: {
                "none": number;
                "iterations": number;
                "workItems": number;
            };
        };
        ResultObjectType: {
            enumValues: {
                "testSuite": number;
                "testPlan": number;
            };
        };
        ResultOutcome: {
            enumValues: {
                "pass": number;
                "fail": number;
                "pending": number;
            };
        };
        ResultRetentionSettings: {
            fields: any;
        };
        ResultUpdateRequestModel: {
            fields: any;
        };
        ResultUpdateResponseModel: {
            fields: any;
        };
        RunCreateModel: {
            fields: any;
        };
        RunFilter: {
            fields: any;
        };
        RunStatistic: {
            fields: any;
        };
        RunUpdateModel: {
            fields: any;
        };
        ShallowReference: {
            fields: any;
        };
        SharedStepModel: {
            fields: any;
        };
        SuiteCreateModel: {
            fields: any;
        };
        SuiteTestCase: {
            fields: any;
        };
        SuiteUpdateModel: {
            fields: any;
        };
        TestActionResultModel: {
            fields: any;
        };
        TestAttachmentReference: {
            fields: any;
        };
        TestAttachmentRequestModel: {
            fields: any;
        };
        TestCaseResult: {
            fields: any;
        };
        TestCaseResult2: {
            fields: any;
        };
        TestCaseResultAttachmentModel: {
            fields: any;
        };
        TestCaseResultIdentifier: {
            fields: any;
        };
        TestCaseResultUpdateModel: {
            fields: any;
        };
        TestConfiguration: {
            fields: any;
        };
        TestConfigurationState: {
            enumValues: {
                "active": number;
                "inactive": number;
            };
        };
        TestEnvironment: {
            fields: any;
        };
        TestFailureDetails: {
            fields: any;
        };
        TestFailuresAnalysis: {
            fields: any;
        };
        TestIterationDetailsModel: {
            fields: any;
        };
        TestMessageLogDetails: {
            fields: any;
        };
        TestOutcome: {
            enumValues: {
                "unspecified": number;
                "none": number;
                "passed": number;
                "failed": number;
                "inconclusive": number;
                "timeout": number;
                "aborted": number;
                "blocked": number;
                "notExecuted": number;
                "warning": number;
                "error": number;
                "notApplicable": number;
                "paused": number;
                "inProgress": number;
                "maxValue": number;
            };
        };
        TestPlan: {
            fields: any;
        };
        TestPlanCloneRequest: {
            fields: any;
        };
        TestPlansWithSelection: {
            fields: any;
        };
        TestPoint: {
            fields: any;
        };
        TestReport: {
            fields: any;
        };
        TestResolutionState: {
            fields: any;
        };
        TestResultCreateModel: {
            fields: any;
        };
        TestResultModelBase: {
            fields: any;
        };
        TestResultParameterModel: {
            fields: any;
        };
        TestResultsContext: {
            fields: any;
        };
        TestResultsContextType: {
            enumValues: {
                "build": number;
                "release": number;
            };
        };
        TestResultsDetailsForGroup: {
            fields: any;
        };
        TestResultTrendFilter: {
            fields: any;
        };
        TestRun: {
            fields: any;
        };
        TestRunCoverage: {
            fields: any;
        };
        TestRunState: {
            enumValues: {
                "unspecified": number;
                "notStarted": number;
                "inProgress": number;
                "completed": number;
                "aborted": number;
                "waiting": number;
                "needsInvestigation": number;
            };
        };
        TestRunStatistic: {
            fields: any;
        };
        TestRunSubstate: {
            enumValues: {
                "none": number;
                "creatingEnvironment": number;
                "runningTests": number;
                "canceledByUser": number;
                "abortedBySystem": number;
                "timedOut": number;
                "pendingAnalysis": number;
                "analyzed": number;
                "cancellationInProgress": number;
            };
        };
        TestSettings: {
            fields: any;
        };
        TestSuite: {
            fields: any;
        };
        TestSuiteCloneRequest: {
            fields: any;
        };
        TestVariable: {
            fields: any;
        };
        WorkItemReference: {
            fields: any;
        };
    };
}
declare module "TFS/TestManagement/RestClient" {
import Contracts = require("TFS/TestManagement/Contracts");
import VSS_WebApi = require("VSS/WebApi/RestClient");
export class TestHttpClient2_2 extends VSS_WebApi.VssHttpClient {
static serviceInstanceId: string;
constructor(rootRequestPath: string);
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestAttachmentRequestModel} attachmentRequestModel
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} testCaseResultId
* @return IPromise<Contracts.TestAttachmentReference>
*/
createTestResultAttachment(attachmentRequestModel: Contracts.TestAttachmentRequestModel, project: string, runId: number, testCaseResultId: number): IPromise<Contracts.TestAttachmentReference>;
/**
* @exemptedapi
* [Preview API] Returns a test result attachment
*
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} testCaseResultId
* @param {number} attachmentId
* @return IPromise<ArrayBuffer>
*/
getTestResultAttachmentContent(project: string, runId: number, testCaseResultId: number, attachmentId: number): IPromise<ArrayBuffer>;
/**
* @exemptedapi
* [Preview API] Returns a test result attachment
*
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} testCaseResultId
* @param {number} attachmentId
* @return IPromise<ArrayBuffer>
*/
getTestResultAttachmentZip(project: string, runId: number, testCaseResultId: number, attachmentId: number): IPromise<ArrayBuffer>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestAttachmentRequestModel} attachmentRequestModel
* @param {string} project - Project ID or project name
* @param {number} runId
* @return IPromise<Contracts.TestAttachmentReference>
*/
createTestRunAttachment(attachmentRequestModel: Contracts.TestAttachmentRequestModel, project: string, runId: number): IPromise<Contracts.TestAttachmentReference>;
/**
* @exemptedapi
* [Preview API] Returns a test run attachment
*
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} attachmentId
* @return IPromise<ArrayBuffer>
*/
getTestRunAttachmentContent(project: string, runId: number, attachmentId: number): IPromise<ArrayBuffer>;
/**
* @exemptedapi
* [Preview API] Returns a test run attachment
*
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} attachmentId
* @return IPromise<ArrayBuffer>
*/
getTestRunAttachmentZip(project: string, runId: number, attachmentId: number): IPromise<ArrayBuffer>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} testCaseResultId
* @return IPromise<Contracts.WorkItemReference[]>
*/
getBugsLinkedToTestResult(project: string, runId: number, testCaseResultId: number): IPromise<Contracts.WorkItemReference[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} buildId
* @param {number} flags
* @return IPromise<Contracts.BuildCoverage[]>
*/
getBuildCodeCoverage(project: string, buildId: number, flags: number): IPromise<Contracts.BuildCoverage[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} buildId
* @param {number} deltaBuildId
* @return IPromise<Contracts.CodeCoverageSummary>
*/
getCodeCoverageSummary(project: string, buildId: number, deltaBuildId?: number): IPromise<Contracts.CodeCoverageSummary>;
/**
* @exemptedapi
* [Preview API] http://(tfsserver):8080/tfs/DefaultCollection/_apis/test/CodeCoverage?buildId=10 Request: Json of code coverage summary
*
* @param {Contracts.CodeCoverageData} coverageData
* @param {string} project - Project ID or project name
* @param {number} buildId
* @return IPromise<void>
*/
updateCodeCoverageSummary(coverageData: Contracts.CodeCoverageData, project: string, buildId: number): IPromise<void>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} flags
* @return IPromise<Contracts.TestRunCoverage[]>
*/
getTestRunCodeCoverage(project: string, runId: number, flags: number): IPromise<Contracts.TestRunCoverage[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestConfiguration} testConfiguration
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.TestConfiguration>
*/
createTestConfiguration(testConfiguration: Contracts.TestConfiguration, project: string): IPromise<Contracts.TestConfiguration>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} testConfigurationId
* @return IPromise<void>
*/
deleteTestConfiguration(project: string, testConfigurationId: number): IPromise<void>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} testConfigurationId
* @return IPromise<Contracts.TestConfiguration>
*/
getTestConfigurationById(project: string, testConfigurationId: number): IPromise<Contracts.TestConfiguration>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} skip
* @param {number} top
* @return IPromise<Contracts.TestConfiguration[]>
*/
getTestConfigurations(project: string, skip?: number, top?: number): IPromise<Contracts.TestConfiguration[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestConfiguration} testConfiguration
* @param {string} project - Project ID or project name
* @param {number} testConfigurationId
* @return IPromise<Contracts.TestConfiguration>
*/
updateTestConfiguration(testConfiguration: Contracts.TestConfiguration, project: string, testConfigurationId: number): IPromise<Contracts.TestConfiguration>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.CustomTestFieldDefinition[]} newFields
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.CustomTestFieldDefinition[]>
*/
addCustomFields(newFields: Contracts.CustomTestFieldDefinition[], project: string): IPromise<Contracts.CustomTestFieldDefinition[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {Contracts.CustomTestFieldScope} scopeFilter
* @return IPromise<Contracts.CustomTestFieldDefinition[]>
*/
queryCustomFields(project: string, scopeFilter: Contracts.CustomTestFieldScope): IPromise<Contracts.CustomTestFieldDefinition[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} runId
* @return IPromise<Contracts.TestMessageLogDetails[]>
*/
getTestRunLogs(project: string, runId: number): IPromise<Contracts.TestMessageLogDetails[]>;
/**
* @param {Contracts.PlanUpdateModel} testPlan
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.TestPlan>
*/
createTestPlan(testPlan: Contracts.PlanUpdateModel, project: string): IPromise<Contracts.TestPlan>;
/**
* @param {string} project - Project ID or project name
* @param {number} planId
* @return IPromise<void>
*/
deleteTestPlan(project: string, planId: number): IPromise<void>;
/**
* @param {string} project - Project ID or project name
* @param {number} planId
* @return IPromise<Contracts.TestPlan>
*/
getPlanById(project: string, planId: number): IPromise<Contracts.TestPlan>;
/**
* @param {string} project - Project ID or project name
* @param {string} owner
* @param {number} skip
* @param {number} top
* @param {boolean} includePlanDetails
* @param {boolean} filterActivePlans
* @return IPromise<Contracts.TestPlan[]>
*/
getPlans(project: string, owner?: string, skip?: number, top?: number, includePlanDetails?: boolean, filterActivePlans?: boolean): IPromise<Contracts.TestPlan[]>;
/**
* @param {Contracts.PlanUpdateModel} planUpdateModel
* @param {string} project - Project ID or project name
* @param {number} planId
* @return IPromise<Contracts.TestPlan>
*/
updateTestPlan(planUpdateModel: Contracts.PlanUpdateModel, project: string, planId: number): IPromise<Contracts.TestPlan>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} operationId
* @param {boolean} includeDetails
* @return IPromise<Contracts.CloneOperationInformation>
*/
getPlanCloneInformation(project: string, operationId: number, includeDetails?: boolean): IPromise<Contracts.CloneOperationInformation>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestPlanCloneRequest} cloneRequestBody
* @param {string} project - Project ID or project name
* @param {number} sourcePlanId
* @return IPromise<Contracts.CloneOperationInformation>
*/
cloneTestPlan(cloneRequestBody: Contracts.TestPlanCloneRequest, project: string, sourcePlanId: number): IPromise<Contracts.CloneOperationInformation>;
/**
* @param {string} project - Project ID or project name
* @param {number} planId
* @param {number} suiteId
* @param {number} pointIds
* @param {string} witFields
* @return IPromise<Contracts.TestPoint>
*/
getPoint(project: string, planId: number, suiteId: number, pointIds: number, witFields?: string): IPromise<Contracts.TestPoint>;
/**
* @param {string} project - Project ID or project name
* @param {number} planId
* @param {number} suiteId
* @param {string} witFields
* @param {string} configurationId
* @param {string} testCaseId
* @param {string} testPointIds
* @param {boolean} includePointDetails
* @param {number} skip
* @param {number} top
* @return IPromise<Contracts.TestPoint[]>
*/
getPoints(project: string, planId: number, suiteId: number, witFields?: string, configurationId?: string, testCaseId?: string, testPointIds?: string, includePointDetails?: boolean, skip?: number, top?: number): IPromise<Contracts.TestPoint[]>;
/**
* @param {Contracts.PointUpdateModel} pointUpdateModel
* @param {string} project - Project ID or project name
* @param {number} planId
* @param {number} suiteId
* @param {string} pointIds
* @return IPromise<Contracts.TestPoint[]>
*/
updateTestPoints(pointUpdateModel: Contracts.PointUpdateModel, project: string, planId: number, suiteId: number, pointIds: string): IPromise<Contracts.TestPoint[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} testRunId
* @param {number} testResultId
* @param {number} recentDays
* @return IPromise<Contracts.WorkItemReference[]>
*/
queryTestResultRecentBugs(project: string, testRunId: number, testResultId: number, recentDays?: number): IPromise<Contracts.WorkItemReference[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} buildId
* @param {string} sourceWorkflow
* @param {boolean} includeFailureDetails
* @param {Contracts.BuildReference} buildToCompare
* @return IPromise<Contracts.TestReport>
*/
queryTestResultsReportForBuild(project: string, buildId: number, sourceWorkflow: string, includeFailureDetails?: boolean, buildToCompare?: Contracts.BuildReference): IPromise<Contracts.TestReport>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} releaseId
* @param {number} releaseEnvId
* @param {string} sourceWorkflow
* @param {boolean} includeFailureDetails
* @param {Contracts.ReleaseReference} releaseToCompare
* @return IPromise<Contracts.TestReport>
*/
queryTestResultsReportForRelease(project: string, releaseId: number, releaseEnvId: number, sourceWorkflow: string, includeFailureDetails?: boolean, releaseToCompare?: Contracts.ReleaseReference): IPromise<Contracts.TestReport>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} buildId
* @param {string} sourceWorkflow
* @param {string} groupBy
* @param {string} filter
* @return IPromise<Contracts.AggregatedResultsWithDetails>
*/
getTestResultDetailsForBuild(project: string, buildId: number, sourceWorkflow: string, groupBy?: string, filter?: string): IPromise<Contracts.AggregatedResultsWithDetails>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} releaseId
* @param {number} releaseEnvId
* @param {string} sourceWorkflow
* @param {string} groupBy
* @param {string} filter
* @return IPromise<Contracts.AggregatedResultsWithDetails>
*/
getTestResultDetailsForRelease(project: string, releaseId: number, releaseEnvId: number, sourceWorkflow: string, groupBy?: string, filter?: string): IPromise<Contracts.AggregatedResultsWithDetails>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.ResultRetentionSettings} retentionSettings
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.ResultRetentionSettings>
*/
createResultRetentionSettings(retentionSettings: Contracts.ResultRetentionSettings, project: string): IPromise<Contracts.ResultRetentionSettings>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @return IPromise<void>
*/
deleteResultRetentionSettings(project: string): IPromise<void>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.ResultRetentionSettings>
*/
getResultRetentionSettings(project: string): IPromise<Contracts.ResultRetentionSettings>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.ResultRetentionSettings} retentionSettings
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.ResultRetentionSettings>
*/
updateResultRetentionSettings(retentionSettings: Contracts.ResultRetentionSettings, project: string): IPromise<Contracts.ResultRetentionSettings>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestResultCreateModel[]} resultCreateModels
* @param {string} project - Project ID or project name
* @param {number} runId
* @return IPromise<Contracts.TestCaseResult[]>
*/
addTestResultsToTestRun(resultCreateModels: Contracts.TestResultCreateModel[], project: string, runId: number): IPromise<Contracts.TestCaseResult[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestCaseResultUpdateModel} resultUpdateModel
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number[]} resultIds
* @return IPromise<Contracts.TestCaseResult[]>
*/
bulkUpdateTestResults(resultUpdateModel: Contracts.TestCaseResultUpdateModel, project: string, runId: number, resultIds: number[]): IPromise<Contracts.TestCaseResult[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} testCaseResultId
* @param {boolean} includeIterationDetails
* @param {boolean} includeAssociatedBugs
* @return IPromise<Contracts.TestCaseResult>
*/
getTestCaseResultById(project: string, runId: number, testCaseResultId: number, includeIterationDetails: boolean, includeAssociatedBugs?: boolean): IPromise<Contracts.TestCaseResult>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {boolean} includeIterationDetails
* @return IPromise<Contracts.TestCaseResult[]>
*/
getTestCaseResults(project: string, runId: number, includeIterationDetails: boolean): IPromise<Contracts.TestCaseResult[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} testCaseResultId
* @param {Contracts.ResultDetails} detailsToInclude
* @return IPromise<Contracts.TestCaseResult[]>
*/
getTestResultById(project: string, runId: number, testCaseResultId: number, detailsToInclude?: Contracts.ResultDetails): IPromise<Contracts.TestCaseResult[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {Contracts.ResultDetails} detailsToInclude
* @param {number} skip
* @param {number} top
* @return IPromise<Contracts.TestCaseResult[]>
*/
getTestResults(project: string, runId: number, detailsToInclude?: Contracts.ResultDetails, skip?: number, top?: number): IPromise<Contracts.TestCaseResult[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestCaseResultUpdateModel[]} resultUpdateModels
* @param {string} project - Project ID or project name
* @param {number} runId
* @return IPromise<Contracts.TestCaseResult[]>
*/
updateTestResults(resultUpdateModels: Contracts.TestCaseResultUpdateModel[], project: string, runId: number): IPromise<Contracts.TestCaseResult[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestCaseResultIdentifier[]} ids
* @param {string} project - Project ID or project name
* @param {string[]} fields
* @return IPromise<Contracts.TestCaseResult[]>
*/
getTestResultsByIds(ids: Contracts.TestCaseResultIdentifier[], project: string, fields: string[]): IPromise<Contracts.TestCaseResult[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.QueryModel} query
* @param {string} project - Project ID or project name
* @param {boolean} includeResultDetails
* @param {boolean} includeIterationDetails
* @param {number} skip
* @param {number} top
* @return IPromise<Contracts.TestCaseResult[]>
*/
getTestResultsByQuery(query: Contracts.QueryModel, project: string, includeResultDetails?: boolean, includeIterationDetails?: boolean, skip?: number, top?: number): IPromise<Contracts.TestCaseResult[]>;
/**
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} testCaseResultId
* @param {number} iterationId
* @param {boolean} includeActionResults
* @return IPromise<Contracts.TestIterationDetailsModel>
*/
getTestIteration(project: string, runId: number, testCaseResultId: number, iterationId: number, includeActionResults?: boolean): IPromise<Contracts.TestIterationDetailsModel>;
/**
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} testCaseResultId
* @param {boolean} includeActionResults
* @return IPromise<Contracts.TestIterationDetailsModel[]>
*/
getTestIterations(project: string, runId: number, testCaseResultId: number, includeActionResults?: boolean): IPromise<Contracts.TestIterationDetailsModel[]>;
/**
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} testCaseResultId
* @param {number} iterationId
* @param {string} actionPath
* @return IPromise<Contracts.TestActionResultModel[]>
*/
getActionResults(project: string, runId: number, testCaseResultId: number, iterationId: number, actionPath?: string): IPromise<Contracts.TestActionResultModel[]>;
/**
* @param {string} project - Project ID or project name
* @param {number} runId
* @param {number} testCaseResultId
* @param {number} iterationId
* @param {string} paramName
* @return IPromise<Contracts.TestResultParameterModel[]>
*/
getResultParameters(project: string, runId: number, testCaseResultId: number, iterationId: number, paramName?: string): IPromise<Contracts.TestResultParameterModel[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} testRunId
* @param {number} testResultId
* @param {number} historyDays
* @param {number} top
* @return IPromise<Contracts.TestCaseResult[]>
*/
queryTestResultTrendReport(project: string, testRunId: number, testResultId: number, historyDays?: number, top?: number): IPromise<Contracts.TestCaseResult[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestResultTrendFilter} filter
* @param {string} project - Project ID or project name
* @param {number} buildCount
* @return IPromise<Contracts.AggregatedResultsForBuild[]>
*/
queryResultTrendForBuild(filter: Contracts.TestResultTrendFilter, project: string, buildCount?: number): IPromise<Contracts.AggregatedResultsForBuild[]>;
/**
* @param {string} project - Project ID or project name
* @param {number} runId
* @return IPromise<Contracts.TestRunStatistic>
*/
getTestRunStatistics(project: string, runId: number): IPromise<Contracts.TestRunStatistic>;
/**
* @param {Contracts.RunCreateModel} testRun
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.TestRun>
*/
createTestRun(testRun: Contracts.RunCreateModel, project: string): IPromise<Contracts.TestRun>;
/**
* @param {string} project - Project ID or project name
* @param {number} runId
* @return IPromise<void>
*/
deleteTestRun(project: string, runId: number): IPromise<void>;
/**
* @param {string} project - Project ID or project name
* @param {number} runId
* @return IPromise<Contracts.TestRun>
*/
getTestRunById(project: string, runId: number): IPromise<Contracts.TestRun>;
/**
* @param {string} project - Project ID or project name
* @param {string} buildUri
* @param {string} owner
* @param {string} tmiRunId
* @param {number} planId
* @param {boolean} includeRunDetails
* @param {boolean} automated
* @param {number} skip
* @param {number} top
* @return IPromise<Contracts.TestRun[]>
*/
getTestRuns(project: string, buildUri?: string, owner?: string, tmiRunId?: string, planId?: number, includeRunDetails?: boolean, automated?: boolean, skip?: number, top?: number): IPromise<Contracts.TestRun[]>;
/**
* @param {Contracts.RunUpdateModel} runUpdateModel
* @param {string} project - Project ID or project name
* @param {number} runId
* @return IPromise<Contracts.TestRun>
*/
updateTestRun(runUpdateModel: Contracts.RunUpdateModel, project: string, runId: number): IPromise<Contracts.TestRun>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.QueryModel} query
* @param {string} project - Project ID or project name
* @param {boolean} includeRunDetails
* @param {number} skip
* @param {number} top
* @return IPromise<Contracts.TestRun[]>
*/
getTestRunsByQuery(query: Contracts.QueryModel, project: string, includeRunDetails?: boolean, skip?: number, top?: number): IPromise<Contracts.TestRun[]>;
/**
* @param {string} project - Project ID or project name
* @param {number} planId
* @param {number} suiteId
* @param {string} testCaseIds
* @return IPromise<Contracts.SuiteTestCase[]>
*/
addTestCasesToSuite(project: string, planId: number, suiteId: number, testCaseIds: string): IPromise<Contracts.SuiteTestCase[]>;
/**
* @param {string} project - Project ID or project name
* @param {number} planId
* @param {number} suiteId
* @param {number} testCaseIds
* @return IPromise<Contracts.SuiteTestCase>
*/
getTestCaseById(project: string, planId: number, suiteId: number, testCaseIds: number): IPromise<Contracts.SuiteTestCase>;
/**
* @param {string} project - Project ID or project name
* @param {number} planId
* @param {number} suiteId
* @return IPromise<Contracts.SuiteTestCase[]>
*/
getTestCases(project: string, planId: number, suiteId: number): IPromise<Contracts.SuiteTestCase[]>;
/**
* @param {string} project - Project ID or project name
* @param {number} planId
* @param {number} suiteId
* @param {string} testCaseIds
* @return IPromise<void>
*/
removeTestCasesFromSuiteUrl(project: string, planId: number, suiteId: number, testCaseIds: string): IPromise<void>;
/**
* @param {Contracts.SuiteCreateModel} testSuite
* @param {string} project - Project ID or project name
* @param {number} planId
* @param {number} suiteId
* @return IPromise<Contracts.TestSuite[]>
*/
createTestSuite(testSuite: Contracts.SuiteCreateModel, project: string, planId: number, suiteId: number): IPromise<Contracts.TestSuite[]>;
/**
* @param {string} project - Project ID or project name
* @param {number} planId
* @param {number} suiteId
* @return IPromise<void>
*/
deleteTestSuite(project: string, planId: number, suiteId: number): IPromise<void>;
/**
* @param {string} project - Project ID or project name
* @param {number} planId
* @param {number} suiteId
* @param {boolean} includeChildSuites
* @return IPromise<Contracts.TestSuite>
*/
getTestSuiteById(project: string, planId: number, suiteId: number, includeChildSuites?: boolean): IPromise<Contracts.TestSuite>;
/**
* @param {string} project - Project ID or project name
* @param {number} planId
* @param {boolean} includeSuites
* @param {number} skip
* @param {number} top
* @return IPromise<Contracts.TestSuite[]>
*/
getTestSuitesForPlan(project: string, planId: number, includeSuites?: boolean, skip?: number, top?: number): IPromise<Contracts.TestSuite[]>;
/**
* @param {Contracts.SuiteUpdateModel} suiteUpdateModel
* @param {string} project - Project ID or project name
* @param {number} planId
* @param {number} suiteId
* @return IPromise<Contracts.TestSuite>
*/
updateTestSuite(suiteUpdateModel: Contracts.SuiteUpdateModel, project: string, planId: number, suiteId: number): IPromise<Contracts.TestSuite>;
/**
* @param {number} testCaseId
* @return IPromise<Contracts.TestSuite[]>
*/
getSuitesByTestCaseId(testCaseId: number): IPromise<Contracts.TestSuite[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} operationId
* @param {boolean} includeDetails
* @return IPromise<Contracts.CloneOperationInformation>
*/
getSuiteCloneInformation(project: string, operationId: number, includeDetails?: boolean): IPromise<Contracts.CloneOperationInformation>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestSuiteCloneRequest} cloneRequestBody
* @param {string} project - Project ID or project name
* @param {number} sourceSuiteId
* @param {number} planId
* @return IPromise<Contracts.CloneOperationInformation>
*/
cloneTestSuite(cloneRequestBody: Contracts.TestSuiteCloneRequest, project: string, sourceSuiteId: number, planId: number): IPromise<Contracts.CloneOperationInformation>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.BuildReference} build
* @param {string} project - Project ID or project name
* @param {string} sourceWorkflow
* @param {Contracts.BuildReference} buildToCompare
* @return IPromise<Contracts.TestFailuresAnalysis>
*/
queryFailureDetailsForBuild(build: Contracts.BuildReference, project: string, sourceWorkflow: string, buildToCompare: Contracts.BuildReference): IPromise<Contracts.TestFailuresAnalysis>;
/**
* @param {Contracts.TestSettings} testSettings
* @param {string} project - Project ID or project name
* @return IPromise<number>
*/
createTestSettings(testSettings: Contracts.TestSettings, project: string): IPromise<number>;
/**
* @param {string} project - Project ID or project name
* @param {number} testSettingsId
* @return IPromise<void>
*/
deleteTestSettings(project: string, testSettingsId: number): IPromise<void>;
/**
* @param {string} project - Project ID or project name
* @param {number} testSettingsId
* @return IPromise<Contracts.TestSettings>
*/
getTestSettingsById(project: string, testSettingsId: number): IPromise<Contracts.TestSettings>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestVariable} testVariable
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.TestVariable>
*/
createTestVariable(testVariable: Contracts.TestVariable, project: string): IPromise<Contracts.TestVariable>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} testVariableId
* @return IPromise<void>
*/
deleteTestVariable(project: string, testVariableId: number): IPromise<void>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} testVariableId
* @return IPromise<Contracts.TestVariable>
*/
getTestVariable(project: string, testVariableId: number): IPromise<Contracts.TestVariable>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number} skip
* @param {number} top
* @return IPromise<Contracts.TestVariable[]>
*/
getTestVariables(project: string, skip?: number, top?: number): IPromise<Contracts.TestVariable[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.TestVariable} testVariable
* @param {string} project - Project ID or project name
* @param {number} testVariableId
* @return IPromise<Contracts.TestVariable>
*/
updateTestVariable(testVariable: Contracts.TestVariable, project: string, testVariableId: number): IPromise<Contracts.TestVariable>;
}
/**
 * Convenience client targeting the latest released API version; inherits all
 * operations from TestHttpClient2_2.
 *
 * @param rootRequestPath - Base URL under which the Test API endpoints are rooted
 */
export class TestHttpClient extends TestHttpClient2_2 {
    constructor(rootRequestPath: string);
}
/**
* Gets an http client targeting the latest released version of the APIs.
*
* @return TestHttpClient2_2
*/
export function getClient(): TestHttpClient2_2;
}
declare module "TFS/VersionControl/Contracts" {
import TFS_Core_Contracts = require("TFS/Core/Contracts");
import VSS_Common_Contracts = require("VSS/WebApi/Contracts");
export interface AssociatedWorkItem {
assignedTo: string;
id: number;
state: string;
title: string;
/**
* REST url
*/
url: string;
webUrl: string;
workItemType: string;
}
export interface Change<T> {
changeType: VersionControlChangeType;
item: T;
newContent: ItemContent;
sourceServerItem: string;
url: string;
}
export interface ChangeCountDictionary {
}
export interface ChangeList<T> {
allChangesIncluded: boolean;
changeCounts: {
[key: string]: number;
};
changes: Change<T>[];
comment: string;
commentTruncated: boolean;
creationDate: Date;
notes: CheckinNote[];
owner: string;
ownerDisplayName: string;
ownerId: string;
sortDate: Date;
version: string;
}
/**
* Criteria used in a search for change lists
*/
export interface ChangeListSearchCriteria {
/**
* If provided, a version descriptor to compare against base
*/
compareVersion: string;
/**
* If true, don't include delete history entries
*/
excludeDeletes: boolean;
/**
* Whether or not to follow renames for the given item being queried
*/
followRenames: boolean;
/**
* If provided, only include history entries created after this date (string)
*/
fromDate: string;
/**
* If provided, a version descriptor for the earliest change list to include
*/
fromVersion: string;
/**
* Path of item to search under
*/
itemPath: string;
/**
* Version of the items to search
*/
itemVersion: string;
/**
* Number of results to skip (used when clicking more...)
*/
skip: number;
/**
* If provided, only include history entries created before this date (string)
*/
toDate: string;
/**
* If provided, the maximum number of history entries to return
*/
top: number;
/**
* If provided, a version descriptor for the latest change list to include
*/
toVersion: string;
/**
* Alias or display name of user who made the changes
*/
user: string;
}
export interface CheckinNote {
name: string;
value: string;
}
export interface FileContentMetadata {
contentType: string;
encoding: number;
extension: string;
fileName: string;
isBinary: boolean;
isImage: boolean;
vsLink: string;
}
export interface GitBaseVersionDescriptor extends GitVersionDescriptor {
/**
* Version string identifier (name of tag/branch, SHA1 of commit)
*/
baseVersion: string;
/**
* Version options - Specify additional modifiers to version (e.g Previous)
*/
baseVersionOptions: GitVersionOptions;
/**
* Version type (branch, tag, or commit). Determines how Id is interpreted
*/
baseVersionType: GitVersionType;
}
export interface GitBlobRef {
_links: any;
/**
* SHA1 hash of git object
*/
objectId: string;
/**
* Size of blob content (in bytes)
*/
size: number;
url: string;
}
export interface GitBranchStats {
aheadCount: number;
behindCount: number;
commit: GitCommitRef;
isBaseVersion: boolean;
name: string;
}
export interface GitChange extends Change<GitItem> {
}
export interface GitCommit extends GitCommitRef {
push: GitPushRef;
treeId: string;
}
export interface GitCommitChanges {
changeCounts: ChangeCountDictionary;
changes: GitChange[];
}
export interface GitCommitDiffs {
aheadCount: number;
allChangesIncluded: boolean;
baseCommit: string;
behindCount: number;
changeCounts: {
[key: string]: number;
};
changes: GitChange[];
commonCommit: string;
targetCommit: string;
}
export interface GitCommitRef {
_links: any;
author: GitUserDate;
changeCounts: ChangeCountDictionary;
changes: GitChange[];
comment: string;
commentTruncated: boolean;
commitId: string;
committer: GitUserDate;
parents: string[];
remoteUrl: string;
url: string;
}
export interface GitCommitToCreate {
baseRef: GitRef;
comment: string;
pathActions: GitPathAction[];
}
export interface GitDeletedRepository {
createdDate: Date;
deletedBy: VSS_Common_Contracts.IdentityRef;
deletedDate: Date;
id: string;
name: string;
project: TFS_Core_Contracts.TeamProjectReference;
}
export interface GitHistoryQueryResults extends HistoryQueryResults<GitItem> {
/**
* Seed commit used for querying history. Used for skip feature.
*/
startingCommitId: string;
unpopulatedCount: number;
unprocessedCount: number;
}
export interface GitItem extends ItemModel {
    /**
     * SHA1 of the commit this item was fetched at
     */
    commitId: string;
    /**
     * Type of object (Commit, Tree, Blob, Tag, ...)
     */
    gitObjectType: GitObjectType;
    /**
     * Shallow ref to the commit that last changed this item. Only populated if
     * latestProcessedChange is requested; may not be accurate if the latest
     * change is not yet cached.
     */
    latestProcessedChange: GitCommitRef;
    /**
     * Git object id
     */
    objectId: string;
    /**
     * Git object id of the original item — presumably the pre-change/pre-rename
     * object id, as distinct from objectId; TODO confirm against server contract
     */
    originalObjectId: string;
}
export interface GitItemDescriptor {
    /**
     * Path to item
     */
    path: string;
    /**
     * Specifies whether to include children (OneLevel), all descendants (Full), or None
     */
    recursionLevel: VersionControlRecursionType;
    /**
     * Version string (interpretation based on VersionType defined in subclass)
     */
    version: string;
    /**
     * Version modifiers (e.g. previous)
     */
    versionOptions: GitVersionOptions;
    /**
     * How to interpret version (branch, tag, commit)
     */
    versionType: GitVersionType;
}
export interface GitItemRequestData {
/**
* Whether to include metadata for all items
*/
includeContentMetadata: boolean;
/**
* Whether to include the _links field on the shallow references
*/
includeLinks: boolean;
/**
* Collection of items to fetch, including path, version, and recursion level
*/
itemDescriptors: GitItemDescriptor[];
/**
* Whether to include shallow ref to commit that last changed each item
*/
latestProcessedChange: boolean;
}
/**
* Encapsulates the reference metadata of a Git media object.
*/
export interface GitMediaObjectRef {
/**
* Gets or sets the reference links of the Git media object.
*/
_links: any;
/**
* Gets or sets the Git media object identifier. This Id property duplicates the Oid property, but is required by the VSTS REST specification.
*/
id: string;
/**
* Gets or sets the Git media object identifier. This property exists for adherence to the GitHub Git Media contract.
*/
oid: string;
/**
* Gets or sets the size of the Git media object in bytes. This property exists for adherence to the GitHub Git Media contract.
*/
size: number;
/**
* Gets or sets the URL for the Git media object.
*/
url: string;
}
export enum GitObjectType {
Bad = 0,
Commit = 1,
Tree = 2,
Blob = 3,
Tag = 4,
Ext2 = 5,
OfsDelta = 6,
RefDelta = 7,
}
export interface GitPathAction {
action: GitPathActions;
base64Content: string;
path: string;
rawTextContent: string;
targetPath: string;
}
export enum GitPathActions {
None = 0,
Edit = 1,
Delete = 2,
Add = 3,
Rename = 4,
}
export interface GitPullRequest {
_links: any;
closedDate: Date;
codeReviewId: number;
commits: GitCommitRef[];
completionOptions: GitPullRequestCompletionOptions;
completionQueueTime: Date;
createdBy: VSS_Common_Contracts.IdentityRef;
creationDate: Date;
description: string;
lastMergeCommit: GitCommitRef;
lastMergeSourceCommit: GitCommitRef;
lastMergeTargetCommit: GitCommitRef;
mergeId: string;
mergeStatus: PullRequestAsyncStatus;
pullRequestId: number;
remoteUrl: string;
repository: GitRepository;
reviewers: IdentityRefWithVote[];
sourceRefName: string;
status: PullRequestStatus;
targetRefName: string;
title: string;
upgraded: boolean;
url: string;
workItemRefs: VSS_Common_Contracts.ResourceRef[];
}
export interface GitPullRequestCompletionOptions {
deleteSourceBranch: boolean;
mergeCommitMessage: string;
squashMerge: boolean;
}
export interface GitPullRequestSearchCriteria {
creatorId: string;
/**
* Whether to include the _links field on the shallow references
*/
includeLinks: boolean;
repositoryId: string;
reviewerId: string;
sourceRefName: string;
status: PullRequestStatus;
targetRefName: string;
}
export interface GitPush extends GitPushRef {
commits: GitCommitRef[];
refUpdates: GitRefUpdate[];
repository: GitRepository;
}
export interface GitPushEventData {
afterId: string;
beforeId: string;
branch: string;
commits: GitCommit[];
repository: GitRepository;
}
export interface GitPushRef {
_links: any;
date: Date;
pushCorrelationId: string;
pushedBy: VSS_Common_Contracts.IdentityRef;
pushId: number;
url: string;
}
export interface GitPushSearchCriteria {
fromDate: Date;
/**
* Whether to include the _links field on the shallow references
*/
includeLinks: boolean;
includeRefUpdates: boolean;
pusherId: string;
refName: string;
toDate: Date;
}
export interface GitQueryCommitsCriteria {
/**
* Number of entries to skip
*/
$skip: number;
/**
* Maximum number of entries to retrieve
*/
$top: number;
/**
* Alias or display name of the author
*/
author: string;
/**
* If provided, the earliest commit in the graph to search
*/
compareVersion: GitVersionDescriptor;
/**
* If true, don't include delete history entries
*/
excludeDeletes: boolean;
/**
* If provided, a lower bound for filtering commits alphabetically
*/
fromCommitId: string;
/**
* If provided, only include history entries created after this date (string)
*/
fromDate: string;
/**
* If provided, specifies the exact commit ids of the commits to fetch. May not be combined with other parameters.
*/
ids: string[];
/**
* Whether to include the _links field on the shallow references
*/
includeLinks: boolean;
/**
* Path of item to search under
*/
itemPath: string;
/**
* If provided, identifies the commit or branch to search
*/
itemVersion: GitVersionDescriptor;
/**
* If provided, an upper bound for filtering commits alphabetically
*/
toCommitId: string;
/**
* If provided, only include history entries created before this date (string)
*/
toDate: string;
/**
* Alias or display name of the committer
*/
user: string;
}
export interface GitRef {
_links: any;
isLockedBy: VSS_Common_Contracts.IdentityRef;
name: string;
objectId: string;
statuses: GitStatus[];
url: string;
}
export interface GitRefUpdate {
name: string;
newObjectId: string;
oldObjectId: string;
repositoryId: string;
}
export enum GitRefUpdateMode {
/**
* Indicates the Git protocol model where any refs that can be updated will be updated, but any failures will not prevent other updates from succeeding.
*/
BestEffort = 0,
/**
* Indicates that all ref updates must succeed or none will succeed. All ref updates will be atomically written. If any failure is encountered, previously successful updates will be rolled back and the entire operation will fail.
*/
AllOrNone = 1,
}
export interface GitRefUpdateResult {
    /**
     * Custom message for the result object; for instance, the reason for failing.
     */
    customMessage: string;
    /**
     * Ref name
     */
    name: string;
    /**
     * New object ID
     */
    newObjectId: string;
    /**
     * Old object ID
     */
    oldObjectId: string;
    /**
     * Name of the plugin that rejected the update.
     */
    rejectedBy: string;
    /**
     * Repository ID
     */
    repositoryId: string;
    /**
     * True if the ref update succeeded, false otherwise
     */
    success: boolean;
    /**
     * Status of the update from the TFS server.
     */
    updateStatus: GitRefUpdateStatus;
}
export interface GitRefUpdateResultSet {
countFailed: number;
countSucceeded: number;
pushCorrelationId: string;
pushIds: {
[key: string]: number;
};
pushTime: Date;
results: GitRefUpdateResult[];
}
export enum GitRefUpdateStatus {
/**
* Indicates that the ref update request was completed successfully.
*/
Succeeded = 0,
/**
* Indicates that the ref update request could not be completed because part of the graph would be disconnected by this change, and the caller does not have ForcePush permission on the repository.
*/
ForcePushRequired = 1,
/**
* Indicates that the ref update request could not be completed because the old object ID presented in the request was not the object ID of the ref when the database attempted the update. The most likely scenario is that the caller lost a race to update the ref.
*/
StaleOldObjectId = 2,
/**
* Indicates that the ref update request could not be completed because the ref name presented in the request was not valid.
*/
InvalidRefName = 3,
/**
* The request was not processed
*/
Unprocessed = 4,
/**
* The ref update request could not be completed because the new object ID for the ref could not be resolved to a commit object (potentially through any number of tags)
*/
UnresolvableToCommit = 5,
/**
* The ref update request could not be completed because the user lacks write permissions required to write this ref
*/
WritePermissionRequired = 6,
/**
* The ref update request could not be completed because the user lacks note creation permissions required to write this note
*/
ManageNotePermissionRequired = 7,
/**
* The ref update request could not be completed because the user lacks the permission to create a branch
*/
CreateBranchPermissionRequired = 8,
/**
* The ref update request could not be completed because the user lacks the permission to create a tag
*/
CreateTagPermissionRequired = 9,
/**
* The ref update could not be completed because it was rejected by the plugin.
*/
RejectedByPlugin = 10,
/**
* The ref update could not be completed because the ref is locked by another user.
*/
Locked = 11,
/**
* The ref update could not be completed because, in case-insensitive mode, the ref name conflicts with an existing, differently-cased ref name.
*/
RefNameConflict = 12,
/**
* The ref update could not be completed because it was rejected by policy.
*/
RejectedByPolicy = 13,
/**
* Indicates that the ref update request was completed successfully, but the ref doesn't actually exist so no changes were made. This should only happen during deletes.
*/
SucceededNonExistentRef = 14,
/**
* Indicates that the ref update request was completed successfully, but the passed-in ref was corrupt - as in, the old object ID was bad. This should only happen during deletes.
*/
SucceededCorruptRef = 15,
}
export interface GitRepository {
_links: any;
defaultBranch: string;
id: string;
name: string;
project: TFS_Core_Contracts.TeamProjectReference;
remoteUrl: string;
url: string;
}
export enum GitRepositoryPermissions {
None = 0,
Administer = 1,
GenericRead = 2,
GenericContribute = 4,
ForcePush = 8,
CreateBranch = 16,
CreateTag = 32,
ManageNote = 64,
PolicyExempt = 128,
/**
* This defines the set of bits that are valid for the git permission space. When reading or writing git permissions, these are the only bits paid attention too.
*/
All = 255,
BranchLevelPermissions = 141,
}
export interface GitStatus {
_links: any;
context: GitStatusContext;
createdBy: VSS_Common_Contracts.IdentityRef;
creationDate: Date;
description: string;
state: GitStatusState;
targetUrl: string;
}
export interface GitStatusContext {
genre: string;
name: string;
}
export enum GitStatusState {
NotSet = 0,
Pending = 1,
Succeeded = 2,
Failed = 3,
Error = 4,
}
export interface GitSuggestion {
properties: {
[key: string]: any;
};
type: string;
}
export interface GitTargetVersionDescriptor extends GitVersionDescriptor {
/**
* Version string identifier (name of tag/branch, SHA1 of commit)
*/
targetVersion: string;
/**
* Version options - Specify additional modifiers to version (e.g Previous)
*/
targetVersionOptions: GitVersionOptions;
/**
* Version type (branch, tag, or commit). Determines how Id is interpreted
*/
targetVersionType: GitVersionType;
}
export interface GitTreeEntryRef {
/**
* Blob or tree
*/
gitObjectType: GitObjectType;
/**
* Mode represented as octal string
*/
mode: string;
/**
* SHA1 hash of git object
*/
objectId: string;
/**
* Path relative to parent tree object
*/
relativePath: string;
/**
* Size of content
*/
size: number;
/**
* url to retrieve tree or blob
*/
url: string;
}
export interface GitTreeRef {
_links: any;
/**
* SHA1 hash of git object
*/
objectId: string;
/**
* Sum of sizes of all children
*/
size: number;
/**
* Blobs and trees under this tree
*/
treeEntries: GitTreeEntryRef[];
/**
* Url to tree
*/
url: string;
}
export interface GitUserDate {
date: Date;
email: string;
name: string;
}
export interface GitVersionDescriptor {
/**
* Version string identifier (name of tag/branch/index, SHA1 of commit)
*/
version: string;
/**
* Version options - Specify additional modifiers to version (e.g Previous)
*/
versionOptions: GitVersionOptions;
/**
* Version type (branch, tag, commit, or index). Determines how Id is interpreted
*/
versionType: GitVersionType;
}
export enum GitVersionOptions {
/**
* Not specified
*/
None = 0,
/**
* Commit that changed item prior to the current version
*/
PreviousChange = 1,
/**
* First parent of commit (HEAD^)
*/
FirstParent = 2,
}
export enum GitVersionType {
/**
* Interpret the version as a branch name
*/
Branch = 0,
/**
* Interpret the version as a tag name
*/
Tag = 1,
/**
* Interpret the version as a commit ID (SHA1)
*/
Commit = 2,
/**
* Interpret the version as an index name
*/
Index = 3,
}
export interface HistoryEntry<T> {
/**
* The Change list (changeset/commit/shelveset) for this point in history
*/
changeList: ChangeList<T>;
/**
* The change made to the item from this change list (only relevant for File history, not folders)
*/
itemChangeType: VersionControlChangeType;
/**
* The path of the item at this point in history (only relevant for File history, not folders)
*/
serverItem: string;
}
export interface HistoryQueryResults<T> {
    /**
     * True if there are more results available to fetch, i.e. the maximum
     * number of items requested was returned by this query.
     */
    moreResultsAvailable: boolean;
    /**
     * The history entries (results) from this query
     */
    results: HistoryEntry<T>[];
}
export interface IdentityRefWithVote extends VSS_Common_Contracts.IdentityRef {
isRequired: boolean;
reviewerUrl: string;
vote: number;
votedFor: IdentityRefWithVote[];
}
export interface IncludedGitCommit {
commitId: string;
commitTime: Date;
parentCommitIds: string[];
repositoryId: string;
}
export interface ItemContent {
content: string;
contentType: ItemContentType;
}
export enum ItemContentType {
RawText = 0,
Base64Encoded = 1,
}
/**
* Optional details to include when returning an item model
*/
export interface ItemDetailsOptions {
/**
* If true, include metadata about the file type
*/
includeContentMetadata: boolean;
/**
* Specifies whether to include children (OneLevel), all descendants (Full) or None for folder items
*/
recursionLevel: VersionControlRecursionType;
}
export interface ItemModel {
_links: any;
contentMetadata: FileContentMetadata;
isFolder: boolean;
isSymLink: boolean;
path: string;
url: string;
}
export enum PullRequestAsyncStatus {
NotSet = 0,
Queued = 1,
Conflicts = 2,
Succeeded = 3,
RejectedByPolicy = 4,
Failure = 5,
}
export enum PullRequestStatus {
NotSet = 0,
Active = 1,
Abandoned = 2,
Completed = 3,
All = 4,
}
export interface TfvcBranch extends TfvcBranchRef {
children: TfvcBranch[];
mappings: TfvcBranchMapping[];
parent: TfvcShallowBranchRef;
relatedBranches: TfvcShallowBranchRef[];
}
export interface TfvcBranchMapping {
depth: string;
serverItem: string;
type: string;
}
export interface TfvcBranchRef extends TfvcShallowBranchRef {
_links: any;
createdDate: Date;
description: string;
isDeleted: boolean;
owner: VSS_Common_Contracts.IdentityRef;
url: string;
}
export interface TfvcChange extends Change<TfvcItem> {
/**
* List of merge sources in case of rename or branch creation.
*/
mergeSources: TfvcMergeSource[];
/**
* Version at which a (shelved) change was pended against
*/
pendingVersion: number;
}
export interface TfvcChangeset extends TfvcChangesetRef {
accountId: string;
changes: TfvcChange[];
checkinNotes: CheckinNote[];
collectionId: string;
hasMoreChanges: boolean;
policyOverride: TfvcPolicyOverrideInfo;
teamProjectIds: string[];
workItems: AssociatedWorkItem[];
}
export interface TfvcChangesetRef {
_links: any;
author: VSS_Common_Contracts.IdentityRef;
changesetId: number;
checkedInBy: VSS_Common_Contracts.IdentityRef;
comment: string;
commentTruncated: boolean;
createdDate: Date;
url: string;
}
/**
 * Criteria used in a search for change lists
 */
export interface TfvcChangesetSearchCriteria {
    /**
     * Alias or display name of user who made the changes
     */
    author: string;
    /**
     * Whether or not to follow renames for the given item being queried
     */
    followRenames: boolean;
    /**
     * If provided, only include changesets created after this date (string)
     */
    fromDate: string;
    /**
     * If provided, only include changesets after this changeset ID
     */
    fromId: number;
    /**
     * Whether to include the _links field on the shallow references
     */
    includeLinks: boolean;
    /**
     * Path of item to search under
     */
    path: string;
    /**
     * If provided, only include changesets created before this date (string)
     */
    toDate: string;
    /**
     * If provided, upper bound on changeset IDs to include (counterpart of
     * fromId; inclusivity not documented here — confirm against server behavior)
     */
    toId: number;
}
export interface TfvcChangesetsRequestData {
changesetIds: number[];
commentLength: number;
/**
* Whether to include the _links field on the shallow references
*/
includeLinks: boolean;
}
export interface TfvcCheckinEventData {
changeset: TfvcChangeset;
project: TFS_Core_Contracts.TeamProjectReference;
}
export interface TfvcHistoryEntry extends HistoryEntry<TfvcItem> {
/**
* The encoding of the item at this point in history (only relevant for File history, not folders)
*/
encoding: number;
/**
* The file id of the item at this point in history (only relevant for File history, not folders)
*/
fileId: number;
}
export interface TfvcItem extends ItemModel {
changeDate: Date;
deletionId: number;
/**
* MD5 hash as a base 64 string, applies to files only.
*/
hashValue: string;
isBranch: boolean;
isPendingChange: boolean;
/**
* The size of the file, if applicable.
*/
size: number;
version: number;
}
/**
* Item path and Version descriptor properties
*/
export interface TfvcItemDescriptor {
path: string;
recursionLevel: VersionControlRecursionType;
version: string;
versionOption: TfvcVersionOption;
versionType: TfvcVersionType;
}
export interface TfvcItemRequestData {
/**
* If true, include metadata about the file type
*/
includeContentMetadata: boolean;
/**
* Whether to include the _links field on the shallow references
*/
includeLinks: boolean;
itemDescriptors: TfvcItemDescriptor[];
}
export interface TfvcLabel extends TfvcLabelRef {
items: TfvcItem[];
}
export interface TfvcLabelRef {
_links: any;
description: string;
id: number;
labelScope: string;
modifiedDate: Date;
name: string;
owner: VSS_Common_Contracts.IdentityRef;
url: string;
}
export interface TfvcLabelRequestData {
/**
* Whether to include the _links field on the shallow references
*/
includeLinks: boolean;
itemLabelFilter: string;
labelScope: string;
maxItemCount: number;
name: string;
owner: string;
}
export interface TfvcMergeSource {
/**
* Indicates if this a rename source. If false, it is a merge source.
*/
isRename: boolean;
/**
* The server item of the merge source
*/
serverItem: string;
/**
* Start of the version range
*/
versionFrom: number;
/**
* End of the version range
*/
versionTo: number;
}
export interface TfvcPolicyFailureInfo {
message: string;
policyName: string;
}
export interface TfvcPolicyOverrideInfo {
comment: string;
policyFailures: TfvcPolicyFailureInfo[];
}
export interface TfvcShallowBranchRef {
path: string;
}
export interface TfvcShelveset extends TfvcShelvesetRef {
changes: TfvcChange[];
notes: CheckinNote[];
policyOverride: TfvcPolicyOverrideInfo;
workItems: AssociatedWorkItem[];
}
export interface TfvcShelvesetRef {
_links: any;
comment: string;
commentTruncated: boolean;
createdDate: Date;
id: string;
name: string;
owner: VSS_Common_Contracts.IdentityRef;
url: string;
}
/**
 * Filter/query options used when requesting shelvesets.
 */
export interface TfvcShelvesetRequestData {
    /**
     * Whether to include policyOverride and notes
     */
    includeDetails: boolean;
    /**
     * Whether to include the _links field on the shallow references
     */
    includeLinks: boolean;
    /**
     * Whether to include workItems
     */
    includeWorkItems: boolean;
    /**
     * Max number of changes to include
     */
    maxChangeCount: number;
    /**
     * Max length of comment
     */
    maxCommentLength: number;
    /**
     * Shelveset's name
     */
    name: string;
    /**
     * Owner's ID. Could be a name or a guid.
     */
    owner: string;
}
/**
 * Identifies a TFVC version: a version string whose interpretation is
 * given by `versionType` (changeset, shelveset, date, latest, ...),
 * refined by `versionOption` (e.g. Previous).
 */
export interface TfvcVersionDescriptor {
    version: string;
    versionOption: TfvcVersionOption;
    versionType: TfvcVersionType;
}
/**
 * Modifier applied when resolving a TfvcVersionDescriptor.
 */
export enum TfvcVersionOption {
    None = 0,
    /** Use the version immediately before the specified one. */
    Previous = 1,
    /** Follow the item through a rename — TODO confirm exact semantics. */
    UseRename = 2,
}
/**
 * Tells how the `version` string of a TfvcVersionDescriptor is interpreted.
 */
export enum TfvcVersionType {
    None = 0,
    Changeset = 1,
    Shelveset = 2,
    Change = 3,
    Date = 4,
    Latest = 5,
    Tip = 6,
    MergeSource = 7,
}
/**
 * Batch request for updating git refs.
 */
export interface UpdateRefsRequest {
    refUpdateRequests: GitRefUpdate[];
    /** Whether the updates are applied best-effort or all-or-none (GitRefUpdateMode). */
    updateMode: GitRefUpdateMode;
}
/**
 * Bit flags describing the kind(s) of change applied to an item.
 * Values are powers of two and may be combined; All (8191) is the
 * bitwise union of every flag except None.
 */
export enum VersionControlChangeType {
    None = 0,
    Add = 1,
    Edit = 2,
    Encoding = 4,
    Rename = 8,
    Delete = 16,
    Undelete = 32,
    Branch = 64,
    Merge = 128,
    Lock = 256,
    Rollback = 512,
    SourceRename = 1024,
    TargetRename = 2048,
    Property = 4096,
    All = 8191,
}
/**
 * Version-control capabilities of a team project.
 */
export interface VersionControlProjectInfo {
    /** Which source control type the project uses by default (Git or TFVC). */
    defaultSourceControlType: TFS_Core_Contracts.SourceControlTypes;
    project: TFS_Core_Contracts.TeamProjectReference;
    /** True if the project supports Git repositories. */
    supportsGit: boolean;
    /** True if the project supports TFVC. */
    supportsTFVC: boolean;
}
/**
 * Controls how deeply item queries recurse into folders.
 * Note the numeric values are not contiguous (Full = 120).
 */
export enum VersionControlRecursionType {
    /**
     * Only return the specified item.
     */
    None = 0,
    /**
     * Return the specified item and its direct children.
     */
    OneLevel = 1,
    /**
     * Return the specified item and its direct children, as well as recursive chains of nested child folders that only contain a single folder.
     */
    OneLevelPlusNestedEmptyFolders = 4,
    /**
     * Return specified item and all descendants
     */
    Full = 120,
}
/**
 * Per-contract runtime type metadata for this module. Each entry carries
 * either a `fields` bag (per-field info, typed `any` here) or, for enums,
 * an `enumValues` map from camelCase member name to its numeric value.
 * Presumably consumed by the REST layer for (de)serialization — TODO confirm.
 */
export var TypeInfo: {
    AssociatedWorkItem: {
        fields: any;
    };
    Change: {
        fields: any;
    };
    ChangeCountDictionary: {
        fields: any;
    };
    ChangeList: {
        fields: any;
    };
    ChangeListSearchCriteria: {
        fields: any;
    };
    CheckinNote: {
        fields: any;
    };
    FileContentMetadata: {
        fields: any;
    };
    GitBaseVersionDescriptor: {
        fields: any;
    };
    GitBlobRef: {
        fields: any;
    };
    GitBranchStats: {
        fields: any;
    };
    GitChange: {
        fields: any;
    };
    GitCommit: {
        fields: any;
    };
    GitCommitChanges: {
        fields: any;
    };
    GitCommitDiffs: {
        fields: any;
    };
    GitCommitRef: {
        fields: any;
    };
    GitCommitToCreate: {
        fields: any;
    };
    GitDeletedRepository: {
        fields: any;
    };
    GitHistoryQueryResults: {
        fields: any;
    };
    GitItem: {
        fields: any;
    };
    GitItemDescriptor: {
        fields: any;
    };
    GitItemRequestData: {
        fields: any;
    };
    GitMediaObjectRef: {
        fields: any;
    };
    GitObjectType: {
        enumValues: {
            "bad": number;
            "commit": number;
            "tree": number;
            "blob": number;
            "tag": number;
            "ext2": number;
            "ofsDelta": number;
            "refDelta": number;
        };
    };
    GitPathAction: {
        fields: any;
    };
    GitPathActions: {
        enumValues: {
            "none": number;
            "edit": number;
            "delete": number;
            "add": number;
            "rename": number;
        };
    };
    GitPullRequest: {
        fields: any;
    };
    GitPullRequestCompletionOptions: {
        fields: any;
    };
    GitPullRequestSearchCriteria: {
        fields: any;
    };
    GitPush: {
        fields: any;
    };
    GitPushEventData: {
        fields: any;
    };
    GitPushRef: {
        fields: any;
    };
    GitPushSearchCriteria: {
        fields: any;
    };
    GitQueryCommitsCriteria: {
        fields: any;
    };
    GitRef: {
        fields: any;
    };
    GitRefUpdate: {
        fields: any;
    };
    GitRefUpdateMode: {
        enumValues: {
            "bestEffort": number;
            "allOrNone": number;
        };
    };
    GitRefUpdateResult: {
        fields: any;
    };
    GitRefUpdateResultSet: {
        fields: any;
    };
    GitRefUpdateStatus: {
        enumValues: {
            "succeeded": number;
            "forcePushRequired": number;
            "staleOldObjectId": number;
            "invalidRefName": number;
            "unprocessed": number;
            "unresolvableToCommit": number;
            "writePermissionRequired": number;
            "manageNotePermissionRequired": number;
            "createBranchPermissionRequired": number;
            "createTagPermissionRequired": number;
            "rejectedByPlugin": number;
            "locked": number;
            "refNameConflict": number;
            "rejectedByPolicy": number;
            "succeededNonExistentRef": number;
            "succeededCorruptRef": number;
        };
    };
    GitRepository: {
        fields: any;
    };
    GitRepositoryPermissions: {
        enumValues: {
            "none": number;
            "administer": number;
            "genericRead": number;
            "genericContribute": number;
            "forcePush": number;
            "createBranch": number;
            "createTag": number;
            "manageNote": number;
            "policyExempt": number;
            "all": number;
            "branchLevelPermissions": number;
        };
    };
    GitStatus: {
        fields: any;
    };
    GitStatusContext: {
        fields: any;
    };
    GitStatusState: {
        enumValues: {
            "notSet": number;
            "pending": number;
            "succeeded": number;
            "failed": number;
            "error": number;
        };
    };
    GitSuggestion: {
        fields: any;
    };
    GitTargetVersionDescriptor: {
        fields: any;
    };
    GitTreeEntryRef: {
        fields: any;
    };
    GitTreeRef: {
        fields: any;
    };
    GitUserDate: {
        fields: any;
    };
    GitVersionDescriptor: {
        fields: any;
    };
    GitVersionOptions: {
        enumValues: {
            "none": number;
            "previousChange": number;
            "firstParent": number;
        };
    };
    GitVersionType: {
        enumValues: {
            "branch": number;
            "tag": number;
            "commit": number;
            "index": number;
        };
    };
    HistoryEntry: {
        fields: any;
    };
    HistoryQueryResults: {
        fields: any;
    };
    IdentityRefWithVote: {
        fields: any;
    };
    IncludedGitCommit: {
        fields: any;
    };
    ItemContent: {
        fields: any;
    };
    ItemContentType: {
        enumValues: {
            "rawText": number;
            "base64Encoded": number;
        };
    };
    ItemDetailsOptions: {
        fields: any;
    };
    ItemModel: {
        fields: any;
    };
    PullRequestAsyncStatus: {
        enumValues: {
            "notSet": number;
            "queued": number;
            "conflicts": number;
            "succeeded": number;
            "rejectedByPolicy": number;
            "failure": number;
        };
    };
    PullRequestStatus: {
        enumValues: {
            "notSet": number;
            "active": number;
            "abandoned": number;
            "completed": number;
            "all": number;
        };
    };
    TfvcBranch: {
        fields: any;
    };
    TfvcBranchMapping: {
        fields: any;
    };
    TfvcBranchRef: {
        fields: any;
    };
    TfvcChange: {
        fields: any;
    };
    TfvcChangeset: {
        fields: any;
    };
    TfvcChangesetRef: {
        fields: any;
    };
    TfvcChangesetSearchCriteria: {
        fields: any;
    };
    TfvcChangesetsRequestData: {
        fields: any;
    };
    TfvcCheckinEventData: {
        fields: any;
    };
    TfvcHistoryEntry: {
        fields: any;
    };
    TfvcItem: {
        fields: any;
    };
    TfvcItemDescriptor: {
        fields: any;
    };
    TfvcItemRequestData: {
        fields: any;
    };
    TfvcLabel: {
        fields: any;
    };
    TfvcLabelRef: {
        fields: any;
    };
    TfvcLabelRequestData: {
        fields: any;
    };
    TfvcMergeSource: {
        fields: any;
    };
    TfvcPolicyFailureInfo: {
        fields: any;
    };
    TfvcPolicyOverrideInfo: {
        fields: any;
    };
    TfvcShallowBranchRef: {
        fields: any;
    };
    TfvcShelveset: {
        fields: any;
    };
    TfvcShelvesetRef: {
        fields: any;
    };
    TfvcShelvesetRequestData: {
        fields: any;
    };
    TfvcVersionDescriptor: {
        fields: any;
    };
    TfvcVersionOption: {
        enumValues: {
            "none": number;
            "previous": number;
            "useRename": number;
        };
    };
    TfvcVersionType: {
        enumValues: {
            "none": number;
            "changeset": number;
            "shelveset": number;
            "change": number;
            "date": number;
            "latest": number;
            "tip": number;
            "mergeSource": number;
        };
    };
    UpdateRefsRequest: {
        fields: any;
    };
    VersionControlChangeType: {
        enumValues: {
            "none": number;
            "add": number;
            "edit": number;
            "encoding": number;
            "rename": number;
            "delete": number;
            "undelete": number;
            "branch": number;
            "merge": number;
            "lock": number;
            "rollback": number;
            "sourceRename": number;
            "targetRename": number;
            "property": number;
            "all": number;
        };
    };
    VersionControlProjectInfo: {
        fields: any;
    };
    VersionControlRecursionType: {
        enumValues: {
            "none": number;
            "oneLevel": number;
            "oneLevelPlusNestedEmptyFolders": number;
            "full": number;
        };
    };
};
}
declare module "TFS/VersionControl/Controls" {
import Contracts_Platform = require("VSS/Common/Contracts/Platform");
/**
 * Instance of the contributed history list control.
 */
export interface IHistoryList {
    /**
     * Query the history by providing certain searchCriteria
     * @param itemPath itemPath for control to search history in Git and Tfvc
     * @param fromVersion fromId for control to search history in Git and Tfvc
     * @param toVersion toId for control to search history in Git and Tfvc
     * @param repositoryId Optional repository Id for control to search history in Git
     */
    createHistoryList(itemPath: string, fromVersion: string, toVersion: string, repositoryId?: string): any;
}
/**
 * Control showing the history list control
 */
export module HistoryList {
    /** Contribution id identifying the history list control. */
    var contributionId: string;
    /**
     * Create an instance of the history list control
     *
     * @param $container Container element to create the history list control in
     * @param options History list control options
     * @param webContext Optional web context to scope the control to
     */
    function create($container: JQuery, options?: any, webContext?: Contracts_Platform.WebContext): IPromise<IHistoryList>;
}
}
declare module "TFS/VersionControl/GitRestClient" {
import Contracts = require("TFS/VersionControl/Contracts");
import VSS_Common_Contracts = require("VSS/WebApi/Contracts");
import VSS_WebApi = require("VSS/WebApi/RestClient");
/**
 * HTTP client for the Git REST API (version 2.2): blobs, branches,
 * commits, items, pull requests, pushes, refs, repositories, commit
 * statuses, suggestions and trees.
 */
export class GitHttpClient2_2 extends VSS_WebApi.VssHttpClient {
    /** Id of the service instance that hosts these APIs. */
    static serviceInstanceId: string;
    /** @param rootRequestPath Root URL prefix that request routes are appended to. */
    constructor(rootRequestPath: string);
    /**
     * Gets a single blob.
     *
     * @param {string} repositoryId
     * @param {string} sha1
     * @param {string} project - Project ID or project name
     * @param {boolean} download
     * @param {string} fileName
     * @return IPromise<Contracts.GitBlobRef>
     */
    getBlob(repositoryId: string, sha1: string, project?: string, download?: boolean, fileName?: string): IPromise<Contracts.GitBlobRef>;
    /**
     * Gets a single blob.
     *
     * @param {string} repositoryId
     * @param {string} sha1
     * @param {string} project - Project ID or project name
     * @param {boolean} download
     * @param {string} fileName
     * @return IPromise<ArrayBuffer>
     */
    getBlobContent(repositoryId: string, sha1: string, project?: string, download?: boolean, fileName?: string): IPromise<ArrayBuffer>;
    /**
     * Gets one or more blobs in a zip file download.
     *
     * @param {string[]} blobIds
     * @param {string} repositoryId
     * @param {string} project - Project ID or project name
     * @param {string} filename
     * @return IPromise<ArrayBuffer>
     */
    getBlobsZip(blobIds: string[], repositoryId: string, project?: string, filename?: string): IPromise<ArrayBuffer>;
    /**
     * Gets a single blob.
     *
     * @param {string} repositoryId
     * @param {string} sha1
     * @param {string} project - Project ID or project name
     * @param {boolean} download
     * @param {string} fileName
     * @return IPromise<ArrayBuffer>
     */
    getBlobZip(repositoryId: string, sha1: string, project?: string, download?: boolean, fileName?: string): IPromise<ArrayBuffer>;
    /**
     * Retrieve statistics about a single branch.
     *
     * @param {string} repositoryId - Friendly name or guid of repository
     * @param {string} name - Name of the branch
     * @param {string} project - Project ID or project name
     * @param {Contracts.GitVersionDescriptor} baseVersionDescriptor
     * @return IPromise<Contracts.GitBranchStats>
     */
    getBranch(repositoryId: string, name: string, project?: string, baseVersionDescriptor?: Contracts.GitVersionDescriptor): IPromise<Contracts.GitBranchStats>;
    /**
     * Retrieve statistics about all branches within a repository.
     *
     * @param {string} repositoryId - Friendly name or guid of repository
     * @param {string} project - Project ID or project name
     * @param {Contracts.GitVersionDescriptor} baseVersionDescriptor
     * @return IPromise<Contracts.GitBranchStats[]>
     */
    getBranches(repositoryId: string, project?: string, baseVersionDescriptor?: Contracts.GitVersionDescriptor): IPromise<Contracts.GitBranchStats[]>;
    /**
     * Retrieve changes for a particular commit.
     *
     * @param {string} commitId - The id of the commit.
     * @param {string} repositoryId - The id or friendly name of the repository. To use the friendly name, projectId must also be specified.
     * @param {string} project - Project ID or project name
     * @param {number} top - The maximum number of changes to return.
     * @param {number} skip - The number of changes to skip.
     * @return IPromise<Contracts.GitCommitChanges>
     */
    getChanges(commitId: string, repositoryId: string, project?: string, top?: number, skip?: number): IPromise<Contracts.GitCommitChanges>;
    /**
     * Retrieve a particular commit.
     *
     * @param {string} commitId - The id of the commit.
     * @param {string} repositoryId - The id or friendly name of the repository. To use the friendly name, projectId must also be specified.
     * @param {string} project - Project ID or project name
     * @param {number} changeCount - The number of changes to include in the result.
     * @return IPromise<Contracts.GitCommit>
     */
    getCommit(commitId: string, repositoryId: string, project?: string, changeCount?: number): IPromise<Contracts.GitCommit>;
    /**
     * Retrieve git commits for a project
     *
     * @param {string} repositoryId - The id or friendly name of the repository. To use the friendly name, projectId must also be specified.
     * @param {Contracts.GitQueryCommitsCriteria} searchCriteria
     * @param {string} project - Project ID or project name
     * @param {number} skip
     * @param {number} top
     * @return IPromise<Contracts.GitCommitRef[]>
     */
    getCommits(repositoryId: string, searchCriteria: Contracts.GitQueryCommitsCriteria, project?: string, skip?: number, top?: number): IPromise<Contracts.GitCommitRef[]>;
    /**
     * Retrieve a list of commits associated with a particular push.
     *
     * @param {string} repositoryId - The id or friendly name of the repository. To use the friendly name, projectId must also be specified.
     * @param {number} pushId - The id of the push.
     * @param {string} project - Project ID or project name
     * @param {number} top - The maximum number of commits to return ("get the top x commits").
     * @param {number} skip - The number of commits to skip.
     * @param {boolean} includeLinks
     * @return IPromise<Contracts.GitCommitRef[]>
     */
    getPushCommits(repositoryId: string, pushId: number, project?: string, top?: number, skip?: number, includeLinks?: boolean): IPromise<Contracts.GitCommitRef[]>;
    /**
     * Retrieve git commits for a project
     *
     * @param {Contracts.GitQueryCommitsCriteria} searchCriteria - Search options
     * @param {string} repositoryId - The id or friendly name of the repository. To use the friendly name, projectId must also be specified.
     * @param {string} project - Project ID or project name
     * @param {number} skip
     * @param {number} top
     * @return IPromise<Contracts.GitCommitRef[]>
     */
    getCommitsBatch(searchCriteria: Contracts.GitQueryCommitsCriteria, repositoryId: string, project?: string, skip?: number, top?: number): IPromise<Contracts.GitCommitRef[]>;
    /**
     * @exemptedapi
     * [Preview API] Retrieve deleted git repositories.
     *
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.GitDeletedRepository[]>
     */
    getDeletedRepositories(project: string): IPromise<Contracts.GitDeletedRepository[]>;
    /**
     * Get Item Metadata and/or Content for a single item. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
     *
     * @param {string} repositoryId
     * @param {string} path
     * @param {string} project - Project ID or project name
     * @param {string} scopePath
     * @param {Contracts.VersionControlRecursionType} recursionLevel
     * @param {boolean} includeContentMetadata
     * @param {boolean} latestProcessedChange
     * @param {boolean} download
     * @param {Contracts.GitVersionDescriptor} versionDescriptor
     * @return IPromise<Contracts.GitItem>
     */
    getItem(repositoryId: string, path: string, project?: string, scopePath?: string, recursionLevel?: Contracts.VersionControlRecursionType, includeContentMetadata?: boolean, latestProcessedChange?: boolean, download?: boolean, versionDescriptor?: Contracts.GitVersionDescriptor): IPromise<Contracts.GitItem>;
    /**
     * Get Item Metadata and/or Content for a single item. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
     *
     * @param {string} repositoryId
     * @param {string} path
     * @param {string} project - Project ID or project name
     * @param {string} scopePath
     * @param {Contracts.VersionControlRecursionType} recursionLevel
     * @param {boolean} includeContentMetadata
     * @param {boolean} latestProcessedChange
     * @param {boolean} download
     * @param {Contracts.GitVersionDescriptor} versionDescriptor
     * @return IPromise<ArrayBuffer>
     */
    getItemContent(repositoryId: string, path: string, project?: string, scopePath?: string, recursionLevel?: Contracts.VersionControlRecursionType, includeContentMetadata?: boolean, latestProcessedChange?: boolean, download?: boolean, versionDescriptor?: Contracts.GitVersionDescriptor): IPromise<ArrayBuffer>;
    /**
     * Get Item Metadata and/or Content for a collection of items. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
     *
     * @param {string} repositoryId
     * @param {string} project - Project ID or project name
     * @param {string} scopePath
     * @param {Contracts.VersionControlRecursionType} recursionLevel
     * @param {boolean} includeContentMetadata
     * @param {boolean} latestProcessedChange
     * @param {boolean} download
     * @param {boolean} includeLinks
     * @param {Contracts.GitVersionDescriptor} versionDescriptor
     * @return IPromise<Contracts.GitItem[]>
     */
    getItems(repositoryId: string, project?: string, scopePath?: string, recursionLevel?: Contracts.VersionControlRecursionType, includeContentMetadata?: boolean, latestProcessedChange?: boolean, download?: boolean, includeLinks?: boolean, versionDescriptor?: Contracts.GitVersionDescriptor): IPromise<Contracts.GitItem[]>;
    /**
     * Get Item Metadata and/or Content for a single item. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
     *
     * @param {string} repositoryId
     * @param {string} path
     * @param {string} project - Project ID or project name
     * @param {string} scopePath
     * @param {Contracts.VersionControlRecursionType} recursionLevel
     * @param {boolean} includeContentMetadata
     * @param {boolean} latestProcessedChange
     * @param {boolean} download
     * @param {Contracts.GitVersionDescriptor} versionDescriptor
     * @return IPromise<string>
     */
    getItemText(repositoryId: string, path: string, project?: string, scopePath?: string, recursionLevel?: Contracts.VersionControlRecursionType, includeContentMetadata?: boolean, latestProcessedChange?: boolean, download?: boolean, versionDescriptor?: Contracts.GitVersionDescriptor): IPromise<string>;
    /**
     * Get Item Metadata and/or Content for a single item. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
     *
     * @param {string} repositoryId
     * @param {string} path
     * @param {string} project - Project ID or project name
     * @param {string} scopePath
     * @param {Contracts.VersionControlRecursionType} recursionLevel
     * @param {boolean} includeContentMetadata
     * @param {boolean} latestProcessedChange
     * @param {boolean} download
     * @param {Contracts.GitVersionDescriptor} versionDescriptor
     * @return IPromise<ArrayBuffer>
     */
    getItemZip(repositoryId: string, path: string, project?: string, scopePath?: string, recursionLevel?: Contracts.VersionControlRecursionType, includeContentMetadata?: boolean, latestProcessedChange?: boolean, download?: boolean, versionDescriptor?: Contracts.GitVersionDescriptor): IPromise<ArrayBuffer>;
    /**
     * Post for retrieving a creating a batch out of a set of items in a repo / project given a list of paths or a long path
     *
     * @param {Contracts.GitItemRequestData} requestData
     * @param {string} repositoryId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.GitItem[][]>
     */
    getItemsBatch(requestData: Contracts.GitItemRequestData, repositoryId: string, project?: string): IPromise<Contracts.GitItem[][]>;
    /**
     * @exemptedapi
     * [Preview API] Retrieve pull request's commits
     *
     * @param {string} repositoryId
     * @param {number} pullRequestId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.GitCommitRef[]>
     */
    getPullRequestCommits(repositoryId: string, pullRequestId: number, project?: string): IPromise<Contracts.GitCommitRef[]>;
    /**
     * Adds a reviewer to a git pull request
     *
     * @param {Contracts.IdentityRefWithVote} reviewer
     * @param {string} repositoryId
     * @param {number} pullRequestId
     * @param {string} reviewerId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.IdentityRefWithVote>
     */
    createPullRequestReviewer(reviewer: Contracts.IdentityRefWithVote, repositoryId: string, pullRequestId: number, reviewerId: string, project?: string): IPromise<Contracts.IdentityRefWithVote>;
    /**
     * Adds reviewers to a git pull request
     *
     * @param {VSS_Common_Contracts.IdentityRef[]} reviewers
     * @param {string} repositoryId
     * @param {number} pullRequestId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.IdentityRefWithVote[]>
     */
    createPullRequestReviewers(reviewers: VSS_Common_Contracts.IdentityRef[], repositoryId: string, pullRequestId: number, project?: string): IPromise<Contracts.IdentityRefWithVote[]>;
    /**
     * Adds reviewers to a git pull request
     *
     * NOTE(review): despite the summary above, this removes a reviewer
     * (DELETE); the generated doc comment looks copy-pasted — confirm upstream.
     *
     * @param {string} repositoryId
     * @param {number} pullRequestId
     * @param {string} reviewerId
     * @param {string} project - Project ID or project name
     * @return IPromise<void>
     */
    deletePullRequestReviewer(repositoryId: string, pullRequestId: number, reviewerId: string, project?: string): IPromise<void>;
    /**
     * Retrieve a reviewer from a pull request
     *
     * @param {string} repositoryId
     * @param {number} pullRequestId
     * @param {string} reviewerId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.IdentityRefWithVote>
     */
    getPullRequestReviewer(repositoryId: string, pullRequestId: number, reviewerId: string, project?: string): IPromise<Contracts.IdentityRefWithVote>;
    /**
     * Retrieve a pull request reviewers
     *
     * @param {string} repositoryId
     * @param {number} pullRequestId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.IdentityRefWithVote[]>
     */
    getPullRequestReviewers(repositoryId: string, pullRequestId: number, project?: string): IPromise<Contracts.IdentityRefWithVote[]>;
    /**
     * Create a git pull request
     *
     * @param {Contracts.GitPullRequest} gitPullRequestToCreate
     * @param {string} repositoryId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.GitPullRequest>
     */
    createPullRequest(gitPullRequestToCreate: Contracts.GitPullRequest, repositoryId: string, project?: string): IPromise<Contracts.GitPullRequest>;
    /**
     * Retrieve a pull request
     *
     * @param {string} repositoryId
     * @param {number} pullRequestId
     * @param {string} project - Project ID or project name
     * @param {number} maxCommentLength
     * @param {number} skip
     * @param {number} top
     * @param {boolean} includeCommits
     * @param {boolean} includeWorkItemRefs
     * @return IPromise<Contracts.GitPullRequest>
     */
    getPullRequest(repositoryId: string, pullRequestId: number, project?: string, maxCommentLength?: number, skip?: number, top?: number, includeCommits?: boolean, includeWorkItemRefs?: boolean): IPromise<Contracts.GitPullRequest>;
    /**
     * Query for pull requests
     *
     * @param {string} repositoryId
     * @param {Contracts.GitPullRequestSearchCriteria} searchCriteria
     * @param {string} project - Project ID or project name
     * @param {number} maxCommentLength
     * @param {number} skip
     * @param {number} top
     * @return IPromise<Contracts.GitPullRequest[]>
     */
    getPullRequests(repositoryId: string, searchCriteria: Contracts.GitPullRequestSearchCriteria, project?: string, maxCommentLength?: number, skip?: number, top?: number): IPromise<Contracts.GitPullRequest[]>;
    /**
     * Updates a pull request
     *
     * @param {Contracts.GitPullRequest} gitPullRequestToUpdate
     * @param {string} repositoryId
     * @param {number} pullRequestId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.GitPullRequest>
     */
    updatePullRequest(gitPullRequestToUpdate: Contracts.GitPullRequest, repositoryId: string, pullRequestId: number, project?: string): IPromise<Contracts.GitPullRequest>;
    /**
     * @exemptedapi
     * [Preview API] Query pull requests by project
     *
     * @param {string} project - Project ID or project name
     * @param {Contracts.GitPullRequestSearchCriteria} searchCriteria
     * @param {number} maxCommentLength
     * @param {number} skip
     * @param {number} top
     * @return IPromise<Contracts.GitPullRequest[]>
     */
    getPullRequestsByProject(project: string, searchCriteria: Contracts.GitPullRequestSearchCriteria, maxCommentLength?: number, skip?: number, top?: number): IPromise<Contracts.GitPullRequest[]>;
    /**
     * @exemptedapi
     * [Preview API] Retrieve a pull request work items
     *
     * @param {string} repositoryId
     * @param {number} pullRequestId
     * @param {string} project - Project ID or project name
     * @param {number} commitsTop
     * @param {number} commitsSkip
     * @return IPromise<Contracts.AssociatedWorkItem[]>
     */
    getPullRequestWorkItems(repositoryId: string, pullRequestId: number, project?: string, commitsTop?: number, commitsSkip?: number): IPromise<Contracts.AssociatedWorkItem[]>;
    /**
     * Push changes to the repository.
     *
     * @param {Contracts.GitPush} push
     * @param {string} repositoryId - The id or friendly name of the repository. To use the friendly name, a project-scoped route must be used.
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.GitPush>
     */
    createPush(push: Contracts.GitPush, repositoryId: string, project?: string): IPromise<Contracts.GitPush>;
    /**
     * Retrieve a particular push.
     *
     * @param {string} repositoryId - The id or friendly name of the repository. To use the friendly name, projectId must also be specified.
     * @param {number} pushId - The id of the push.
     * @param {string} project - Project ID or project name
     * @param {number} includeCommits - The number of commits to include in the result.
     * @param {boolean} includeRefUpdates
     * @return IPromise<Contracts.GitPush>
     */
    getPush(repositoryId: string, pushId: number, project?: string, includeCommits?: number, includeRefUpdates?: boolean): IPromise<Contracts.GitPush>;
    /**
     * Retrieves pushes associated with the specified repository.
     *
     * @param {string} repositoryId - The id or friendly name of the repository. To use the friendly name, projectId must also be specified.
     * @param {string} project - Project ID or project name
     * @param {number} skip
     * @param {number} top
     * @param {Contracts.GitPushSearchCriteria} searchCriteria
     * @return IPromise<Contracts.GitPush[]>
     */
    getPushes(repositoryId: string, project?: string, skip?: number, top?: number, searchCriteria?: Contracts.GitPushSearchCriteria): IPromise<Contracts.GitPush[]>;
    /**
     * Queries the provided repository for its refs and returns them.
     *
     * @param {string} repositoryId - The id or friendly name of the repository. To use the friendly name, projectId must also be specified.
     * @param {string} project - Project ID or project name
     * @param {string} filter - [optional] A filter to apply to the refs.
     * @param {boolean} includeLinks - [optional] Specifies if referenceLinks should be included in the result. default is false.
     * @param {boolean} includeStatuses - [optional] Includes the first 1000 statuses of the commits the refs are pointing at as well. default is false.
     * @return IPromise<Contracts.GitRef[]>
     */
    getRefs(repositoryId: string, project?: string, filter?: string, includeLinks?: boolean, includeStatuses?: boolean): IPromise<Contracts.GitRef[]>;
    /**
     * Creates or updates refs with the given information
     *
     * @param {Contracts.GitRefUpdate[]} refUpdates - List of ref updates to attempt to perform
     * @param {string} repositoryId - The id or friendly name of the repository. To use the friendly name, projectId must also be specified.
     * @param {string} project - Project ID or project name
     * @param {string} projectId - The id of the project.
     * @return IPromise<Contracts.GitRefUpdateResult[]>
     */
    updateRefs(refUpdates: Contracts.GitRefUpdate[], repositoryId: string, project?: string, projectId?: string): IPromise<Contracts.GitRefUpdateResult[]>;
    /**
     * Create a git repository
     *
     * @param {Contracts.GitRepository} gitRepositoryToCreate
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.GitRepository>
     */
    createRepository(gitRepositoryToCreate: Contracts.GitRepository, project?: string): IPromise<Contracts.GitRepository>;
    /**
     * Delete a git repository
     *
     * @param {string} repositoryId
     * @param {string} project - Project ID or project name
     * @return IPromise<void>
     */
    deleteRepository(repositoryId: string, project?: string): IPromise<void>;
    /**
     * Retrieve git repositories.
     *
     * @param {string} project - Project ID or project name
     * @param {boolean} includeLinks
     * @return IPromise<Contracts.GitRepository[]>
     */
    getRepositories(project?: string, includeLinks?: boolean): IPromise<Contracts.GitRepository[]>;
    /**
     * Retrieve a single git repository.
     *
     * @param {string} repositoryId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.GitRepository>
     */
    getRepository(repositoryId: string, project?: string): IPromise<Contracts.GitRepository>;
    /**
     * Updates the Git repository with the single populated change in the specified repository information.
     *
     * @param {Contracts.GitRepository} newRepositoryInfo
     * @param {string} repositoryId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.GitRepository>
     */
    updateRepository(newRepositoryInfo: Contracts.GitRepository, repositoryId: string, project?: string): IPromise<Contracts.GitRepository>;
    /**
     * Create a status for the given commit.
     *
     * @param {Contracts.GitStatus} gitCommitStatusToCreate
     * @param {string} commitId
     * @param {string} repositoryId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.GitStatus>
     */
    createCommitStatus(gitCommitStatusToCreate: Contracts.GitStatus, commitId: string, repositoryId: string, project?: string): IPromise<Contracts.GitStatus>;
    /**
     * Retrieve the statuses associated with the given commit.
     *
     * @param {string} commitId
     * @param {string} repositoryId
     * @param {string} project - Project ID or project name
     * @param {number} top
     * @param {number} skip
     * @return IPromise<Contracts.GitStatus[]>
     */
    getStatuses(commitId: string, repositoryId: string, project?: string, top?: number, skip?: number): IPromise<Contracts.GitStatus[]>;
    /**
     * @exemptedapi
     * [Preview API] Retrieve a set of suggestions (including a pull request suggestion).
     *
     * @param {string} repositoryId
     * @param {string} project - Project ID or project name
     * @return IPromise<Contracts.GitSuggestion[]>
     */
    getSuggestions(repositoryId: string, project?: string): IPromise<Contracts.GitSuggestion[]>;
    /**
     * Retrieve the git tree object identified by sha1.
     *
     * @param {string} repositoryId
     * @param {string} sha1
     * @param {string} project - Project ID or project name
     * @param {string} projectId
     * @param {boolean} recursive
     * @param {string} fileName
     * @return IPromise<Contracts.GitTreeRef>
     */
    getTree(repositoryId: string, sha1: string, project?: string, projectId?: string, recursive?: boolean, fileName?: string): IPromise<Contracts.GitTreeRef>;
    /**
     * Retrieve the git tree identified by sha1 as a zip download.
     *
     * @param {string} repositoryId
     * @param {string} sha1
     * @param {string} project - Project ID or project name
     * @param {string} projectId
     * @param {boolean} recursive
     * @param {string} fileName
     * @return IPromise<ArrayBuffer>
     */
    getTreeZip(repositoryId: string, sha1: string, project?: string, projectId?: string, recursive?: boolean, fileName?: string): IPromise<ArrayBuffer>;
}
/**
 * Alias for the latest released client (currently GitHttpClient2_2).
 */
export class GitHttpClient extends GitHttpClient2_2 {
    constructor(rootRequestPath: string);
}
/**
 * Gets an http client targeting the latest released version of the APIs.
 *
 * @return GitHttpClient2_2
 */
export function getClient(): GitHttpClient2_2;
}
declare module "TFS/VersionControl/Services" {
import Contracts_Platform = require("VSS/Common/Contracts/Platform");
/**
 * Host service for common code actions
 */
export interface IVersionControlActionService {
    /** Launches create branch dialog
     * @param workItemIds The work item ids to link to the newly created branch
     */
    beginLaunchCreateBranchDialog(workItemIds: number[]): IPromise<void>;
    /** Features required for actions, the actions will not work as desired when users do not have license for the listed features. */
    requiredFeaturesForActions?: string[];
}
/**
 * Host service for version control actions
 */
export module VersionControlActionService {
    /** Contribution id of the action service. */
    var contributionId: string;
    /** Fully qualified form of contributionId — presumably publisher.extension-prefixed; TODO confirm. */
    var fullyQualifiedContributionId: string;
    /** Get an instance of the code action service
     * @param webContext Optional web context to scope the service to
     */
    function getService(webContext?: Contracts_Platform.WebContext): IPromise<IVersionControlActionService>;
}
}
declare module "TFS/VersionControl/TfvcRestClient" {
    import TFS_VersionControl_Contracts = require("TFS/VersionControl/Contracts");
    import VSS_WebApi = require("VSS/WebApi/RestClient");
    /**
     * Version 2.2 REST client for Team Foundation Version Control (TFVC) resources:
     * branches, changesets, items, labels, shelvesets and project info.
     */
    export class TfvcHttpClient2_2 extends VSS_WebApi.VssHttpClient {
        static serviceInstanceId: string;
        constructor(rootRequestPath: string);
        /**
         * Get a single branch hierarchy at the given path with parents or children (if specified)
         *
         * @param {string} path
         * @param {string} project - Project ID or project name
         * @param {boolean} includeParent
         * @param {boolean} includeChildren
         * @return IPromise<TFS_VersionControl_Contracts.TfvcBranch>
         */
        getBranch(path: string, project?: string, includeParent?: boolean, includeChildren?: boolean): IPromise<TFS_VersionControl_Contracts.TfvcBranch>;
        /**
         * Get a collection of branch roots -- first-level children, branches with no parents
         *
         * @param {string} project - Project ID or project name
         * @param {boolean} includeParent
         * @param {boolean} includeChildren
         * @param {boolean} includeDeleted
         * @param {boolean} includeLinks
         * @return IPromise<TFS_VersionControl_Contracts.TfvcBranch[]>
         */
        getBranches(project?: string, includeParent?: boolean, includeChildren?: boolean, includeDeleted?: boolean, includeLinks?: boolean): IPromise<TFS_VersionControl_Contracts.TfvcBranch[]>;
        /**
         * Get branch hierarchies below the specified scopePath
         *
         * @param {string} scopePath
         * @param {string} project - Project ID or project name
         * @param {boolean} includeDeleted
         * @param {boolean} includeLinks
         * @return IPromise<TFS_VersionControl_Contracts.TfvcBranchRef[]>
         */
        getBranchRefs(scopePath: string, project?: string, includeDeleted?: boolean, includeLinks?: boolean): IPromise<TFS_VersionControl_Contracts.TfvcBranchRef[]>;
        /**
         * Retrieve Tfvc changes for a given changeset
         *
         * @param {number} id
         * @param {number} skip
         * @param {number} top
         * @return IPromise<TFS_VersionControl_Contracts.TfvcChange[]>
         */
        getChangesetChanges(id?: number, skip?: number, top?: number): IPromise<TFS_VersionControl_Contracts.TfvcChange[]>;
        /**
         * @param {TFS_VersionControl_Contracts.TfvcChangeset} changeset
         * @param {string} project - Project ID or project name
         * @return IPromise<TFS_VersionControl_Contracts.TfvcChangesetRef>
         */
        createChangeset(changeset: TFS_VersionControl_Contracts.TfvcChangeset, project?: string): IPromise<TFS_VersionControl_Contracts.TfvcChangesetRef>;
        /**
         * Retrieve a Tfvc Changeset
         *
         * @param {number} id
         * @param {string} project - Project ID or project name
         * @param {number} maxChangeCount
         * @param {boolean} includeDetails
         * @param {boolean} includeWorkItems
         * @param {number} maxCommentLength
         * @param {boolean} includeSourceRename
         * @param {number} skip
         * @param {number} top
         * @param {string} orderby
         * @param {TFS_VersionControl_Contracts.TfvcChangesetSearchCriteria} searchCriteria
         * @return IPromise<TFS_VersionControl_Contracts.TfvcChangeset>
         */
        getChangeset(id: number, project?: string, maxChangeCount?: number, includeDetails?: boolean, includeWorkItems?: boolean, maxCommentLength?: number, includeSourceRename?: boolean, skip?: number, top?: number, orderby?: string, searchCriteria?: TFS_VersionControl_Contracts.TfvcChangesetSearchCriteria): IPromise<TFS_VersionControl_Contracts.TfvcChangeset>;
        /**
         * Retrieve Tfvc changesets
         *
         * @param {string} project - Project ID or project name
         * @param {number} maxChangeCount
         * @param {boolean} includeDetails
         * @param {boolean} includeWorkItems
         * @param {number} maxCommentLength
         * @param {boolean} includeSourceRename
         * @param {number} skip
         * @param {number} top
         * @param {string} orderby
         * @param {TFS_VersionControl_Contracts.TfvcChangesetSearchCriteria} searchCriteria
         * @return IPromise<TFS_VersionControl_Contracts.TfvcChangesetRef[]>
         */
        getChangesets(project?: string, maxChangeCount?: number, includeDetails?: boolean, includeWorkItems?: boolean, maxCommentLength?: number, includeSourceRename?: boolean, skip?: number, top?: number, orderby?: string, searchCriteria?: TFS_VersionControl_Contracts.TfvcChangesetSearchCriteria): IPromise<TFS_VersionControl_Contracts.TfvcChangesetRef[]>;
        /**
         * @param {TFS_VersionControl_Contracts.TfvcChangesetsRequestData} changesetsRequestData
         * @return IPromise<TFS_VersionControl_Contracts.TfvcChangesetRef[]>
         */
        getBatchedChangesets(changesetsRequestData: TFS_VersionControl_Contracts.TfvcChangesetsRequestData): IPromise<TFS_VersionControl_Contracts.TfvcChangesetRef[]>;
        /**
         * @param {number} id
         * @return IPromise<TFS_VersionControl_Contracts.AssociatedWorkItem[]>
         */
        getChangesetWorkItems(id?: number): IPromise<TFS_VersionControl_Contracts.AssociatedWorkItem[]>;
        /**
         * Post for retrieving a set of items given a list of paths or a long path. Allows for specifying the recursionLevel and version descriptors for each path.
         *
         * @param {TFS_VersionControl_Contracts.TfvcItemRequestData} itemRequestData
         * @param {string} project - Project ID or project name
         * @return IPromise<TFS_VersionControl_Contracts.TfvcItem[][]>
         */
        getItemsBatch(itemRequestData: TFS_VersionControl_Contracts.TfvcItemRequestData, project?: string): IPromise<TFS_VersionControl_Contracts.TfvcItem[][]>;
        /**
         * Post for retrieving a set of items given a list of paths or a long path. Allows for specifying the recursionLevel and version descriptors for each path.
         *
         * @param {TFS_VersionControl_Contracts.TfvcItemRequestData} itemRequestData
         * @param {string} project - Project ID or project name
         * @return IPromise<ArrayBuffer>
         */
        getItemsBatchZip(itemRequestData: TFS_VersionControl_Contracts.TfvcItemRequestData, project?: string): IPromise<ArrayBuffer>;
        /**
         * Get Item Metadata and/or Content. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
         *
         * @param {string} path
         * @param {string} project - Project ID or project name
         * @param {string} fileName
         * @param {boolean} download
         * @param {string} scopePath
         * @param {TFS_VersionControl_Contracts.VersionControlRecursionType} recursionLevel
         * @param {TFS_VersionControl_Contracts.TfvcVersionDescriptor} versionDescriptor
         * @return IPromise<TFS_VersionControl_Contracts.TfvcItem>
         */
        getItem(path: string, project?: string, fileName?: string, download?: boolean, scopePath?: string, recursionLevel?: TFS_VersionControl_Contracts.VersionControlRecursionType, versionDescriptor?: TFS_VersionControl_Contracts.TfvcVersionDescriptor): IPromise<TFS_VersionControl_Contracts.TfvcItem>;
        /**
         * Get Item Metadata and/or Content. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
         *
         * @param {string} path
         * @param {string} project - Project ID or project name
         * @param {string} fileName
         * @param {boolean} download
         * @param {string} scopePath
         * @param {TFS_VersionControl_Contracts.VersionControlRecursionType} recursionLevel
         * @param {TFS_VersionControl_Contracts.TfvcVersionDescriptor} versionDescriptor
         * @return IPromise<ArrayBuffer>
         */
        getItemContent(path: string, project?: string, fileName?: string, download?: boolean, scopePath?: string, recursionLevel?: TFS_VersionControl_Contracts.VersionControlRecursionType, versionDescriptor?: TFS_VersionControl_Contracts.TfvcVersionDescriptor): IPromise<ArrayBuffer>;
        /**
         * Get a list of Tfvc items
         *
         * @param {string} project - Project ID or project name
         * @param {string} scopePath
         * @param {TFS_VersionControl_Contracts.VersionControlRecursionType} recursionLevel
         * @param {boolean} includeLinks
         * @param {TFS_VersionControl_Contracts.TfvcVersionDescriptor} versionDescriptor
         * @return IPromise<TFS_VersionControl_Contracts.TfvcItem[]>
         */
        getItems(project?: string, scopePath?: string, recursionLevel?: TFS_VersionControl_Contracts.VersionControlRecursionType, includeLinks?: boolean, versionDescriptor?: TFS_VersionControl_Contracts.TfvcVersionDescriptor): IPromise<TFS_VersionControl_Contracts.TfvcItem[]>;
        /**
         * Get Item Metadata and/or Content. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
         *
         * @param {string} path
         * @param {string} project - Project ID or project name
         * @param {string} fileName
         * @param {boolean} download
         * @param {string} scopePath
         * @param {TFS_VersionControl_Contracts.VersionControlRecursionType} recursionLevel
         * @param {TFS_VersionControl_Contracts.TfvcVersionDescriptor} versionDescriptor
         * @return IPromise<string>
         */
        getItemText(path: string, project?: string, fileName?: string, download?: boolean, scopePath?: string, recursionLevel?: TFS_VersionControl_Contracts.VersionControlRecursionType, versionDescriptor?: TFS_VersionControl_Contracts.TfvcVersionDescriptor): IPromise<string>;
        /**
         * Get Item Metadata and/or Content. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
         *
         * @param {string} path
         * @param {string} project - Project ID or project name
         * @param {string} fileName
         * @param {boolean} download
         * @param {string} scopePath
         * @param {TFS_VersionControl_Contracts.VersionControlRecursionType} recursionLevel
         * @param {TFS_VersionControl_Contracts.TfvcVersionDescriptor} versionDescriptor
         * @return IPromise<ArrayBuffer>
         */
        getItemZip(path: string, project?: string, fileName?: string, download?: boolean, scopePath?: string, recursionLevel?: TFS_VersionControl_Contracts.VersionControlRecursionType, versionDescriptor?: TFS_VersionControl_Contracts.TfvcVersionDescriptor): IPromise<ArrayBuffer>;
        /**
         * Get items under a label.
         *
         * @param {string} labelId - Unique identifier of label
         * @param {number} top - Max number of items to return
         * @param {number} skip - Number of items to skip
         * @return IPromise<TFS_VersionControl_Contracts.TfvcItem[]>
         */
        getLabelItems(labelId: string, top?: number, skip?: number): IPromise<TFS_VersionControl_Contracts.TfvcItem[]>;
        /**
         * Get a single deep label.
         *
         * @param {string} labelId - Unique identifier of label
         * @param {TFS_VersionControl_Contracts.TfvcLabelRequestData} requestData - maxItemCount
         * @param {string} project - Project ID or project name
         * @return IPromise<TFS_VersionControl_Contracts.TfvcLabel>
         */
        getLabel(labelId: string, requestData: TFS_VersionControl_Contracts.TfvcLabelRequestData, project?: string): IPromise<TFS_VersionControl_Contracts.TfvcLabel>;
        /**
         * Get a collection of shallow label references.
         *
         * @param {TFS_VersionControl_Contracts.TfvcLabelRequestData} requestData - labelScope, name, owner, and itemLabelFilter
         * @param {string} project - Project ID or project name
         * @param {number} top - Max number of labels to return
         * @param {number} skip - Number of labels to skip
         * @return IPromise<TFS_VersionControl_Contracts.TfvcLabelRef[]>
         */
        getLabels(requestData: TFS_VersionControl_Contracts.TfvcLabelRequestData, project?: string, top?: number, skip?: number): IPromise<TFS_VersionControl_Contracts.TfvcLabelRef[]>;
        /**
         * [Obsolete - Use the Projects API instead] Retrieve the version control information for a given Team Project
         *
         * @param {string} projectId - The id (or name) of the team project
         * @param {string} project - Project ID or project name
         * @return IPromise<TFS_VersionControl_Contracts.VersionControlProjectInfo>
         */
        getProjectInfo(projectId: string, project?: string): IPromise<TFS_VersionControl_Contracts.VersionControlProjectInfo>;
        /**
         * [Obsolete - Use the Projects API instead]
         *
         * @param {string} project - Project ID or project name
         * @return IPromise<TFS_VersionControl_Contracts.VersionControlProjectInfo[]>
         */
        getProjectInfos(project?: string): IPromise<TFS_VersionControl_Contracts.VersionControlProjectInfo[]>;
        /**
         * Get changes included in a shelveset.
         *
         * @param {string} shelvesetId - Shelveset's unique ID
         * @param {number} top - Max number of changes to return
         * @param {number} skip - Number of changes to skip
         * @return IPromise<TFS_VersionControl_Contracts.TfvcChange[]>
         */
        getShelvesetChanges(shelvesetId: string, top?: number, skip?: number): IPromise<TFS_VersionControl_Contracts.TfvcChange[]>;
        /**
         * Get a single deep shelveset.
         *
         * @param {string} shelvesetId - Shelveset's unique ID
         * @param {TFS_VersionControl_Contracts.TfvcShelvesetRequestData} requestData - includeDetails, includeWorkItems, maxChangeCount, and maxCommentLength
         * @return IPromise<TFS_VersionControl_Contracts.TfvcShelveset>
         */
        getShelveset(shelvesetId: string, requestData: TFS_VersionControl_Contracts.TfvcShelvesetRequestData): IPromise<TFS_VersionControl_Contracts.TfvcShelveset>;
        /**
         * Return a collection of shallow shelveset references.
         *
         * @param {TFS_VersionControl_Contracts.TfvcShelvesetRequestData} requestData - name, owner, and maxCommentLength
         * @param {number} top - Max number of shelvesets to return
         * @param {number} skip - Number of shelvesets to skip
         * @return IPromise<TFS_VersionControl_Contracts.TfvcShelvesetRef[]>
         */
        getShelvesets(requestData: TFS_VersionControl_Contracts.TfvcShelvesetRequestData, top?: number, skip?: number): IPromise<TFS_VersionControl_Contracts.TfvcShelvesetRef[]>;
        /**
         * Get work items associated with a shelveset.
         *
         * @param {string} shelvesetId - Shelveset's unique ID
         * @return IPromise<TFS_VersionControl_Contracts.AssociatedWorkItem[]>
         */
        getShelvesetWorkItems(shelvesetId: string): IPromise<TFS_VersionControl_Contracts.AssociatedWorkItem[]>;
    }
    /**
     * Unversioned TFVC HTTP client alias; inherits the full API surface of TfvcHttpClient2_2.
     */
    export class TfvcHttpClient extends TfvcHttpClient2_2 {
        constructor(rootRequestPath: string);
    }
    /**
     * Gets an http client targeting the latest released version of the APIs.
     *
     * @return TfvcHttpClient2_2
     */
    export function getClient(): TfvcHttpClient2_2;
}
declare module "TFS/VersionControl/UIContracts" {
    import VCContracts = require("TFS/VersionControl/Contracts");
    /** An item (file/folder) shown in the source explorer, from either a Git or TFVC provider. */
    export interface ISourceItem extends VCContracts.ItemModel {
        /** Identifier of the source provider the item came from (value format defined by the host — confirm). */
        sourceProvider: string;
        /** The underlying provider-specific item. */
        item: VCContracts.GitItem | VCContracts.TfvcItem;
    }
    /** Context passed to contributions targeting a source item at a particular version. */
    export interface SourceItemContext {
        item: ISourceItem;
        version: string;
        /** Present only when the item belongs to a Git repository. */
        gitRepository?: VCContracts.GitRepository;
    }
    /** Context passed to contributions targeting a Git branch (ref) in a repository. */
    export interface GitBranchContext {
        repository: VCContracts.GitRepository;
        ref: VCContracts.GitRef;
        view: {
            /** Requests the hosting view to refresh itself. */
            refresh: () => void;
        };
    }
    /** Context passed to contributions targeting a diff between Git branches. */
    export interface GitBranchDiffContext {
        gitBranchDiff: VCContracts.GitCommitDiffs;
        repository: VCContracts.GitRepository;
        view: {
            /** Requests the hosting view to refresh itself. */
            refresh: () => void;
        };
    }
    /** Context passed to contributions targeting a single change within a change list. */
    export interface ChangeListSourceItemContext {
        change: VCContracts.GitChange | VCContracts.TfvcChange;
        changeList: VCContracts.ChangeList<VCContracts.GitItem> | VCContracts.ChangeList<VCContracts.TfvcItem>;
    }
}
declare module "TFS/WorkItemTracking/BatchRestClient" {
    import VSS_WebApi = require("VSS/WebApi/RestClient");
    /**
     * Interface for the Json request message
     */
    export interface JsonHttpRequest {
        /**
         * HTTP verb.
         */
        method: string;
        /**
         * Uri of the resource to be invoked.
         */
        uri: string;
        /**
         * Dictionary of the headers to be passed along.
         */
        headers: IDictionaryStringTo<string>;
        /**
         * Request body.
         */
        body?: any;
    }
    /**
     * Interface for the Json response message
     */
    export interface JsonHttpResponse {
        /**
         * Response code.
         */
        code: number;
        /**
         * Dictionary of the headers to be passed along.
         */
        headers?: IDictionaryStringTo<string>;
        /**
         * Response body.
         */
        body?: any;
    }
    /**
     * Interface for the Json batch response message
     */
    export interface JsonHttpBatchResponse {
        /**
         * The number of response objects batched together.
         */
        count: number;
        /**
         * Collection of the responses.
         */
        value: JsonHttpResponse[];
    }
    /**
     * @exemptedapi
     * Client that batches destroy/restore/delete operations over multiple work items
     * into single batch HTTP requests.
     */
    export class WorkItemTrackingHttpBatchClient extends VSS_WebApi.VssHttpClient {
        static serviceInstanceId: string;
        constructor(rootRequestPath: string);
        /**
         * [Preview API] Permanently destroys the given work items in one batch request.
         *
         * @param {number[]} ids - Work item ids to destroy
         * @return IPromise<JsonHttpBatchResponse>
         */
        destroyWorkItemsBatch(ids: number[]): IPromise<JsonHttpBatchResponse>;
        /**
         * [Preview API] Restores the given (deleted) work items in one batch request.
         *
         * @param {number[]} ids - Work item ids to restore
         * @return IPromise<JsonHttpBatchResponse>
         */
        restoreWorkItemsBatch(ids: number[]): IPromise<JsonHttpBatchResponse>;
        /**
         * [Preview API] Deletes the given work items in one batch request.
         *
         * @param {number[]} ids - Work item ids to delete
         * @return IPromise<JsonHttpBatchResponse>
         */
        deleteWorkItemsBatch(ids: number[]): IPromise<JsonHttpBatchResponse>;
        private _createBatchRequest(ids, httpMethod, resource, body?);
        private _beginBatchRequest(requests);
    }
    /**
     * Gets an http client targeting the latest released version of the APIs.
     *
     * @return WorkItemTrackingHttpBatchClient
     */
    export function getClient(): WorkItemTrackingHttpBatchClient;
}
declare module "TFS/WorkItemTracking/Contracts" {
    /** Reference (id + url) to a work item attachment. */
    export interface AttachmentReference {
        id: string;
        url: string;
    }
    export interface FieldDependentRule extends WorkItemTrackingResource {
        dependentFields: WorkItemFieldReference[];
    }
    /** Input for rule evaluation: fields of interest plus current/updated values. */
    export interface FieldsToEvaluate {
        fields: string[];
        fieldUpdates: {
            [key: string]: any;
        };
        fieldValues: {
            [key: string]: any;
        };
        rulesFrom: string[];
    }
    /** Data type of a work item field. */
    export enum FieldType {
        String = 0,
        Integer = 1,
        DateTime = 2,
        PlainText = 3,
        Html = 4,
        TreePath = 5,
        History = 6,
        Double = 7,
        Guid = 8,
        Boolean = 9,
    }
    /** Where a field may be used (work item, link, tree, type extension). */
    export enum FieldUsage {
        None = 0,
        WorkItem = 1,
        WorkItemLink = 2,
        Tree = 3,
        WorkItemTypeExtension = 4,
    }
    /** Lightweight reference to an identity (user or group — confirm). */
    export interface IdentityReference {
        id: string;
        name: string;
        url: string;
    }
    /** A generic link on a work item: relation type, title, target url and arbitrary attributes. */
    export interface Link {
        attributes: {
            [key: string]: any;
        };
        rel: string;
        title: string;
        url: string;
    }
    /** Link-query filter modes used by work item queries. */
    export enum LinkQueryMode {
        WorkItems = 0,
        LinksOneHopMustContain = 1,
        LinksOneHopMayContain = 2,
        LinksOneHopDoesNotContain = 3,
        LinksRecursiveMustContain = 4,
        LinksRecursiveMayContain = 5,
        LinksRecursiveDoesNotContain = 6,
    }
    export enum LogicalOperation {
        NONE = 0,
        AND = 1,
        OR = 2,
    }
    /** Lightweight reference to a team project. */
    export interface ProjectReference {
        id: string;
        name: string;
        url: string;
    }
    export enum ProvisioningActionType {
        Import = 0,
        Validate = 1,
    }
    export interface ProvisioningResult {
        provisioningImportEvents: string[];
    }
    /** Expansion options when fetching queries. */
    export enum QueryExpand {
        None = 0,
        Wiql = 1,
        Clauses = 2,
        All = 3,
    }
    /** A node in the query hierarchy: either a query folder or a query definition. */
    export interface QueryHierarchyItem extends WorkItemTrackingResource {
        children: QueryHierarchyItem[];
        clauses: WorkItemQueryClause;
        columns: WorkItemFieldReference[];
        filterOptions: LinkQueryMode;
        hasChildren: boolean;
        id: string;
        isDeleted: boolean;
        isFolder: boolean;
        isInvalidSyntax: boolean;
        isPublic: boolean;
        linkClauses: WorkItemQueryClause;
        name: string;
        path: string;
        queryType: QueryType;
        sortColumns: WorkItemQuerySortColumn[];
        sourceClauses: WorkItemQueryClause;
        targetClauses: WorkItemQueryClause;
        wiql: string;
    }
    export enum QueryResultType {
        WorkItem = 1,
        WorkItemLink = 2,
    }
    export enum QueryType {
        Flat = 1,
        Tree = 2,
        OneHop = 3,
    }
    /** A work item link row as surfaced by the reporting (warehouse) feed. */
    export interface ReportingWorkItemLink {
        changedDate: Date;
        isActive: boolean;
        rel: string;
        sourceId: number;
        targetId: number;
    }
    export interface ReportingWorkItemLinksBatch extends StreamedBatch<ReportingWorkItemLink> {
    }
    export interface ReportingWorkItemRevisionsBatch extends StreamedBatch<WorkItem> {
    }
    export interface ReportingWorkItemRevisionsFilter {
        /**
         * A list of fields to return in work item revisions. Omit this parameter to get all reportable fields.
         */
        fields: string[];
        /**
         * Return an identity reference instead of a string value for identity fields.
         */
        includeIdentityRef: boolean;
        /**
         * A list of types to filter the results to specific work item types. Omit this parameter to get work item revisions of all work item types.
         */
        types: string[];
    }
    /** One page of a continuation-token based streamed result set. */
    export interface StreamedBatch<T> {
        continuationToken: string;
        isLastBatch: boolean;
        nextLink: string;
        values: T[];
    }
    export enum TemplateType {
        WorkItemType = 0,
        GlobalWorkflow = 1,
    }
    /** Structure type of a single classification node. */
    export enum TreeNodeStructureType {
        Area = 0,
        Iteration = 1,
    }
    /** Classification hierarchy group (areas vs. iterations). */
    export enum TreeStructureGroup {
        Areas = 0,
        Iterations = 1,
    }
    /** Wrapper for a WIQL query string. */
    export interface Wiql {
        query: string;
    }
    export interface WitBatchRequest {
        body: string;
        headers: {
            [key: string]: string;
        };
        method: string;
        uri: string;
    }
    export interface WitBatchResponse {
        body: string;
        code: number;
        headers: {
            [key: string]: string;
        };
    }
    /** A full work item: revision, field values keyed by reference name, and relations. */
    export interface WorkItem extends WorkItemTrackingResource {
        fields: {
            [key: string]: any;
        };
        id: number;
        relations: WorkItemRelation[];
        rev: number;
    }
    /** A node in the area/iteration classification tree. */
    export interface WorkItemClassificationNode extends WorkItemTrackingResource {
        attributes: {
            [key: string]: any;
        };
        children: WorkItemClassificationNode[];
        id: number;
        identifier: string;
        name: string;
        structureType: TreeNodeStructureType;
    }
    export interface WorkItemDelete extends WorkItemDeleteReference {
        resource: WorkItem;
    }
    /** Metadata describing a deleted work item (recycle-bin entry — confirm). */
    export interface WorkItemDeleteReference {
        code: number;
        deletedBy: string;
        deletedDate: string;
        id: number;
        message: string;
        name: string;
        project: string;
        type: string;
        url: string;
    }
    export interface WorkItemDeleteUpdate {
        isDeleted: boolean;
    }
    /** Expansion options when fetching work items. */
    export enum WorkItemExpand {
        None = 0,
        Relations = 1,
        Fields = 2,
        All = 3,
    }
    /** Definition of a work item field. */
    export interface WorkItemField extends WorkItemTrackingResource {
        name: string;
        readOnly: boolean;
        referenceName: string;
        supportedOperations: WorkItemFieldOperation[];
        type: FieldType;
    }
    export interface WorkItemFieldOperation {
        name: string;
        referenceName: string;
    }
    export interface WorkItemFieldReference {
        name: string;
        referenceName: string;
        url: string;
    }
    /** Old/new value pair for a single field in an update. */
    export interface WorkItemFieldUpdate {
        newValue: any;
        oldValue: any;
    }
    export interface WorkItemHistory extends WorkItemTrackingResource {
        rev: number;
        revisedBy: IdentityReference;
        revisedDate: Date;
        value: string;
    }
    /** A directed, typed link between two work items (used in link query results). */
    export interface WorkItemLink {
        rel: string;
        source: WorkItemReference;
        target: WorkItemReference;
    }
    /** One clause of a query's WHERE tree; either a leaf comparison or a group of sub-clauses. */
    export interface WorkItemQueryClause {
        clauses: WorkItemQueryClause[];
        field: WorkItemFieldReference;
        fieldValue: WorkItemFieldReference;
        isFieldValue: boolean;
        logicalOperator: LogicalOperation;
        operator: WorkItemFieldOperation;
        value: string;
    }
    /** Result of executing a work item query. */
    export interface WorkItemQueryResult {
        asOf: Date;
        columns: WorkItemFieldReference[];
        queryResultType: QueryResultType;
        queryType: QueryType;
        sortColumns: WorkItemQuerySortColumn[];
        workItemRelations: WorkItemLink[];
        workItems: WorkItemReference[];
    }
    export interface WorkItemQuerySortColumn {
        descending: boolean;
        field: WorkItemFieldReference;
    }
    export interface WorkItemReference {
        id: number;
        url: string;
    }
    export interface WorkItemRelation extends Link {
    }
    export interface WorkItemRelationType extends WorkItemTrackingReference {
        attributes: {
            [key: string]: any;
        };
    }
    /** Relations added/removed/updated within a single work item update. */
    export interface WorkItemRelationUpdates {
        added: WorkItemRelation[];
        removed: WorkItemRelation[];
        updated: WorkItemRelation[];
    }
    export interface WorkItemRevisionReference extends WorkItemReference {
        rev: number;
    }
    export interface WorkItemTrackingReference extends WorkItemTrackingResource {
        name: string;
        referenceName: string;
    }
    export interface WorkItemTrackingResource extends WorkItemTrackingResourceReference {
        _links: any;
    }
    export interface WorkItemTrackingResourceReference {
        url: string;
    }
    /** Definition of a work item type, including its form XML. */
    export interface WorkItemType extends WorkItemTrackingResource {
        description: string;
        fields: WorkItemTypeFieldInstance[];
        name: string;
        xmlForm: string;
    }
    export interface WorkItemTypeCategory extends WorkItemTrackingResource {
        defaultWorkItemType: WorkItemTypeReference;
        name: string;
        referenceName: string;
        workItemTypes: WorkItemTypeReference[];
    }
    export interface WorkItemTypeFieldInstance {
        field: WorkItemFieldReference;
        helpText: string;
    }
    export interface WorkItemTypeReference extends WorkItemTrackingResourceReference {
        name: string;
    }
    export interface WorkItemTypeTemplate {
        template: string;
    }
    export interface WorkItemTypeTemplateUpdateModel {
        actionType: ProvisioningActionType;
        methodology: string;
        template: string;
        templateType: TemplateType;
    }
    /** A single update (revision delta) of a work item: changed fields and relations. */
    export interface WorkItemUpdate extends WorkItemTrackingResourceReference {
        fields: {
            [key: string]: WorkItemFieldUpdate;
        };
        id: number;
        relations: WorkItemRelationUpdates;
        rev: number;
        revisedBy: IdentityReference;
        revisedDate: Date;
        workItemId: number;
    }
export var TypeInfo: {
AttachmentReference: {
fields: any;
};
FieldDependentRule: {
fields: any;
};
FieldsToEvaluate: {
fields: any;
};
FieldType: {
enumValues: {
"string": number;
"integer": number;
"dateTime": number;
"plainText": number;
"html": number;
"treePath": number;
"history": number;
"double": number;
"guid": number;
"boolean": number;
};
};
FieldUsage: {
enumValues: {
"none": number;
"workItem": number;
"workItemLink": number;
"tree": number;
"workItemTypeExtension": number;
};
};
IdentityReference: {
fields: any;
};
Link: {
fields: any;
};
LinkQueryMode: {
enumValues: {
"workItems": number;
"linksOneHopMustContain": number;
"linksOneHopMayContain": number;
"linksOneHopDoesNotContain": number;
"linksRecursiveMustContain": number;
"linksRecursiveMayContain": number;
"linksRecursiveDoesNotContain": number;
};
};
LogicalOperation: {
enumValues: {
"nONE": number;
"aND": number;
"oR": number;
};
};
ProjectReference: {
fields: any;
};
ProvisioningActionType: {
enumValues: {
"import": number;
"validate": number;
};
};
ProvisioningResult: {
fields: any;
};
QueryExpand: {
enumValues: {
"none": number;
"wiql": number;
"clauses": number;
"all": number;
};
};
QueryHierarchyItem: {
fields: any;
};
QueryResultType: {
enumValues: {
"workItem": number;
"workItemLink": number;
};
};
QueryType: {
enumValues: {
"flat": number;
"tree": number;
"oneHop": number;
};
};
ReportingWorkItemLink: {
fields: any;
};
ReportingWorkItemLinksBatch: {
fields: any;
};
ReportingWorkItemRevisionsBatch: {
fields: any;
};
ReportingWorkItemRevisionsFilter: {
fields: any;
};
StreamedBatch: {
fields: any;
};
TemplateType: {
enumValues: {
"workItemType": number;
"globalWorkflow": number;
};
};
TreeNodeStructureType: {
enumValues: {
"area": number;
"iteration": number;
};
| "areas": number;
"iterations": number;
};
};
Wiql: {
fields: any;
};
WitBatchRequest: {
fields: any;
};
WitBatchResponse: {
fields: any;
};
WorkItem: {
fields: any;
};
WorkItemClassificationNode: {
fields: any;
};
WorkItemDelete: {
fields: any;
};
WorkItemDeleteReference: {
fields: any;
};
WorkItemDeleteUpdate: {
fields: any;
};
WorkItemExpand: {
enumValues: {
"none": number;
"relations": number;
"fields": number;
"all": number;
};
};
WorkItemField: {
fields: any;
};
WorkItemFieldOperation: {
fields: any;
};
WorkItemFieldReference: {
fields: any;
};
WorkItemFieldUpdate: {
fields: any;
};
WorkItemHistory: {
fields: any;
};
WorkItemLink: {
fields: any;
};
WorkItemQueryClause: {
fields: any;
};
WorkItemQueryResult: {
fields: any;
};
WorkItemQuerySortColumn: {
fields: any;
};
WorkItemReference: {
fields: any;
};
WorkItemRelation: {
fields: any;
};
WorkItemRelationType: {
fields: any;
};
WorkItemRelationUpdates: {
fields: any;
};
WorkItemRevisionReference: {
fields: any;
};
WorkItemTrackingReference: {
fields: any;
};
WorkItemTrackingResource: {
fields: any;
};
WorkItemTrackingResourceReference: {
fields: any;
};
WorkItemType: {
fields: any;
};
WorkItemTypeCategory: {
fields: any;
};
WorkItemTypeFieldInstance: {
fields: any;
};
WorkItemTypeReference: {
fields: any;
};
WorkItemTypeTemplate: {
fields: any;
};
WorkItemTypeTemplateUpdateModel: {
fields: any;
};
WorkItemUpdate: {
fields: any;
};
};
}
declare module "TFS/WorkItemTracking/ExtensionContracts" {
    /**
     * Interface defining the arguments for notifications sent by the ActiveWorkItemService
     */
    export interface IWorkItemChangedArgs {
        /**
         * Id of the work item.
         */
        id: number;
    }
    /**
     * Interface defining the arguments for the 'onLoaded' notification sent by the ActiveWorkItemService
     */
    export interface IWorkItemLoadedArgs extends IWorkItemChangedArgs {
        /**
         * 'true' if the work item is a 'new', unsaved work item, 'false' otherwise.
         */
        isNew: boolean;
    }
    /**
     * Interface defining the arguments for the 'onFieldChanged' notification sent by the ActiveWorkItemService
     */
    export interface IWorkItemFieldChangedArgs extends IWorkItemChangedArgs {
        /**
         * Set of fields that have been changed. 'key' is the field reference name.
         */
        changedFields: {
            [key: string]: any;
        };
    }
    /**
     * Interface defining notifications provided by the ActiveWorkItemService
     */
    export interface IWorkItemNotificationListener {
        /**
         * Called when an extension is loaded
         *
         * @param workItemLoadedArgs Information about the work item that was loaded.
         */
        onLoaded(workItemLoadedArgs: IWorkItemLoadedArgs): void;
        /**
         * Called when a field is modified
         *
         * @param fieldChangedArgs Information about the work item that was modified and the fields that were changed.
         */
        onFieldChanged(fieldChangedArgs: IWorkItemFieldChangedArgs): void;
        /**
         * Called when a work item is saved
         *
         * @param savedEventArgs Information about the work item that was saved.
         */
        onSaved(savedEventArgs: IWorkItemChangedArgs): void;
        /**
         * Called when a work item is refreshed
         *
         * @param refreshEventArgs Information about the work item that was refreshed.
         */
        onRefreshed(refreshEventArgs: IWorkItemChangedArgs): void;
        /**
         * Called when a work item is reset (undo back to unchanged state)
         *
         * @param undoEventArgs Information about the work item that was reset.
         */
        onReset(undoEventArgs: IWorkItemChangedArgs): void;
        /**
         * Called when a work item is unloaded
         *
         * @param unloadedEventArgs Information about the work item that was unloaded.
         */
        onUnloaded(unloadedEventArgs: IWorkItemChangedArgs): void;
    }
}
declare module "TFS/WorkItemTracking/ProcessContracts" {
    /** Definition of a process field (read-only view exposed by the Processes API). */
    export interface FieldModel {
        description: string;
        id: string;
        name: string;
        type: string;
        url: string;
    }
    /** Runtime type metadata for this module's contracts, used by the serialization layer. */
    export var TypeInfo: {
        FieldModel: {
            fields: any;
        };
    };
}
declare module "TFS/WorkItemTracking/ProcessDefinitionsContracts" {
    /**
     * Represent a control in the form.
     */
    export interface Control {
        /**
         * Type of the control.
         */
        controlType: string;
        /**
         * Id for the control
         */
        id: string;
        /**
         * Label for the field
         */
        label: string;
        /**
         * Inner text of the control.
         */
        metadata: string;
        /**
         * Order in which the control should appear in its group.
         */
        order: number;
        /**
         * A value indicating if the control is readonly.
         */
        readOnly: boolean;
        /**
         * A value indicating if the control should be hidden or not.
         */
        visible: boolean;
        /**
         * Watermark text for the textbox.
         */
        watermark: string;
    }
    /** Definition of a process field. */
    export interface FieldModel {
        description: string;
        id: string;
        name: string;
        type: string;
        url: string;
    }
    /** A single rule applied to a field (rule kind + value; exact semantics defined by the server — confirm). */
    export interface FieldRuleModel {
        rule: string;
        value: string;
    }
    /** Payload for updating an existing field (only the description is mutable here). */
    export interface FieldUpdate {
        description: string;
        id: string;
    }
    /**
     * Represent a group in the form that holds controls in it.
     */
    export interface Group {
        /**
         * Controls to be put in the group.
         */
        controls: Control[];
        /**
         * Id for the group
         */
        id: string;
        /**
         * Label for the group.
         */
        label: string;
        /**
         * Order in which the group should appear in the section.
         */
        order: number;
        /**
         * A value indicating if the group should be hidden or not.
         */
        visible: boolean;
    }
    /** Association of a field (by id) with a work item type, including its rules. */
    export interface WorkItemTypeFieldModel {
        id: string;
        rules: FieldRuleModel[];
        url: string;
    }
    /** Definition of a work item type within a process. */
    export interface WorkItemTypeModel {
        description: string;
        id: string;
        /**
         * Parent WIT Id/Internal ReferenceName that it inherits from
         */
        inherits: string;
        name: string;
        url: string;
    }
    /** Runtime type metadata for this module's contracts, used by the serialization layer. */
    export var TypeInfo: {
        Control: {
            fields: any;
        };
        FieldModel: {
            fields: any;
        };
        FieldRuleModel: {
            fields: any;
        };
        FieldUpdate: {
            fields: any;
        };
        Group: {
            fields: any;
        };
        WorkItemTypeFieldModel: {
            fields: any;
        };
        WorkItemTypeModel: {
            fields: any;
        };
    };
}
declare module "TFS/WorkItemTracking/ProcessDefinitionsRestClient" {
import ProcessDefinitionsContracts = require("TFS/WorkItemTracking/ProcessDefinitionsContracts");
import VSS_WebApi = require("VSS/WebApi/RestClient");
/**
 * HTTP client for editing process definitions: fields, work item types, and
 * work item form layout (pages/sections/groups/controls).
 *
 * @exemptedapi
 */
export class WorkItemTrackingHttpClient2_2 extends VSS_WebApi.VssHttpClient {
    static serviceInstanceId: string;
    constructor(rootRequestPath: string);
    /**
     * [Preview API] Removes the control for the given field from a group on the work item type's form layout.
     *
     * @param {string} processId - Id of the process.
     * @param {string} witRefName - Reference name of the work item type.
     * @param {string} groupId - Id of the group the control is removed from.
     * @param {string} fieldRefName - Reference name of the field whose control is removed.
     * @return IPromise<void>
     */
    removeFieldControlFromGroup(processId: string, witRefName: string, groupId: string, fieldRefName: string): IPromise<void>;
    /**
     * [Preview API] Places the control for the given field into a group, optionally removing it from another group.
     *
     * @param {ProcessDefinitionsContracts.Control} control - The control settings to apply.
     * @param {string} processId - Id of the process.
     * @param {string} witRefName - Reference name of the work item type.
     * @param {string} groupId - Id of the group to place the control in.
     * @param {string} fieldRefName - Reference name of the field the control is bound to.
     * @param {string} removeFromGroupId - Optional id of a group to remove the control from.
     * @return IPromise<void>
     */
    setFieldControlInGroup(control: ProcessDefinitionsContracts.Control, processId: string, witRefName: string, groupId: string, fieldRefName: string, removeFromGroupId?: string): IPromise<void>;
    /**
     * [Preview API] Creates a new field in the process; returns the created field.
     *
     * @param {ProcessDefinitionsContracts.FieldModel} field - Definition of the field to create.
     * @param {string} processId - Id of the process.
     * @return IPromise<ProcessDefinitionsContracts.FieldModel>
     */
    createField(field: ProcessDefinitionsContracts.FieldModel, processId: string): IPromise<ProcessDefinitionsContracts.FieldModel>;
    /**
     * [Preview API] Deletes a field from the process.
     *
     * @param {string} processId - Id of the process.
     * @param {string} field - The field to delete (id or reference name — not distinguishable from this declaration; confirm against the REST docs).
     * @return IPromise<void>
     */
    deleteField(processId: string, field: string): IPromise<void>;
    /**
     * [Preview API] Updates an existing field in the process; returns the updated field.
     *
     * @param {ProcessDefinitionsContracts.FieldUpdate} field - Id of the field plus the values to update.
     * @param {string} processId - Id of the process.
     * @return IPromise<ProcessDefinitionsContracts.FieldModel>
     */
    updateField(field: ProcessDefinitionsContracts.FieldUpdate, processId: string): IPromise<ProcessDefinitionsContracts.FieldModel>;
    /**
     * [Preview API] Adds a group to a section of a page on the work item type's form layout.
     *
     * @param {ProcessDefinitionsContracts.Group} group - The group to add.
     * @param {string} processId - Id of the process.
     * @param {string} witRefName - Reference name of the work item type.
     * @param {string} pageId - Id of the page the section belongs to.
     * @param {string} sectionId - Id of the section to add the group to.
     * @return IPromise<void>
     */
    addGroup(group: ProcessDefinitionsContracts.Group, processId: string, witRefName: string, pageId: string, sectionId: string): IPromise<void>;
    /**
     * [Preview API] Edits an existing group on the work item type's form layout.
     *
     * @param {ProcessDefinitionsContracts.Group} group - The updated group settings.
     * @param {string} processId - Id of the process.
     * @param {string} witRefName - Reference name of the work item type.
     * @param {string} pageId - Id of the page the section belongs to.
     * @param {string} sectionId - Id of the section containing the group.
     * @param {string} groupId - Id of the group to edit.
     * @return IPromise<void>
     */
    editGroup(group: ProcessDefinitionsContracts.Group, processId: string, witRefName: string, pageId: string, sectionId: string, groupId: string): IPromise<void>;
    /**
     * [Preview API] Removes a group from a section of the work item type's form layout.
     *
     * @param {string} processId - Id of the process.
     * @param {string} witRefName - Reference name of the work item type.
     * @param {string} pageId - Id of the page the section belongs to.
     * @param {string} sectionId - Id of the section containing the group.
     * @param {string} groupId - Id of the group to remove.
     * @return IPromise<void>
     */
    removeGroup(processId: string, witRefName: string, pageId: string, sectionId: string, groupId: string): IPromise<void>;
    /**
     * [Preview API] Places a group into the given section, removing it from another section.
     *
     * @param {ProcessDefinitionsContracts.Group} group - The group settings to apply.
     * @param {string} processId - Id of the process.
     * @param {string} witRefName - Reference name of the work item type.
     * @param {string} pageId - Id of the page the section belongs to.
     * @param {string} sectionId - Id of the destination section.
     * @param {string} groupId - Id of the group being placed.
     * @param {string} removeFromSectionId - Id of the section to remove the group from.
     * @return IPromise<void>
     */
    setGroupInSection(group: ProcessDefinitionsContracts.Group, processId: string, witRefName: string, pageId: string, sectionId: string, groupId: string, removeFromSectionId: string): IPromise<void>;
    /**
     * [Preview API] Adds a field to a work item type; returns the resulting association.
     *
     * @param {ProcessDefinitionsContracts.WorkItemTypeFieldModel} field - The field (and its rules) to add.
     * @param {string} processId - Id of the process.
     * @param {string} witRefName - Reference name of the work item type.
     * @return IPromise<ProcessDefinitionsContracts.WorkItemTypeFieldModel>
     */
    addWorkItemTypeField(field: ProcessDefinitionsContracts.WorkItemTypeFieldModel, processId: string, witRefName: string): IPromise<ProcessDefinitionsContracts.WorkItemTypeFieldModel>;
    /**
     * [Preview API] Creates a new work item type in the process; returns the created type.
     *
     * @param {ProcessDefinitionsContracts.WorkItemTypeModel} workitemType - Definition of the work item type to create.
     * @param {string} processId - Id of the process.
     * @return IPromise<ProcessDefinitionsContracts.WorkItemTypeModel>
     */
    createWorkItemType(workitemType: ProcessDefinitionsContracts.WorkItemTypeModel, processId: string): IPromise<ProcessDefinitionsContracts.WorkItemTypeModel>;
    /**
     * [Preview API] Removes a field from a work item type.
     *
     * @param {string} processId - Id of the process.
     * @param {string} witRefName - Reference name of the work item type.
     * @param {string} field - The field to remove (id or reference name — not distinguishable from this declaration; confirm against the REST docs).
     * @return IPromise<void>
     */
    removeWorkItemTypeField(processId: string, witRefName: string, field: string): IPromise<void>;
}
export class WorkItemTrackingHttpClient extends WorkItemTrackingHttpClient2_2 {
    constructor(rootRequestPath: string);
}
/**
 * Gets an http client targeting the latest released version of the APIs.
 *
 * @return WorkItemTrackingHttpClient2_2
 */
export function getClient(): WorkItemTrackingHttpClient2_2;
}
declare module "TFS/WorkItemTracking/ProcessRestClient" {
import ProcessContracts = require("TFS/WorkItemTracking/ProcessContracts");
import VSS_WebApi = require("VSS/WebApi/RestClient");
/**
 * HTTP client for reading process information (fields and work item type fields).
 *
 * @exemptedapi
 */
export class WorkItemTrackingHttpClient2_2 extends VSS_WebApi.VssHttpClient {
    static serviceInstanceId: string;
    constructor(rootRequestPath: string);
    /**
     * [Preview API] Gets the fields defined for the given process.
     *
     * @param {string} processId - Id of the process to query.
     * @return IPromise<ProcessContracts.FieldModel[]>
     */
    getFields(processId: string): IPromise<ProcessContracts.FieldModel[]>;
    /**
     * [Preview API] Gets the fields used by a specific work item type within the given process.
     *
     * @param {string} processId - Id of the process to query.
     * @param {string} witRefName - Reference name of the work item type.
     * @return IPromise<ProcessContracts.FieldModel[]>
     */
    getWorkItemTypeFields(processId: string, witRefName: string): IPromise<ProcessContracts.FieldModel[]>;
}
export class WorkItemTrackingHttpClient extends WorkItemTrackingHttpClient2_2 {
    constructor(rootRequestPath: string);
}
/**
 * Gets an http client targeting the latest released version of the APIs.
 *
 * @return WorkItemTrackingHttpClient2_2
 */
export function getClient(): WorkItemTrackingHttpClient2_2;
}
declare module "TFS/WorkItemTracking/ProcessTemplateContracts" {
/**
 * Result of exporting a process template: identifying info plus the raw
 * (serialized) template data (see ProcessTemplateRestClient.export).
 */
export interface ProcessExportResult {
    data: string;
    id: string;
    name: string;
}
/**
 * Result of importing a process template (see ProcessTemplateRestClient.import).
 */
export interface ProcessImportResult {
    helpUrl: string;
    id: string;
    /**
     * Id of the promote job triggered by the import; its progress can be
     * polled via ProcessTemplateRestClient.status.
     */
    promoteJobId: string;
    /**
     * Issues found while validating the imported template.
     */
    validator: ValidationIssue[];
}
/**
 * Progress/outcome of a promote job (see ProcessTemplateRestClient.status).
 * The unit of the complete/pending counts is not visible in this declaration
 * file — confirm against the REST docs.
 */
export interface ProcessPromoteStatus {
    complete: number;
    id: string;
    message: string;
    pending: number;
    remainingRetries: number;
    successful: boolean;
}
/**
 * Payload for importing a process template.
 */
export interface ProcessTemplateModel {
    /**
     * Raw (serialized) process template data.
     */
    data: string;
    id: string;
    /**
     * Presumably allows the import to proceed despite warning-level
     * validation issues — confirm server behavior.
     */
    ignoreWarnings: boolean;
}
/**
 * A single issue found while validating a process template.
 */
export interface ValidationIssue {
    description: string;
    /**
     * File within the template the issue was found in.
     */
    file: string;
    issueType: ValidationIssueType;
    /**
     * Line number the issue was found at.
     */
    line: number;
}
/**
 * Severity of a validation issue.
 */
export enum ValidationIssueType {
    Warning = 0,
    Error = 1,
}
/**
 * Runtime type metadata for the contracts in this module. Presumably consumed
 * by the REST client infrastructure for (de)serialization — confirm against
 * the VSS serialization docs.
 */
export var TypeInfo: {
    ProcessExportResult: {
        fields: any;
    };
    ProcessImportResult: {
        fields: any;
    };
    ProcessPromoteStatus: {
        fields: any;
    };
    ProcessTemplateModel: {
        fields: any;
    };
    ValidationIssue: {
        fields: any;
    };
    ValidationIssueType: {
        enumValues: {
            "warning": number;
            "error": number;
        };
    };
};
}
declare module "TFS/WorkItemTracking/ProcessTemplateRestClient" {
import ProcessTemplateContracts = require("TFS/WorkItemTracking/ProcessTemplateContracts");
import VSS_WebApi = require("VSS/WebApi/RestClient");
/**
 * HTTP client for exporting/importing process templates and tracking promote jobs.
 *
 * @exemptedapi
 */
export class WorkItemTrackingHttpClient2_2 extends VSS_WebApi.VssHttpClient {
    static serviceInstanceId: string;
    constructor(rootRequestPath: string);
    /**
     * [Preview API] Returns requested process template
     *
     * @param {string} id - Id of the process template to export.
     * @return IPromise<ProcessTemplateContracts.ProcessExportResult>
     */
    export(id: string): IPromise<ProcessTemplateContracts.ProcessExportResult>;
    /**
     * [Preview API] Whether promote has completed for the specified promote job id
     *
     * @param {string} id - Promote job id (see ProcessImportResult.promoteJobId).
     * @return IPromise<ProcessTemplateContracts.ProcessPromoteStatus>
     */
    status(id: string): IPromise<ProcessTemplateContracts.ProcessPromoteStatus>;
    /**
     * [Preview API] Records supplied process template and triggers promote
     *
     * @param {ProcessTemplateContracts.ProcessTemplateModel} body - Template data plus import options.
     * @return IPromise<ProcessTemplateContracts.ProcessImportResult>
     */
    import(body: ProcessTemplateContracts.ProcessTemplateModel): IPromise<ProcessTemplateContracts.ProcessImportResult>;
}
export class WorkItemTrackingHttpClient extends WorkItemTrackingHttpClient2_2 {
    constructor(rootRequestPath: string);
}
/**
 * Gets an http client targeting the latest released version of the APIs.
 *
 * @return WorkItemTrackingHttpClient2_2
 */
export function getClient(): WorkItemTrackingHttpClient2_2;
}
declare module "TFS/WorkItemTracking/RestClient" {
import Contracts = require("TFS/WorkItemTracking/Contracts");
import VSS_Common_Contracts = require("VSS/WebApi/Contracts");
import VSS_WebApi = require("VSS/WebApi/RestClient");
export class WorkItemTrackingHttpClient2_2 extends VSS_WebApi.VssHttpClient {
static serviceInstanceId: string;
constructor(rootRequestPath: string);
/**
* Creates an attachment.
*
* @param {string} content - Content to upload
* @param {string} fileName
* @param {string} uploadType
* @return IPromise<Contracts.AttachmentReference>
*/
createAttachment(content: string, fileName?: string, uploadType?: string): IPromise<Contracts.AttachmentReference>;
/**
* Returns an attachment
*
* @param {string} id
* @param {string} fileName
* @return IPromise<ArrayBuffer>
*/
getAttachmentContent(id: string, fileName?: string): IPromise<ArrayBuffer>;
/**
* Returns an attachment
*
* @param {string} id
* @param {string} fileName
* @return IPromise<ArrayBuffer>
*/
getAttachmentZip(id: string, fileName?: string): IPromise<ArrayBuffer>;
/**
* @param {string} project - Project ID or project name
* @param {number} depth
* @return IPromise<Contracts.WorkItemClassificationNode[]>
*/
getRootNodes(project: string, depth?: number): IPromise<Contracts.WorkItemClassificationNode[]>;
/**
* @param {Contracts.WorkItemClassificationNode} postedNode
* @param {string} project - Project ID or project name
* @param {Contracts.TreeStructureGroup} structureGroup
* @param {string} path
* @return IPromise<Contracts.WorkItemClassificationNode>
*/
createOrUpdateClassificationNode(postedNode: Contracts.WorkItemClassificationNode, project: string, structureGroup: Contracts.TreeStructureGroup, path?: string): IPromise<Contracts.WorkItemClassificationNode>;
/**
* @param {string} project - Project ID or project name
* @param {Contracts.TreeStructureGroup} structureGroup
* @param {string} path
* @param {number} reclassifyId
* @return IPromise<void>
*/
deleteClassificationNode(project: string, structureGroup: Contracts.TreeStructureGroup, path?: string, reclassifyId?: number): IPromise<void>;
/**
* @param {string} project - Project ID or project name
* @param {Contracts.TreeStructureGroup} structureGroup
* @param {string} path
* @param {number} depth
* @return IPromise<Contracts.WorkItemClassificationNode>
*/
getClassificationNode(project: string, structureGroup: Contracts.TreeStructureGroup, path?: string, depth?: number): IPromise<Contracts.WorkItemClassificationNode>;
/**
* @param {Contracts.WorkItemClassificationNode} postedNode
* @param {string} project - Project ID or project name
* @param {Contracts.TreeStructureGroup} structureGroup
* @param {string} path
* @return IPromise<Contracts.WorkItemClassificationNode>
*/
updateClassificationNode(postedNode: Contracts.WorkItemClassificationNode, project: string, structureGroup: Contracts.TreeStructureGroup, path?: string): IPromise<Contracts.WorkItemClassificationNode>;
/**
* @param {string} field
* @return IPromise<Contracts.WorkItemField>
*/
getField(field: string): IPromise<Contracts.WorkItemField>;
/**
* @return IPromise<Contracts.WorkItemField[]>
*/
getFields(): IPromise<Contracts.WorkItemField[]>;
/**
* Returns history of all revision for a given work item ID
*
* @param {number} id
* @param {number} top
* @param {number} skip
* @return IPromise<Contracts.WorkItemHistory[]>
*/
getHistory(id: number, top?: number, skip?: number): IPromise<Contracts.WorkItemHistory[]>;
/**
* Returns the history value of particular revision
*
* @param {number} id
* @param {number} revisionNumber
* @return IPromise<Contracts.WorkItemHistory>
*/
getHistoryById(id: number, revisionNumber: number): IPromise<Contracts.WorkItemHistory>;
/**
* Creates a query, or moves a query.
*
* @param {Contracts.QueryHierarchyItem} postedQuery - The query to create.
* @param {string} project - Project ID or project name
* @param {string} query - The parent path for the query to create.
* @return IPromise<Contracts.QueryHierarchyItem>
*/
createQuery(postedQuery: Contracts.QueryHierarchyItem, project: string, query: string): IPromise<Contracts.QueryHierarchyItem>;
/**
* @param {string} project - Project ID or project name
* @param {string} query
* @return IPromise<void>
*/
deleteQuery(project: string, query: string): IPromise<void>;
/**
* Retrieves all queries the user has access to in the current project
*
* @param {string} project - Project ID or project name
* @param {Contracts.QueryExpand} expand
* @param {number} depth
* @param {boolean} includeDeleted
* @return IPromise<Contracts.QueryHierarchyItem[]>
*/
getQueries(project: string, expand?: Contracts.QueryExpand, depth?: number, includeDeleted?: boolean): IPromise<Contracts.QueryHierarchyItem[]>;
/**
* Retrieves a single query by project and either id or path
*
* @param {string} project - Project ID or project name
* @param {string} query
* @param {Contracts.QueryExpand} expand
* @param {number} depth
* @param {boolean} includeDeleted
* @return IPromise<Contracts.QueryHierarchyItem>
*/
getQuery(project: string, query: string, expand?: Contracts.QueryExpand, depth?: number, includeDeleted?: boolean): IPromise<Contracts.QueryHierarchyItem>;
/**
* @param {Contracts.QueryHierarchyItem} queryUpdate
* @param {string} project - Project ID or project name
* @param {string} query
* @param {boolean} undeleteDescendants
* @return IPromise<Contracts.QueryHierarchyItem>
*/
updateQuery(queryUpdate: Contracts.QueryHierarchyItem, project: string, query: string, undeleteDescendants?: boolean): IPromise<Contracts.QueryHierarchyItem>;
/**
* @exemptedapi
* [Preview API]
*
* @param {number} id
* @param {string} project - Project ID or project name
* @return IPromise<void>
*/
destroyWorkItem(id: number, project?: string): IPromise<void>;
/**
* @exemptedapi
* [Preview API]
*
* @param {number} id
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.WorkItemDelete>
*/
getDeletedWorkItem(id: number, project?: string): IPromise<Contracts.WorkItemDelete>;
/**
* @exemptedapi
* [Preview API]
*
* @param {string} project - Project ID or project name
* @param {number[]} ids
* @return IPromise<Contracts.WorkItemDeleteReference[]>
*/
getDeletedWorkItems(project?: string, ids?: number[]): IPromise<Contracts.WorkItemDeleteReference[]>;
/**
* @exemptedapi
* [Preview API]
*
* @param {Contracts.WorkItemDeleteUpdate} payload
* @param {number} id
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.WorkItemDelete>
*/
restoreWorkItem(payload: Contracts.WorkItemDeleteUpdate, id: number, project?: string): IPromise<Contracts.WorkItemDelete>;
/**
* Returns a fully hydrated work item for the requested revision
*
* @param {number} id
* @param {number} revisionNumber
* @param {Contracts.WorkItemExpand} expand
* @return IPromise<Contracts.WorkItem>
*/
getRevision(id: number, revisionNumber: number, expand?: Contracts.WorkItemExpand): IPromise<Contracts.WorkItem>;
/**
* Returns the list of fully hydrated work item revisions, paged.
*
* @param {number} id
* @param {number} top
* @param {number} skip
* @param {Contracts.WorkItemExpand} expand
* @return IPromise<Contracts.WorkItem[]>
*/
getRevisions(id: number, top?: number, skip?: number, expand?: Contracts.WorkItemExpand): IPromise<Contracts.WorkItem[]>;
/**
* Validates the fields values.
*
* @param {Contracts.FieldsToEvaluate} ruleEngineInput
* @return IPromise<void>
*/
evaluateRulesOnField(ruleEngineInput: Contracts.FieldsToEvaluate): IPromise<void>;
/**
* Returns a single update for a work item
*
* @param {number} id
* @param {number} updateNumber
* @return IPromise<Contracts.WorkItemUpdate>
*/
getUpdate(id: number, updateNumber: number): IPromise<Contracts.WorkItemUpdate>;
/**
     * Returns the deltas between work item revisions
*
* @param {number} id
* @param {number} top
* @param {number} skip
* @return IPromise<Contracts.WorkItemUpdate[]>
*/
getUpdates(id: number, top?: number, skip?: number): IPromise<Contracts.WorkItemUpdate[]>;
/**
* Gets the results of the query.
*
* @param {Contracts.Wiql} wiql - The query containing the wiql.
* @param {string} project - Project ID or project name
* @param {string} team - Team ID or team name
* @return IPromise<Contracts.WorkItemQueryResult>
*/
queryByWiql(wiql: Contracts.Wiql, project?: string, team?: string): IPromise<Contracts.WorkItemQueryResult>;
/**
* Gets the results of the query by id.
*
* @param {string} id - The query id.
* @param {string} project - Project ID or project name
* @param {string} team - Team ID or team name
* @return IPromise<Contracts.WorkItemQueryResult>
*/
queryById(id: string, project?: string, team?: string): IPromise<Contracts.WorkItemQueryResult>;
/**
* Get a batch of work item links
*
* @param {string} project - Project ID or project name
* @param {string[]} types - A list of types to filter the results to specific work item types. Omit this parameter to get work item links of all work item types.
* @param {string} continuationToken - Specifies the continuationToken to start the batch from. Omit this parameter to get the first batch of links.
* @param {Date} startDateTime - Date/time to use as a starting point for link changes. Only link changes that occurred after that date/time will be returned. Cannot be used in conjunction with 'watermark' parameter.
* @return IPromise<Contracts.ReportingWorkItemLinksBatch>
*/
getReportingLinks(project?: string, types?: string[], continuationToken?: string, startDateTime?: Date): IPromise<Contracts.ReportingWorkItemLinksBatch>;
/**
* Gets the work item relation types.
*
* @param {string} relation
* @return IPromise<Contracts.WorkItemRelationType>
*/
getRelationType(relation: string): IPromise<Contracts.WorkItemRelationType>;
/**
* @return IPromise<Contracts.WorkItemRelationType[]>
*/
getRelationTypes(): IPromise<Contracts.WorkItemRelationType[]>;
/**
* Get a batch of work item revisions with the option of including deleted items
*
* @param {string} project - Project ID or project name
* @param {string[]} fields - A list of fields to return in work item revisions. Omit this parameter to get all reportable fields.
* @param {string[]} types - A list of types to filter the results to specific work item types. Omit this parameter to get work item revisions of all work item types.
* @param {string} continuationToken - Specifies the watermark to start the batch from. Omit this parameter to get the first batch of revisions.
* @param {Date} startDateTime - Date/time to use as a starting point for revisions, all revisions will occur after this date/time. Cannot be used in conjunction with 'watermark' parameter.
* @param {boolean} includeIdentityRef - Return an identity reference instead of a string value for identity fields.
* @param {boolean} includeDeleted - Specify if the deleted item should be returned.
* @return IPromise<Contracts.ReportingWorkItemRevisionsBatch>
*/
readReportingRevisionsGet(project?: string, fields?: string[], types?: string[], continuationToken?: string, startDateTime?: Date, includeIdentityRef?: boolean, includeDeleted?: boolean): IPromise<Contracts.ReportingWorkItemRevisionsBatch>;
/**
* Get a batch of work item revisions
*
* @param {Contracts.ReportingWorkItemRevisionsFilter} filter - An object that contains request settings: field filter, type filter, identity format
* @param {string} project - Project ID or project name
* @param {string} continuationToken - Specifies the watermark to start the batch from. Omit this parameter to get the first batch of revisions.
* @param {Date} startDateTime - Date/time to use as a starting point for revisions, all revisions will occur after this date/time. Cannot be used in conjunction with 'watermark' parameter.
* @return IPromise<Contracts.ReportingWorkItemRevisionsBatch>
*/
readReportingRevisionsPost(filter: Contracts.ReportingWorkItemRevisionsFilter, project?: string, continuationToken?: string, startDateTime?: Date): IPromise<Contracts.ReportingWorkItemRevisionsBatch>;
/**
* @param {number} id
* @param {boolean} destroy
* @return IPromise<Contracts.WorkItemDelete>
*/
deleteWorkItem(id: number, destroy?: boolean): IPromise<Contracts.WorkItemDelete>;
/**
* Returns a single work item
*
* @param {number} id
* @param {string[]} fields
* @param {Date} asOf
* @param {Contracts.WorkItemExpand} expand
* @return IPromise<Contracts.WorkItem>
*/
getWorkItem(id: number, fields?: string[], asOf?: Date, expand?: Contracts.WorkItemExpand): IPromise<Contracts.WorkItem>;
/**
* Returns a list of work items
*
* @param {number[]} ids
* @param {string[]} fields
* @param {Date} asOf
* @param {Contracts.WorkItemExpand} expand
* @return IPromise<Contracts.WorkItem[]>
*/
getWorkItems(ids: number[], fields?: string[], asOf?: Date, expand?: Contracts.WorkItemExpand): IPromise<Contracts.WorkItem[]>;
/**
* @param {VSS_Common_Contracts.JsonPatchDocument} document
* @param {number} id
* @param {boolean} validateOnly
* @param {boolean} bypassRules
* @return IPromise<Contracts.WorkItem>
*/
updateWorkItem(document: VSS_Common_Contracts.JsonPatchDocument, id: number, validateOnly?: boolean, bypassRules?: boolean): IPromise<Contracts.WorkItem>;
/**
* @param {VSS_Common_Contracts.JsonPatchDocument} document
* @param {string} project - Project ID or project name
* @param {string} type
* @param {boolean} validateOnly
* @param {boolean} bypassRules
* @return IPromise<Contracts.WorkItem>
*/
createWorkItem(document: VSS_Common_Contracts.JsonPatchDocument, project: string, type: string, validateOnly?: boolean, bypassRules?: boolean): IPromise<Contracts.WorkItem>;
/**
* Returns a single work item from a template
*
* @param {string} project - Project ID or project name
* @param {string} type
* @param {string} fields
* @param {Date} asOf
* @param {Contracts.WorkItemExpand} expand
* @return IPromise<Contracts.WorkItem>
*/
getWorkItemTemplate(project: string, type: string, fields?: string, asOf?: Date, expand?: Contracts.WorkItemExpand): IPromise<Contracts.WorkItem>;
/**
* @param {VSS_Common_Contracts.JsonPatchDocument} document
* @param {string} project - Project ID or project name
* @param {string} type
* @param {boolean} validateOnly
* @param {boolean} bypassRules
* @return IPromise<Contracts.WorkItem>
*/
updateWorkItemTemplate(document: VSS_Common_Contracts.JsonPatchDocument, project: string, type: string, validateOnly?: boolean, bypassRules?: boolean): IPromise<Contracts.WorkItem>;
/**
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.WorkItemTypeCategory[]>
*/
getWorkItemTypeCategories(project: string): IPromise<Contracts.WorkItemTypeCategory[]>;
/**
     * Returns the work item type category matching the specified category name
*
* @param {string} project - Project ID or project name
* @param {string} category
* @return IPromise<Contracts.WorkItemTypeCategory>
*/
getWorkItemTypeCategory(project: string, category: string): IPromise<Contracts.WorkItemTypeCategory>;
/**
     * Returns the requested work item type definition
*
* @param {string} project - Project ID or project name
* @param {string} type
* @return IPromise<Contracts.WorkItemType>
*/
getWorkItemType(project: string, type: string): IPromise<Contracts.WorkItemType>;
/**
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.WorkItemType[]>
*/
getWorkItemTypes(project: string): IPromise<Contracts.WorkItemType[]>;
/**
* Returns the dependent fields for the corresponding workitem type and fieldname
*
* @param {string} project - Project ID or project name
* @param {string} type
* @param {string} field
* @return IPromise<Contracts.FieldDependentRule>
*/
getDependentFields(project: string, type: string, field: string): IPromise<Contracts.FieldDependentRule>;
/**
* Export work item type
*
* @param {string} project - Project ID or project name
* @param {string} type
* @param {boolean} exportGlobalLists
* @return IPromise<Contracts.WorkItemTypeTemplate>
*/
exportWorkItemTypeDefinition(project?: string, type?: string, exportGlobalLists?: boolean): IPromise<Contracts.WorkItemTypeTemplate>;
/**
* Add/updates a work item type
*
* @param {Contracts.WorkItemTypeTemplateUpdateModel} updateModel
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.ProvisioningResult>
*/
updateWorkItemTypeDefinition(updateModel: Contracts.WorkItemTypeTemplateUpdateModel, project?: string): IPromise<Contracts.ProvisioningResult>;
}
export class WorkItemTrackingHttpClient extends WorkItemTrackingHttpClient2_2 {
constructor(rootRequestPath: string);
}
/**
* Gets an http client targeting the latest released version of the APIs.
*
* @return WorkItemTrackingHttpClient2_2
*/
export function getClient(): WorkItemTrackingHttpClient2_2;
}
declare module "TFS/WorkItemTracking/Services" {
import Contracts_Platform = require("VSS/Common/Contracts/Platform");
import WitContracts = require("TFS/WorkItemTracking/Contracts");
/**
* Host service for opening the work item form
*/
export interface IWorkItemFormNavigationService {
/**
* Open the specified work item. The host page will display the work item in a dialog,
* or it may update the current page view, depending on the current page.
*
* @param workItemId The id of the work item to open
* @param openInNewTab If true, open the work item in a new tab
*/
openWorkItem(workItemId: number, openInNewTab?: boolean): any;
/**
* Open a new work item of the specified type. The host page will display the new work item in a dialog,
* or it may update the current page view, depending on the current page.
*
* @param workItemTypeName The name of the work item type to open
* @param initialValues (Optional) A dictionary of any initial field values to set after opening the new work item.
*/
openNewWorkItem(workItemTypeName: string, initialValues?: {
[fieldName: string]: any;
}): any;
}
/**
* Host service for opening the work item form
*/
export module WorkItemFormNavigationService {
var contributionId: string;
/**
* Get an instance of the host work item service
*
* @param webContext Optional web context to scope the service to
*/
function getService(webContext?: Contracts_Platform.WebContext): IPromise<IWorkItemFormNavigationService>;
}
/**
* Host service for interacting with the currently active work item form (work item currently displayed in the UI).
*/
export interface IWorkItemFormService {
/**
* Gets id of active work item.
*/
getId(): number;
/**
* Gets active work item's latest revision.
*/
getRevision(): number;
/**
* Gets active work item fields. Returns an array of work item field.
*/
getFields(): WitContracts.WorkItemField[];
/**
* Gets field value of active work item. Returns field value.
*
* @param fieldReferenceName Field reference name
* @param returnOriginalValue Optional setting to whether getting the unsaved values or not. Default is false.
*/
getFieldValue(fieldReferenceName: string, returnOriginalValue?: boolean): Object;
/**
* Gets field values of active work item. Returns a dictionary of field refName/values.
*
     * @param fieldReferenceNames An array of field reference names
* @param returnOriginalValue Optional setting to whether getting the unsaved values or not. Default is false.
*/
getFieldValues(fieldReferenceNames: string[], returnOriginalValue?: boolean): IDictionaryStringTo<Object>;
/**
     * Sets field value of active work item. Returns true if successful.
*
* @param fieldReferenceName Field reference name
* @param value Field value
*/
setFieldValue(fieldReferenceName: string, value: Object): boolean;
/**
* Sets field values of active work item. Returns a dictionary of field refName/results.
*
* @param fields A dictionary of field refName/values
*/
setFieldValues(fields: IDictionaryStringTo<Object>): IDictionaryStringTo<boolean>;
/**
* Returns true if active work item is dirty
*/
isDirty(): boolean;
/**
* Returns true if active work item is new
*/
isNew(): boolean;
/**
* Returns true if active work item fields are all valid
*/
isValid(): boolean;
/**
* Gets invalid fields. Returns an array of invalid work item fields.
*
*/
getInvalidFields(): WitContracts.WorkItemField[];
/**
     * Adds work item relations to the active work item
     *
     * @param workItemRelations Work item relations to add.
*/
addWorkItemRelations(workItemRelations: WitContracts.WorkItemRelation[]): void;
/**
     * Removes work item relations from the active work item
     *
     * @param workItemRelations Work item relations to remove.
*/
removeWorkItemRelations(workItemRelations: WitContracts.WorkItemRelation[]): void;
/**
* Returns array of work item relations
*/
getWorkItemRelations(): WitContracts.WorkItemRelation[];
/**
* Returns resource url of specified workitem
*/
getWorkItemResourceUrl(workItemId: number): string;
/**
* Returns array of work item relation types
*/
getWorkItemRelationTypes(): WitContracts.WorkItemRelationType[];
/**
* Returns true if active work item available
*/
hasActiveWorkItem(): boolean;
/**
* Saves active work item
*/
beginSaveWorkItem(successCallback: () => void, errorCallback: () => void): void;
}
export module WorkItemFormService {
var contributionId: string;
/**
* Get an instance of the host work item service
*
* @param webContext Optional web context to scope the service to
*/
function getService(webContext?: Contracts_Platform.WebContext): IPromise<IWorkItemFormService>;
}
}
declare module "TFS/WorkItemTracking/UIContracts" {
import WitContracts = require("TFS/WorkItemTracking/Contracts");
/**
* A query result in the WIT UI
*/
export interface QueryResultWorkItemContext {
columns: string[];
rows: any[];
query: WitContracts.QueryHierarchyItem;
}
/**
* A work item query in the WIT UI
*/
export interface WorkItemQueryContext {
query: WitContracts.QueryHierarchyItem;
}
}
declare module "TFS/Work/Contracts" {
import System_Contracts = require("VSS/Common/Contracts/System");
export interface Activity {
capacityPerDay: number;
name: string;
}
export interface attribute {
}
export interface Board extends ShallowReference {
_links: any;
allowedMappings: {
[key: string]: {
[key: string]: string[];
};
};
canEdit: boolean;
columns: BoardColumn[];
isValid: boolean;
revision: number;
rows: BoardRow[];
}
export interface BoardCardRuleSettings {
_links: any;
rules: {
[key: string]: Rule[];
};
url: string;
}
export interface BoardCardSettings {
cards: {
[key: string]: FieldSetting[];
};
}
export interface BoardChart extends BoardChartReference {
/**
* The links for the resource
*/
_links: any;
/**
* The settings for the resource
*/
settings: {
[key: string]: any;
};
}
export interface BoardChartReference {
/**
* Name of the resource
*/
name: string;
/**
* Full http link to the resource
*/
url: string;
}
export interface BoardColumn {
columnType: BoardColumnType;
description: string;
id: string;
isSplit: boolean;
itemLimit: number;
name: string;
stateMappings: {
[key: string]: string;
};
}
export enum BoardColumnType {
Incoming = 0,
InProgress = 1,
Outgoing = 2,
}
export interface BoardReference extends ShallowReference {
}
export interface BoardRow {
id: string;
name: string;
}
export interface BoardSuggestedValue {
name: string;
}
export enum BugsBehavior {
Off = 0,
AsRequirements = 1,
AsTasks = 2,
}
/**
* Expected data from PATCH
*/
export interface CapacityPatch {
activities: Activity[];
daysOff: DateRange[];
}
export interface DateRange {
/**
* End of the date range.
*/
end: Date;
/**
* Start of the date range.
*/
start: Date;
}
/**
* An abstracted reference to a field
*/
export interface FieldReference {
/**
* fieldRefName for the field
*/
referenceName: string;
/**
* Full http link to more information about the field
*/
url: string;
}
export interface FieldSetting {
}
export interface FilterClause {
fieldName: string;
index: number;
logicalOperator: string;
operator: string;
value: string;
}
export interface Member {
displayName: string;
id: string;
imageUrl: string;
uniqueName: string;
url: string;
}
export interface Rule {
clauses: FilterClause[];
filter: string;
isEnabled: string;
name: string;
settings: attribute;
}
/**
* An abstracted reference to some other resource. This class is used to provide the board data contracts with a uniform way to reference other resources in a way that provides easy traversal through links.
*/
export interface ShallowReference {
/**
* Id of the resource
*/
id: string;
/**
* Name of the resource
*/
name: string;
/**
* Full http link to the resource
*/
url: string;
}
/**
* Represents a single TeamFieldValue
*/
export interface TeamFieldValue {
includeChildren: boolean;
value: string;
}
/**
* Essentially a collection of team field values
*/
export interface TeamFieldValues extends TeamSettingsDataContractBase {
/**
* The default team field value
*/
defaultValue: string;
/**
* Shallow ref to the field being used as a team field
*/
field: FieldReference;
/**
* Collection of all valid team field values
*/
values: TeamFieldValue[];
}
/**
* Expected data from PATCH
*/
export interface TeamFieldValuesPatch {
defaultValue: string;
values: TeamFieldValue[];
}
export interface TeamIterationAttributes {
finishDate: Date;
startDate: Date;
}
/**
* Represents capacity for a specific team member
*/
export interface TeamMemberCapacity extends TeamSettingsDataContractBase {
/**
* Collection of capacities associated with the team member
*/
activities: Activity[];
/**
* The days off associated with the team member
*/
daysOff: DateRange[];
/**
* Shallow Ref to the associated team member
*/
teamMember: Member;
}
/**
* Data contract for TeamSettings
*/
export interface TeamSetting extends TeamSettingsDataContractBase {
/**
* Default Iteration
*/
backlogIteration: TeamSettingsIteration;
/**
* Information about categories that are visible on the backlog.
*/
backlogVisibilities: {
[key: string]: boolean;
};
/**
* BugsBehavior (Off, AsTasks, AsRequirements, ...)
*/
bugsBehavior: BugsBehavior;
/**
* Days that the team is working
*/
workingDays: System_Contracts.DayOfWeek[];
}
/**
* Base class for TeamSettings data contracts. Anything common goes here.
*/
export interface TeamSettingsDataContractBase {
/**
* Collection of links relevant to resource
*/
_links: any;
/**
* Full http link to the resource
*/
url: string;
}
export interface TeamSettingsDaysOff extends TeamSettingsDataContractBase {
daysOff: DateRange[];
}
export interface TeamSettingsDaysOffPatch {
daysOff: DateRange[];
}
/**
* Represents a shallow ref for a single iteration
*/
export interface TeamSettingsIteration extends TeamSettingsDataContractBase {
/**
* Attributes such as start and end date
*/
attributes: TeamIterationAttributes;
/**
* Id of the resource
*/
id: string;
/**
* Name of the resource
*/
name: string;
/**
* Relative path of the iteration
*/
path: string;
}
/**
* Data contract for what we expect to receive when PATCH
*/
export interface TeamSettingsPatch {
backlogIteration: string;
backlogVisibilities: {
[key: string]: boolean;
};
bugsBehavior: BugsBehavior;
workingDays: System_Contracts.DayOfWeek[];
}
export var TypeInfo: {
Activity: {
fields: any;
};
attribute: {
fields: any;
};
Board: {
fields: any;
};
BoardCardRuleSettings: {
fields: any;
};
BoardCardSettings: {
fields: any;
};
BoardChart: {
fields: any;
};
BoardChartReference: {
fields: any;
};
BoardColumn: {
fields: any;
};
BoardColumnType: {
enumValues: {
"incoming": number;
"inProgress": number;
"outgoing": number;
};
};
BoardReference: {
fields: any;
};
BoardRow: {
fields: any;
};
BoardSuggestedValue: {
fields: any;
};
BugsBehavior: {
enumValues: {
"off": number;
"asRequirements": number;
"asTasks": number;
};
};
CapacityPatch: {
fields: any;
};
DateRange: {
fields: any;
};
FieldReference: {
fields: any;
};
FieldSetting: {
fields: any;
};
FilterClause: {
fields: any;
};
Member: {
fields: any;
};
Rule: {
fields: any;
};
ShallowReference: {
fields: any;
};
TeamFieldValue: {
fields: any;
};
TeamFieldValues: {
fields: any;
};
TeamFieldValuesPatch: {
fields: any;
};
TeamIterationAttributes: {
fields: any;
};
TeamMemberCapacity: {
fields: any;
};
TeamSetting: {
fields: any;
};
TeamSettingsDataContractBase: {
fields: any;
};
TeamSettingsDaysOff: {
fields: any;
};
TeamSettingsDaysOffPatch: {
fields: any;
};
TeamSettingsIteration: {
fields: any;
};
TeamSettingsPatch: {
fields: any;
};
};
}
declare module "TFS/Work/RestClient" {
import Contracts = require("TFS/Work/Contracts");
import TFS_Core_Contracts = require("TFS/Core/Contracts");
import VSS_WebApi = require("VSS/WebApi/RestClient");
export class WorkHttpClient2_2 extends VSS_WebApi.VssHttpClient {
static serviceInstanceId: string;
constructor(rootRequestPath: string);
/**
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.BoardSuggestedValue[]>
*/
getColumnSuggestedValues(project?: string): IPromise<Contracts.BoardSuggestedValue[]>;
/**
* @param {string} project - Project ID or project name
* @return IPromise<Contracts.BoardSuggestedValue[]>
*/
getRowSuggestedValues(project?: string): IPromise<Contracts.BoardSuggestedValue[]>;
/**
* Get board
*
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} id - identifier for board, either category plural name (Eg:"Stories") or guid
* @return IPromise<Contracts.Board>
*/
getBoard(teamContext: TFS_Core_Contracts.TeamContext, id: string): IPromise<Contracts.Board>;
/**
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @return IPromise<Contracts.BoardReference[]>
*/
getBoards(teamContext: TFS_Core_Contracts.TeamContext): IPromise<Contracts.BoardReference[]>;
/**
* Update board options
*
* @param {{ [key: string] : string; }} options - options to updated
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} id - identifier for board, either category plural name (Eg:"Stories") or guid
* @return IPromise<{ [key: string] : string; }>
*/
setBoardOptions(options: {
[key: string]: string;
}, teamContext: TFS_Core_Contracts.TeamContext, id: string): IPromise<{
[key: string]: string;
}>;
/**
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} iterationId
* @return IPromise<Contracts.TeamMemberCapacity[]>
*/
getCapacities(teamContext: TFS_Core_Contracts.TeamContext, iterationId: string): IPromise<Contracts.TeamMemberCapacity[]>;
/**
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} iterationId
* @param {string} teamMemberId
* @return IPromise<Contracts.TeamMemberCapacity>
*/
getCapacity(teamContext: TFS_Core_Contracts.TeamContext, iterationId: string, teamMemberId: string): IPromise<Contracts.TeamMemberCapacity>;
/**
* @param {Contracts.TeamMemberCapacity[]} capacities
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} iterationId
* @return IPromise<Contracts.TeamMemberCapacity[]>
*/
replaceCapacities(capacities: Contracts.TeamMemberCapacity[], teamContext: TFS_Core_Contracts.TeamContext, iterationId: string): IPromise<Contracts.TeamMemberCapacity[]>;
/**
* @param {Contracts.CapacityPatch} patch
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} iterationId
* @param {string} teamMemberId
* @return IPromise<Contracts.TeamMemberCapacity>
*/
updateCapacity(patch: Contracts.CapacityPatch, teamContext: TFS_Core_Contracts.TeamContext, iterationId: string, teamMemberId: string): IPromise<Contracts.TeamMemberCapacity>;
/**
* @exemptedapi
* [Preview API] Get board card Rule settings for the board id or board by name
*
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} board
* @return IPromise<Contracts.BoardCardRuleSettings>
*/
getBoardCardRuleSettings(teamContext: TFS_Core_Contracts.TeamContext, board: string): IPromise<Contracts.BoardCardRuleSettings>;
/**
* @exemptedapi
* [Preview API] Update board card Rule settings for the board id or board by name
*
* @param {Contracts.BoardCardRuleSettings} boardCardRuleSettings
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} board
* @return IPromise<Contracts.BoardCardRuleSettings>
*/
updateBoardCardRuleSettings(boardCardRuleSettings: Contracts.BoardCardRuleSettings, teamContext: TFS_Core_Contracts.TeamContext, board: string): IPromise<Contracts.BoardCardRuleSettings>;
/**
* Get board card settings for the board id or board by name
*
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} board
* @return IPromise<Contracts.BoardCardSettings>
*/
getBoardCardSettings(teamContext: TFS_Core_Contracts.TeamContext, board: string): IPromise<Contracts.BoardCardSettings>;
/**
* Update board card settings for the board id or board by name
*
* @param {Contracts.BoardCardSettings} boardCardSettingsToSave
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} board
* @return IPromise<Contracts.BoardCardSettings>
*/
updateBoardCardSettings(boardCardSettingsToSave: Contracts.BoardCardSettings, teamContext: TFS_Core_Contracts.TeamContext, board: string): IPromise<Contracts.BoardCardSettings>;
/**
* Get a board chart
*
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} board - Identifier for board, either category plural name (Eg:"Stories") or Guid
* @param {string} name - The chart name
* @return IPromise<Contracts.BoardChart>
*/
getBoardChart(teamContext: TFS_Core_Contracts.TeamContext, board: string, name: string): IPromise<Contracts.BoardChart>;
/**
* Get board charts
*
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} board - Identifier for board, either category plural name (Eg:"Stories") or Guid
* @return IPromise<Contracts.BoardChartReference[]>
*/
getBoardCharts(teamContext: TFS_Core_Contracts.TeamContext, board: string): IPromise<Contracts.BoardChartReference[]>;
/**
* Update a board chart
*
* @param {Contracts.BoardChart} chart
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} board - Identifier for board, either category plural name (Eg:"Stories") or Guid
* @param {string} name - The chart name
* @return IPromise<Contracts.BoardChart>
*/
updateBoardChart(chart: Contracts.BoardChart, teamContext: TFS_Core_Contracts.TeamContext, board: string, name: string): IPromise<Contracts.BoardChart>;
/**
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} board
* @return IPromise<Contracts.BoardColumn[]>
*/
getBoardColumns(teamContext: TFS_Core_Contracts.TeamContext, board: string): IPromise<Contracts.BoardColumn[]>;
/**
* @param {Contracts.BoardColumn[]} boardColumns
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} board
* @return IPromise<Contracts.BoardColumn[]>
*/
updateBoardColumns(boardColumns: Contracts.BoardColumn[], teamContext: TFS_Core_Contracts.TeamContext, board: string): IPromise<Contracts.BoardColumn[]>;
/**
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} id
* @return IPromise<void>
*/
deleteTeamIteration(teamContext: TFS_Core_Contracts.TeamContext, id: string): IPromise<void>;
/**
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} id
* @return IPromise<Contracts.TeamSettingsIteration>
*/
getTeamIteration(teamContext: TFS_Core_Contracts.TeamContext, id: string): IPromise<Contracts.TeamSettingsIteration>;
/**
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} timeframe
* @return IPromise<Contracts.TeamSettingsIteration[]>
*/
getTeamIterations(teamContext: TFS_Core_Contracts.TeamContext, timeframe?: string): IPromise<Contracts.TeamSettingsIteration[]>;
/**
* @param {Contracts.TeamSettingsIteration} iteration
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @return IPromise<Contracts.TeamSettingsIteration>
*/
postTeamIteration(iteration: Contracts.TeamSettingsIteration, teamContext: TFS_Core_Contracts.TeamContext): IPromise<Contracts.TeamSettingsIteration>;
/**
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} board
* @return IPromise<Contracts.BoardRow[]>
*/
getBoardRows(teamContext: TFS_Core_Contracts.TeamContext, board: string): IPromise<Contracts.BoardRow[]>;
/**
* @param {Contracts.BoardRow[]} boardRows
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} board
* @return IPromise<Contracts.BoardRow[]>
*/
updateBoardRows(boardRows: Contracts.BoardRow[], teamContext: TFS_Core_Contracts.TeamContext, board: string): IPromise<Contracts.BoardRow[]>;
/**
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} iterationId
* @return IPromise<Contracts.TeamSettingsDaysOff>
*/
getTeamDaysOff(teamContext: TFS_Core_Contracts.TeamContext, iterationId: string): IPromise<Contracts.TeamSettingsDaysOff>;
/**
* @param {Contracts.TeamSettingsDaysOffPatch} daysOffPatch
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @param {string} iterationId
* @return IPromise<Contracts.TeamSettingsDaysOff>
*/
updateTeamDaysOff(daysOffPatch: Contracts.TeamSettingsDaysOffPatch, teamContext: TFS_Core_Contracts.TeamContext, iterationId: string): IPromise<Contracts.TeamSettingsDaysOff>;
/**
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @return IPromise<Contracts.TeamFieldValues>
*/
getTeamFieldValues(teamContext: TFS_Core_Contracts.TeamContext): IPromise<Contracts.TeamFieldValues>;
/**
* @param {Contracts.TeamFieldValuesPatch} patch
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @return IPromise<Contracts.TeamFieldValues>
*/
updateTeamFieldValues(patch: Contracts.TeamFieldValuesPatch, teamContext: TFS_Core_Contracts.TeamContext): IPromise<Contracts.TeamFieldValues>;
/**
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @return IPromise<Contracts.TeamSetting>
*/
getTeamSettings(teamContext: TFS_Core_Contracts.TeamContext): IPromise<Contracts.TeamSetting>;
/**
* @param {Contracts.TeamSettingsPatch} teamSettingsPatch
* @param {TFS_Core_Contracts.TeamContext} teamContext - The team context for the operation
* @return IPromise<Contracts.TeamSetting>
*/
updateTeamSettings(teamSettingsPatch: Contracts.TeamSettingsPatch, teamContext: TFS_Core_Contracts.TeamContext): IPromise<Contracts.TeamSetting>;
}
export class WorkHttpClient extends WorkHttpClient2_2 {
constructor(rootRequestPath: string);
}
/**
* Gets an http client targeting the latest released version of the APIs.
*
* @return WorkHttpClient2_2
*/
export function getClient(): WorkHttpClient2_2;
} | };
TreeStructureGroup: {
enumValues: {
|
codepage437.rs | /// Converts a unicode character to a CP437 equivalent, returning 0 if it didn't have a match
pub fn to_cp437(c: char) -> u8 {
match c {
'☺' => 1,
'☻' => 2,
'♥' => 3,
'♦' => 4,
'♣' => 5,
'♠' => 6,
'•' => 7,
'◘' => 8,
'○' => 9,
'◙' => 10,
'♂' => 11,
'♀' => 12,
'♪' => 13,
'♫' => 14,
'☼' => 15,
'►' => 16,
'◄' => 17,
'↕' => 18,
'‼' => 19,
'¶' => 20,
'§' => 21,
'▬' => 22,
'↨' => 23,
'↑' => 24,
'↓' => 25,
'→' => 26,
'←' => 27,
'∟' => 28,
'↔' => 29,
'▲' => 30,
'▼' => 31,
' ' => 32,
'!' => 33,
'"' => 34,
'#' => 35,
'$' => 36,
'%' => 37,
'&' => 38,
'\'' => 39,
'(' => 40,
')' => 41,
'*' => 42,
'+' => 43,
',' => 44,
'-' => 45,
'.' => 46,
'/' => 47,
'0' => 48,
'1' => 49,
'2' => 50,
'3' => 51,
'4' => 52,
'5' => 53,
'6' => 54,
'7' => 55,
'8' => 56,
'9' => 57,
':' => 58,
';' => 59,
'<' => 60,
'=' => 61,
'>' => 62,
'?' => 63,
'@' => 64,
'A' => 65,
'B' => 66,
'C' => 67,
'D' => 68,
'E' => 69,
'F' => 70,
'G' => 71,
'H' => 72,
'I' => 73,
'J' => 74,
'K' => 75,
'L' => 76,
'M' => 77,
'N' => 78,
'O' => 79,
'P' => 80,
'Q' => 81,
'R' => 82,
'S' => 83,
'T' => 84,
'U' => 85,
'V' => 86,
'W' => 87,
'X' => 88,
'Y' => 89,
'Z' => 90,
'[' => 91,
'\\' => 92,
']' => 93,
'^' => 94,
'_' => 95,
'`' => 96,
'a' => 97,
'b' => 98,
'c' => 99,
'd' => 100,
'e' => 101,
'f' => 102,
'g' => 103,
'h' => 104,
'i' => 105,
'j' => 106,
'k' => 107,
'l' => 108,
'm' => 109,
'n' => 110,
'o' => 111,
'p' => 112,
'q' => 113,
'r' => 114,
's' => 115,
't' => 116,
'u' => 117,
'v' => 118,
'w' => 119,
'x' => 120,
'y' => 121,
'z' => 122,
'{' => 123,
'|' => 124,
'}' => 125,
'~' => 126,
'⌂' => 127,
'Ç' => 128,
'ü' => 129,
'é' => 130,
'â' => 131,
'ä' => 132,
'à' => 133,
'å' => 134,
'ç' => 135,
'ê' => 136,
'ë' => 137,
'è' => 138,
'ï' => 139,
'î' => 140,
'ì' => 141,
'Ä' => 142,
'Å' => 143,
'É' => 144,
'æ' => 145,
'Æ' => 146,
'ô' => 147,
'ö' => 148,
'ò' => 149,
'û' => 150,
'ù' => 151,
'ÿ' => 152,
'Ö' => 153,
'Ü' => 154,
'¢' => 155,
'£' => 156,
'¥' => 157,
'₧' => 158,
'ƒ' => 159,
'á' => 160,
'í' => 161,
'ó' => 162,
'ú' => 163,
'ñ' => 164,
'Ñ' => 165,
'ª' => 166,
'º' => 167,
'¿' => 168,
'⌐' => 169,
'¬' => 170,
'½' => 171,
'¼' => 172,
'¡' => 173,
'«' => 174,
'»' => 175,
'░' => 176,
'▒' => 177,
'▓' => 178,
'│' => 179,
'┤' => 180,
'╡' => 181,
'╢' => 182,
'╖' => 183,
'╕' => 184,
'╣' => 185,
'║' => 186,
'╗' => 187,
'╝' => 188,
'╜' => 189,
'╛' => 190,
'┐' => 191,
'└' => 192,
'┴' => 193,
'┬' => 194,
'├' => 195,
'─' => 196,
'┼' => 197,
'╞' => 198,
'╟' => 199,
'╚' => 200,
'╔' => 201,
'╩' => 202,
'╦' => 203,
'╠' => 204,
'═' => 205,
'╬' => 206,
'╧' => 207,
'╨' => 208,
'╤' => 209,
'╥' => 210,
'╙' => 211,
'╘' => 212,
'╒' => 213,
'╓' => 214,
'╫' => 215,
'╪' => 216,
'┘' => 217,
'┌' => 218,
'█' => 219,
'▄' => 220,
'▌' => 221,
'▐' => 222,
'▀' => 223,
'α' => 224,
'ß' => 225,
'Γ' => 226,
'π' => 227,
'Σ' => 228,
'σ' => 229,
'µ' => 230,
'τ' => 231,
'Φ' => 232,
'Θ' => 233,
'Ω' => 234,
'δ' => 235,
'∞' => 236,
'φ' => 237,
'ε' => 238,
'∩' => 239,
'≡' => 240,
'±' => 241,
'≥' => 242,
'≤' => 243,
'⌠' => 244,
'⌡' => 245,
'÷' => 246,
'≈' => 247,
'°' => 248,
'∙' => 249,
'·' => 250,
'√' => 251,
'ⁿ' => 252,
'²' => 253,
'■' => 254,
_ => 0,
}
} | pub fn string_to_cp437<S: AsRef<str>>(input: S) -> Vec<u8> {
input.as_ref().chars().map(to_cp437).collect()
}
#[cfg(test)]
mod tests {
use super::string_to_cp437;
#[test]
// Tests that we make an RGB triplet at defaults and it is black.
fn test_hello() {
let test: Vec<u8> = vec![72, 101, 108, 108, 111];
let convert = string_to_cp437("Hello");
assert_eq!(test, convert);
}
#[test]
// Tests that we make an RGB triplet at defaults and it is black.
fn test_fancy() {
let test: Vec<u8> = vec![171, 165, 176, 206, 234, 247];
let convert = string_to_cp437("½Ñ░╬Ω≈");
assert_eq!(test, convert);
}
#[test]
// Tests that we make an RGB triplet at defaults and it is black.
fn test_first_group() {
let test: Vec<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15];
let convert = string_to_cp437("☺☻♥♦♣♠•◘○◙♂♀♪♫☼");
assert_eq!(test, convert);
}
} |
/// Converts a string into a vector of u8, CP437 representations of the string |
bybit.rs | #[macro_use]
mod utils;
#[cfg(test)]
mod bybit_inverse_future {
use crypto_ws_client::{BybitInverseFutureWSClient, WSClient};
use std::sync::mpsc::{Receiver, Sender};
#[test]
fn subscribe() {
gen_test_code!(
BybitInverseFutureWSClient,
subscribe,
&vec!["trade.BTCUSDZ21".to_string()]
);
}
#[test]
fn subscribe_raw_json() {
gen_test_code!(
BybitInverseFutureWSClient,
subscribe,
&vec![r#"{"op":"subscribe","args":["trade.BTCUSDZ21"]}"#.to_string()]
);
}
#[test]
fn subscribe_trade() {
gen_test_code!(
BybitInverseFutureWSClient,
subscribe_trade,
&vec!["BTCUSDZ21".to_string()]
);
}
#[test]
fn subscribe_orderbook_topk() {
gen_test_code!(
BybitInverseFutureWSClient,
subscribe_orderbook_topk,
&vec!["BTCUSDZ21".to_string()]
);
}
#[test]
fn subscribe_orderbook() {
gen_test_code!(
BybitInverseFutureWSClient,
subscribe_orderbook,
&vec!["BTCUSDZ21".to_string()]
);
}
#[test]
fn subscribe_ticker() {
gen_test_code!(
BybitInverseFutureWSClient,
subscribe_ticker,
&vec!["BTCUSDZ21".to_string()]
);
}
#[test]
fn subscribe_candlestick() {
gen_test_subscribe_candlestick!(
BybitInverseFutureWSClient,
&vec![("BTCUSDZ21".to_string(), 60)]
);
gen_test_subscribe_candlestick!(
BybitInverseFutureWSClient,
&vec![("BTCUSDZ21".to_string(), 2592000)]
);
}
}
#[cfg(test)]
mod bybit_inverse_swap {
use crypto_ws_client::{BybitInverseSwapWSClient, WSClient};
use std::sync::mpsc::{Receiver, Sender};
#[test]
fn subscribe() {
gen_test_code!(
BybitInverseSwapWSClient,
subscribe,
&vec!["trade.BTCUSD".to_string()]
);
}
#[test]
fn subscribe_raw_json() {
gen_test_code!(
BybitInverseSwapWSClient,
subscribe,
&vec![r#"{"op":"subscribe","args":["trade.BTCUSD"]}"#.to_string()]
);
}
#[test]
fn subscribe_trade() {
gen_test_code!(
BybitInverseSwapWSClient,
subscribe_trade,
&vec!["BTCUSD".to_string()]
);
}
#[test]
fn subscribe_orderbook_topk() {
gen_test_code!(
BybitInverseSwapWSClient,
subscribe_orderbook_topk,
&vec!["BTCUSD".to_string()]
);
}
#[test]
fn subscribe_orderbook() {
gen_test_code!(
BybitInverseSwapWSClient,
subscribe_orderbook,
&vec!["BTCUSD".to_string()]
);
}
#[test]
fn | () {
gen_test_code!(
BybitInverseSwapWSClient,
subscribe_ticker,
&vec!["BTCUSD".to_string()]
);
}
#[test]
fn subscribe_candlestick() {
gen_test_subscribe_candlestick!(
BybitInverseSwapWSClient,
&vec![("BTCUSD".to_string(), 60)]
);
gen_test_subscribe_candlestick!(
BybitInverseSwapWSClient,
&vec![("BTCUSD".to_string(), 2592000)]
);
}
}
#[cfg(test)]
mod bybit_linear_swap {
use crypto_ws_client::{BybitLinearSwapWSClient, WSClient};
use std::sync::mpsc::{Receiver, Sender};
#[test]
fn subscribe_trade() {
gen_test_code!(
BybitLinearSwapWSClient,
subscribe_trade,
&vec!["BTCUSDT".to_string()]
);
}
#[test]
fn subscribe_orderbook_topk() {
gen_test_code!(
BybitLinearSwapWSClient,
subscribe_orderbook_topk,
&vec!["BTCUSDT".to_string()]
);
}
#[test]
fn subscribe_orderbook() {
gen_test_code!(
BybitLinearSwapWSClient,
subscribe_orderbook,
&vec!["BTCUSDT".to_string()]
);
}
#[test]
fn subscribe_ticker() {
gen_test_code!(
BybitLinearSwapWSClient,
subscribe_ticker,
&vec!["BTCUSDT".to_string()]
);
}
#[test]
fn subscribe_candlestick() {
gen_test_subscribe_candlestick!(
BybitLinearSwapWSClient,
&vec![("BTCUSDT".to_string(), 60)]
);
gen_test_subscribe_candlestick!(
BybitLinearSwapWSClient,
&vec![("BTCUSDT".to_string(), 2592000)]
);
}
}
| subscribe_ticker |
ipam_vlan_groups_create_parameters.go | // Code generated by go-swagger; DO NOT EDIT.
// Copyright 2020 The go-netbox Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package ipam
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"net/http"
"time"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
"github.com/go-openapi/strfmt"
"github.com/fbreckle/go-netbox/netbox/models"
)
// NewIpamVlanGroupsCreateParams creates a new IpamVlanGroupsCreateParams object
// with the default values initialized.
func NewIpamVlanGroupsCreateParams() *IpamVlanGroupsCreateParams {
var ()
return &IpamVlanGroupsCreateParams{
timeout: cr.DefaultTimeout,
}
}
// NewIpamVlanGroupsCreateParamsWithTimeout creates a new IpamVlanGroupsCreateParams object
// with the default values initialized, and the ability to set a timeout on a request
func NewIpamVlanGroupsCreateParamsWithTimeout(timeout time.Duration) *IpamVlanGroupsCreateParams {
var ()
return &IpamVlanGroupsCreateParams{
timeout: timeout,
}
}
// NewIpamVlanGroupsCreateParamsWithContext creates a new IpamVlanGroupsCreateParams object
// with the default values initialized, and the ability to set a context for a request
func NewIpamVlanGroupsCreateParamsWithContext(ctx context.Context) *IpamVlanGroupsCreateParams {
var ()
return &IpamVlanGroupsCreateParams{
Context: ctx,
}
}
// NewIpamVlanGroupsCreateParamsWithHTTPClient creates a new IpamVlanGroupsCreateParams object
// with the default values initialized, and the ability to set a custom HTTPClient for a request
func NewIpamVlanGroupsCreateParamsWithHTTPClient(client *http.Client) *IpamVlanGroupsCreateParams {
var ()
return &IpamVlanGroupsCreateParams{
HTTPClient: client,
}
}
/*IpamVlanGroupsCreateParams contains all the parameters to send to the API endpoint
for the ipam vlan groups create operation typically these are written to a http.Request
*/
type IpamVlanGroupsCreateParams struct {
/*Data*/
Data *models.WritableVLANGroup
timeout time.Duration
Context context.Context
HTTPClient *http.Client
}
// WithTimeout adds the timeout to the ipam vlan groups create params
func (o *IpamVlanGroupsCreateParams) WithTimeout(timeout time.Duration) *IpamVlanGroupsCreateParams {
o.SetTimeout(timeout)
return o
}
// SetTimeout adds the timeout to the ipam vlan groups create params
func (o *IpamVlanGroupsCreateParams) SetTimeout(timeout time.Duration) {
o.timeout = timeout
}
// WithContext adds the context to the ipam vlan groups create params
func (o *IpamVlanGroupsCreateParams) WithContext(ctx context.Context) *IpamVlanGroupsCreateParams {
o.SetContext(ctx)
return o
}
// SetContext adds the context to the ipam vlan groups create params
func (o *IpamVlanGroupsCreateParams) SetContext(ctx context.Context) {
o.Context = ctx
}
// WithHTTPClient adds the HTTPClient to the ipam vlan groups create params
func (o *IpamVlanGroupsCreateParams) WithHTTPClient(client *http.Client) *IpamVlanGroupsCreateParams {
o.SetHTTPClient(client)
return o
}
// SetHTTPClient adds the HTTPClient to the ipam vlan groups create params
func (o *IpamVlanGroupsCreateParams) SetHTTPClient(client *http.Client) {
o.HTTPClient = client
}
// WithData adds the data to the ipam vlan groups create params
func (o *IpamVlanGroupsCreateParams) WithData(data *models.WritableVLANGroup) *IpamVlanGroupsCreateParams {
o.SetData(data)
return o
}
// SetData adds the data to the ipam vlan groups create params
func (o *IpamVlanGroupsCreateParams) SetData(data *models.WritableVLANGroup) {
o.Data = data
}
// WriteToRequest writes these params to a swagger request
func (o *IpamVlanGroupsCreateParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
if err := r.SetTimeout(o.timeout); err != nil {
return err
}
var res []error
if o.Data != nil |
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
| {
if err := r.SetBodyParam(o.Data); err != nil {
return err
}
} |
create_pipeline_version_parameters.go | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by go-swagger; DO NOT EDIT.
package pipeline_service
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"net/http"
"time"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
strfmt "github.com/go-openapi/strfmt"
pipeline_model "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model"
)
// NewCreatePipelineVersionParams creates a new CreatePipelineVersionParams object
// with the default values initialized.
func NewCreatePipelineVersionParams() *CreatePipelineVersionParams {
var ()
return &CreatePipelineVersionParams{
timeout: cr.DefaultTimeout,
}
}
// NewCreatePipelineVersionParamsWithTimeout creates a new CreatePipelineVersionParams object
// with the default values initialized, and the ability to set a timeout on a request
func NewCreatePipelineVersionParamsWithTimeout(timeout time.Duration) *CreatePipelineVersionParams {
var ()
return &CreatePipelineVersionParams{
timeout: timeout,
}
}
// NewCreatePipelineVersionParamsWithContext creates a new CreatePipelineVersionParams object
// with the default values initialized, and the ability to set a context for a request
func NewCreatePipelineVersionParamsWithContext(ctx context.Context) *CreatePipelineVersionParams {
var ()
return &CreatePipelineVersionParams{
Context: ctx,
}
}
// NewCreatePipelineVersionParamsWithHTTPClient creates a new CreatePipelineVersionParams object
// with the default values initialized, and the ability to set a custom HTTPClient for a request
func NewCreatePipelineVersionParamsWithHTTPClient(client *http.Client) *CreatePipelineVersionParams {
var ()
return &CreatePipelineVersionParams{
HTTPClient: client,
}
}
/*CreatePipelineVersionParams contains all the parameters to send to the API endpoint
for the create pipeline version operation typically these are written to a http.Request
*/
type CreatePipelineVersionParams struct {
/*Body
ResourceReference inside PipelineVersion specifies the pipeline that this
version belongs to.
*/
Body *pipeline_model.APIPipelineVersion
timeout time.Duration
Context context.Context
HTTPClient *http.Client
}
// WithTimeout adds the timeout to the create pipeline version params
func (o *CreatePipelineVersionParams) WithTimeout(timeout time.Duration) *CreatePipelineVersionParams {
o.SetTimeout(timeout)
return o
}
// SetTimeout adds the timeout to the create pipeline version params
func (o *CreatePipelineVersionParams) SetTimeout(timeout time.Duration) {
o.timeout = timeout
}
// WithContext adds the context to the create pipeline version params
func (o *CreatePipelineVersionParams) WithContext(ctx context.Context) *CreatePipelineVersionParams {
o.SetContext(ctx)
return o
}
// SetContext adds the context to the create pipeline version params
func (o *CreatePipelineVersionParams) SetContext(ctx context.Context) {
o.Context = ctx
}
// WithHTTPClient adds the HTTPClient to the create pipeline version params
func (o *CreatePipelineVersionParams) WithHTTPClient(client *http.Client) *CreatePipelineVersionParams {
o.SetHTTPClient(client)
return o
}
// SetHTTPClient adds the HTTPClient to the create pipeline version params
func (o *CreatePipelineVersionParams) SetHTTPClient(client *http.Client) {
o.HTTPClient = client
}
// WithBody adds the body to the create pipeline version params
func (o *CreatePipelineVersionParams) WithBody(body *pipeline_model.APIPipelineVersion) *CreatePipelineVersionParams {
o.SetBody(body)
return o
}
// SetBody adds the body to the create pipeline version params
func (o *CreatePipelineVersionParams) SetBody(body *pipeline_model.APIPipelineVersion) {
o.Body = body
}
// WriteToRequest writes these params to a swagger request
func (o *CreatePipelineVersionParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
if err := r.SetTimeout(o.timeout); err != nil {
return err
}
var res []error
if o.Body != nil {
if err := r.SetBodyParam(o.Body); err != nil {
return err
}
}
if len(res) > 0 |
return nil
}
| {
return errors.CompositeValidationError(res...)
} |
tests.py | from django.test import TestCase
# Create your tests here. |
||
zh-hant.js | // This file was automatically generated. Do not modify.
'use strict';
Blockly.Msg["ADD_COMMENT"] = "加入註解";
Blockly.Msg["ARD_ANALOGWRITE"] = "Set PWM pin"; // untranslated
Blockly.Msg["ARD_DIGITALWRITE"] = "Set digital pin"; // untranslated
Blockly.Msg["ARD_PIN_WARN1"] = "Pin %1 is needed for %2 as pin %3. Already in use as %4."; // untranslated
Blockly.Msg["ARD_SERVO_READ"] = "Read servo pin"; // untranslated
Blockly.Msg["ARD_SERVO_WRITE"] = "Set servo pin"; // untranslated
Blockly.Msg["ARD_SERVO_WRITE_DEG_180"] = "degrees"; // untranslated
Blockly.Msg["ARD_SERVO_WRITE_TO"] = "angle to"; // untranslated
Blockly.Msg["ARD_TIME_DELAY"] = "during"; // untranslated
Blockly.Msg["ARD_TIME_DELAY_TIP"] = "Wait specific time in milliseconds"; // untranslated
Blockly.Msg["ARD_TIME_MS"] = "ms"; // untranslated
Blockly.Msg["ARD_WRITE_TO"] = "To"; // untranslated
Blockly.Msg["CANNOT_DELETE_VARIABLE_PROCEDURE"] = "無法刪除變數「%1」,因為這是功能「%2」定義的一部份內容";
Blockly.Msg["CHANGE_VALUE_TITLE"] = "修改值:";
Blockly.Msg["CLEAN_UP"] = "整理積木";
Blockly.Msg["COLLAPSED_WARNINGS_WARNING"] = "收合含有警告的區塊。";
Blockly.Msg["COLLAPSE_ALL"] = "收合積木";
Blockly.Msg["COLLAPSE_BLOCK"] = "收合區塊";
Blockly.Msg["COLOUR_BLEND_COLOUR1"] = "顏色 1";
Blockly.Msg["COLOUR_BLEND_COLOUR2"] = "顏色 2";
Blockly.Msg["COLOUR_BLEND_HELPURL"] = "https://meyerweb.com/eric/tools/color-blend/#:::rgbp"; // untranslated
Blockly.Msg["COLOUR_BLEND_RATIO"] = "比例";
Blockly.Msg["COLOUR_BLEND_TITLE"] = "混合";
Blockly.Msg["COLOUR_BLEND_TOOLTIP"] = "用一個給定的比率(0.0-1.0)混合兩種顏色。";
Blockly.Msg["COLOUR_PICKER_HELPURL"] = "https://zh.wikipedia.org/wiki/顏色";
Blockly.Msg["COLOUR_PICKER_TOOLTIP"] = "從調色板中選擇一種顏色。";
Blockly.Msg["COLOUR_RANDOM_HELPURL"] = "http://randomcolour.com"; // untranslated
Blockly.Msg["COLOUR_RANDOM_TITLE"] = "隨機顏色";
Blockly.Msg["COLOUR_RANDOM_TOOLTIP"] = "隨機選擇一種顏色。";
Blockly.Msg["COLOUR_RGB_BLUE"] = "藍";
Blockly.Msg["COLOUR_RGB_GREEN"] = "綠";
Blockly.Msg["COLOUR_RGB_HELPURL"] = "https://www.december.com/html/spec/colorpercompact.html"; // untranslated
Blockly.Msg["COLOUR_RGB_RED"] = "紅";
Blockly.Msg["COLOUR_RGB_TITLE"] = "顏色";
Blockly.Msg["COLOUR_RGB_TOOLTIP"] = "透過指定紅、綠、 藍色的值來建立一種顏色。所有的值必須介於 0 和 100 之間。";
Blockly.Msg["CONTROLS_FLOW_STATEMENTS_HELPURL"] = "https://github.com/google/blockly/wiki/Loops#loop-termination-blocks"; // untranslated
Blockly.Msg["CONTROLS_FLOW_STATEMENTS_OPERATOR_BREAK"] = "中斷循環";
Blockly.Msg["CONTROLS_FLOW_STATEMENTS_OPERATOR_CONTINUE"] = "繼續下一個循環";
Blockly.Msg["CONTROLS_FLOW_STATEMENTS_TOOLTIP_BREAK"] = "中斷當前的循環。";
Blockly.Msg["CONTROLS_FLOW_STATEMENTS_TOOLTIP_CONTINUE"] = "跳過這個循環的其餘步驟,並繼續下一次的循環。";
Blockly.Msg["CONTROLS_FLOW_STATEMENTS_WARNING"] = "警告:此區塊僅可用於循環內。";
Blockly.Msg["CONTROLS_FOREACH_HELPURL"] = "https://github.com/google/blockly/wiki/Loops#for-each"; // untranslated
Blockly.Msg["CONTROLS_FOREACH_INPUT_DO"] = ""; // untranslated
Blockly.Msg["CONTROLS_FOREACH_TITLE"] = "為列表 %2 裡的每一項 %1";
Blockly.Msg["CONTROLS_FOREACH_TOOLTIP"] = "遍歷每個清單中的項目,將變數「%1」設定到該項目中,然後執行某些陳述式。";
Blockly.Msg["CONTROLS_FOR_HELPURL"] = "https://github.com/google/blockly/wiki/Loops#count-with"; // untranslated
Blockly.Msg["CONTROLS_FOR_INPUT_DO"] = ""; // untranslated
Blockly.Msg["CONTROLS_FOR_TITLE"] = "循環計數 %1 從 %2 到 %3 每次增加 %4";
Blockly.Msg["CONTROLS_FOR_TOOLTIP"] = "從起始數到結尾數中取出變數「%1」的值,按指定的時間間隔,執行指定的區塊。";
Blockly.Msg["CONTROLS_IF_ELSEIF_TOOLTIP"] = "添加條件到「如果」積木。";
Blockly.Msg["CONTROLS_IF_ELSE_TOOLTIP"] = "加入一個最終、所有條件都執行的部份到「如果」區塊中。";
Blockly.Msg["CONTROLS_IF_HELPURL"] = "https://github.com/google/blockly/wiki/IfElse"; // untranslated
Blockly.Msg["CONTROLS_IF_IF_TOOLTIP"] = "添加、刪除或重新排列各部份以重新配置這個「如果」區塊。";
Blockly.Msg["CONTROLS_IF_MSG_ELSE"] = "否則";
Blockly.Msg["CONTROLS_IF_MSG_ELSEIF"] = "否則,如果";
Blockly.Msg["CONTROLS_IF_MSG_IF"] = "如果";
Blockly.Msg["CONTROLS_IF_MSG_THEN"] = "then"; // untranslated
Blockly.Msg["CONTROLS_IF_TOOLTIP_1"] = "當值為 true 時,執行一些陳述式。";
Blockly.Msg["CONTROLS_IF_TOOLTIP_2"] = "當值為 true 時,執行第一個陳述式。否則,執行第二個陳述式。";
Blockly.Msg["CONTROLS_IF_TOOLTIP_3"] = "如果第一個值為 true,則執行第一個陳述式。否則,當第二個值為 true 時,則執行第二個陳述式。";
Blockly.Msg["CONTROLS_IF_TOOLTIP_4"] = "如果第一個值為 true,則執行第一個陳述式。否則當第二個值為 true 時,則執行第二個陳述式。如果前幾個敘述都不為 ture,則執行最後一個陳述式。";
Blockly.Msg["CONTROLS_REPEAT_FOREVER_TITLE"] = "repeat forever"; // untranslated
Blockly.Msg["CONTROLS_REPEAT_HELPURL"] = "https://zh.wikipedia.org/wiki/For迴圈";
Blockly.Msg["CONTROLS_REPEAT_INPUT_DO"] = "執行";
Blockly.Msg["CONTROLS_REPEAT_TITLE"] = "重複%1次";
Blockly.Msg["CONTROLS_REPEAT_TOOLTIP"] = "重複執行指定的陳述式多次。";
Blockly.Msg["CONTROLS_WHILEUNTIL_HELPURL"] = "https://github.com/google/blockly/wiki/Loops#repeat"; // untranslated
Blockly.Msg["CONTROLS_WHILEUNTIL_INPUT_DO"] = ""; // untranslated
Blockly.Msg["CONTROLS_WHILEUNTIL_OPERATOR_UNTIL"] = "重複直到";
Blockly.Msg["CONTROLS_WHILEUNTIL_OPERATOR_WHILE"] = "重複,當";
Blockly.Msg["CONTROLS_WHILEUNTIL_TOOLTIP_UNTIL"] = "當值為 false 時,執行一些陳述式。";
Blockly.Msg["CONTROLS_WHILEUNTIL_TOOLTIP_WHILE"] = "當值為 true 時,執行一些陳述式。";
Blockly.Msg["DELETE_ALL_BLOCKS"] = "刪除全部 %1 個區塊?";
Blockly.Msg["DELETE_BLOCK"] = "刪除區塊";
Blockly.Msg["DELETE_VARIABLE"] = "刪除變數「%1」";
Blockly.Msg["DELETE_VARIABLE_CONFIRMATION"] = "刪除使用%1次的「%2」變數?";
Blockly.Msg["DELETE_X_BLOCKS"] = "刪除%1個積木";
Blockly.Msg["DISABLE_BLOCK"] = "停用區塊";
Blockly.Msg["DUPLICATE_BLOCK"] = "複製";
Blockly.Msg["DUPLICATE_COMMENT"] = "複製註解";
Blockly.Msg["ENABLE_BLOCK"] = "啟用積木";
Blockly.Msg["EXPAND_ALL"] = "展開積木";
Blockly.Msg["EXPAND_BLOCK"] = "展開區塊";
Blockly.Msg["EXTERNAL_INPUTS"] = "外部輸入";
Blockly.Msg["HELP"] = "說明";
Blockly.Msg["INLINE_INPUTS"] = "單行輸入";
Blockly.Msg["IOS_CANCEL"] = "取消";
Blockly.Msg["IOS_ERROR"] = "錯誤";
Blockly.Msg["IOS_OK"] = "確定";
Blockly.Msg["IOS_PROCEDURES_ADD_INPUT"] = "+ 添加輸入";
Blockly.Msg["IOS_PROCEDURES_ALLOW_STATEMENTS"] = "允許加入陳述式";
Blockly.Msg["IOS_PROCEDURES_DUPLICATE_INPUTS_ERROR"] = "此功能有多個輸入內容。";
Blockly.Msg["IOS_PROCEDURES_INPUTS"] = "輸入";
Blockly.Msg["IOS_VARIABLES_ADD_BUTTON"] = "添加";
Blockly.Msg["IOS_VARIABLES_ADD_VARIABLE"] = "+ 添加變數";
Blockly.Msg["IOS_VARIABLES_DELETE_BUTTON"] = "刪除";
Blockly.Msg["IOS_VARIABLES_EMPTY_NAME_ERROR"] = "您不能使用空的變數名稱。";
Blockly.Msg["IOS_VARIABLES_RENAME_BUTTON"] = "重新命名";
Blockly.Msg["IOS_VARIABLES_VARIABLE_NAME"] = "變數名稱";
Blockly.Msg["LEAPHY_ANALOG_READ"] = "Read anapin"; // untranslated
Blockly.Msg["LEAPHY_BUZZ_BUZZ"] = "Buzz"; // untranslated
Blockly.Msg["LEAPHY_BUZZ_HERTZ"] = "Hertz"; // untranslated
Blockly.Msg["LEAPHY_BUZZ_MS"] = "ms"; // untranslated
Blockly.Msg["LEAPHY_CLICK_CATEGORY"] = "Leaphy Click"; // untranslated
Blockly.Msg["LEAPHY_DIGITAL_READ"] = "Read digipin"; // untranslated
Blockly.Msg["LEAPHY_DISPLAY_CLEAR"] = "Clear display"; // untranslated
Blockly.Msg["LEAPHY_DISPLAY_DISPLAY"] = "Show on display"; // untranslated
Blockly.Msg["LEAPHY_DISPLAY_PRINT"] = "Display - Set Ln."; // untranslated
Blockly.Msg["LEAPHY_EXTRA_CATEGORY"] = "Leaphy Extra"; // untranslated
Blockly.Msg["LEAPHY_FLITZ_CATEGORY"] = "Leaphy Flitz"; // untranslated
Blockly.Msg["LEAPHY_FLITZ_LED"] = "Set LED"; // untranslated
Blockly.Msg["LEAPHY_FLITZ_LED_B"] = "Blue"; // untranslated
Blockly.Msg["LEAPHY_FLITZ_LED_G"] = "Green"; // untranslated
Blockly.Msg["LEAPHY_FLITZ_LED_R"] = "Red"; // untranslated
Blockly.Msg["LEAPHY_FUNCTIONS_CATEGORY"] = "Custom Blocks"; // untranslated
Blockly.Msg["LEAPHY_GET_DISTANCE"] = "Get distance"; // untranslated
Blockly.Msg["LEAPHY_LED"] = "Led"; // untranslated
Blockly.Msg["LEAPHY_LED_BASIC_BLUE"] = "B"; // untranslated
Blockly.Msg["LEAPHY_LED_BASIC_GREEN"] = "G"; // untranslated
Blockly.Msg["LEAPHY_LED_BASIC_LED"] = "Ledstrip basis - Led"; // untranslated
Blockly.Msg["LEAPHY_LED_BASIC_RED"] = "R"; // untranslated
Blockly.Msg["LEAPHY_LED_BLUE"] = "Blue"; // untranslated
Blockly.Msg["LEAPHY_LED_GREEN"] = "Green"; // untranslated
Blockly.Msg["LEAPHY_LED_RED"] = "Red"; // untranslated
Blockly.Msg["LEAPHY_LED_SET_LEDS"] = "Leds"; // untranslated
Blockly.Msg["LEAPHY_LED_SET_PIN"] = "Pin"; // untranslated
Blockly.Msg["LEAPHY_LED_SET_SPEEDVALUE"] = "Ledstrip demo - Speed"; // untranslated
Blockly.Msg["LEAPHY_LED_SET_STRIP"] = "Set LED Strip"; // untranslated
Blockly.Msg["LEAPHY_LED_STRIP_BREATHE"] = "Breathe"; // untranslated
Blockly.Msg["LEAPHY_LED_STRIP_COLORGULF"] = "Color Gulf"; // untranslated
Blockly.Msg["LEAPHY_LED_STRIP_DEMO"] = "Ledstrip demo"; // untranslated
Blockly.Msg["LEAPHY_LED_STRIP_GULF"] = "Gulf"; // untranslated
Blockly.Msg["LEAPHY_LED_STRIP_LIGHTBANK"] = "Lightbank"; // untranslated
Blockly.Msg["LEAPHY_LED_STRIP_RAINBOW"] = "Rainbow"; // untranslated
Blockly.Msg["LEAPHY_MOTOR_A_DROPDOWN"] = "Motor_A"; // untranslated
Blockly.Msg["LEAPHY_MOTOR_BACKWARD"] = "Backward"; // untranslated
Blockly.Msg["LEAPHY_MOTOR_B_DROPDOWN"] = "Motor_B"; // untranslated
Blockly.Msg["LEAPHY_MOTOR_DIRECTION"] = "Direction"; // untranslated
Blockly.Msg["LEAPHY_MOTOR_FORWARD"] = "Forward"; // untranslated
Blockly.Msg["LEAPHY_MOTOR_LEFT"] = "Left"; // untranslated
Blockly.Msg["LEAPHY_MOTOR_LEFT_DROPDOWN"] = "Motor_L"; // untranslated
Blockly.Msg["LEAPHY_MOTOR_RIGHT"] = "Right"; // untranslated
Blockly.Msg["LEAPHY_MOTOR_RIGHT_DROPDOWN"] = "Motor_R"; // untranslated
Blockly.Msg["LEAPHY_MOTOR_SPEED"] = "Speed"; // untranslated
Blockly.Msg["LEAPHY_MOTOR_TYPE"] = "Type"; // untranslated
Blockly.Msg["LEAPHY_NUMBERS_CATEGORY"] = "Numbers"; // untranslated
Blockly.Msg["LEAPHY_ORIGINAL_CATEGORY"] = "Leaphy Original"; // untranslated
Blockly.Msg["LEAPHY_READ_HAND"] = "Read Hand sensor"; // untranslated
Blockly.Msg["LEAPHY_READ_STOMACH"] = "Read Belly sensor"; // untranslated
Blockly.Msg["LEAPHY_RGB_COLOR_BLUE"] = "Color B-255"; // untranslated
Blockly.Msg["LEAPHY_RGB_COLOR_GREEN"] = "Color G-255"; // untranslated
Blockly.Msg["LEAPHY_RGB_COLOR_RED"] = "Color R-255"; // untranslated
Blockly.Msg["LEAPHY_RGB_RAW_COLOR_BLUE"] = "RawColor Blue"; // untranslated
Blockly.Msg["LEAPHY_RGB_RAW_COLOR_GREEN"] = "RawColor Green"; // untranslated
Blockly.Msg["LEAPHY_RGB_RAW_COLOR_RED"] = "RawColor Red"; // untranslated
Blockly.Msg["LEAPHY_RGB_READ_SENSOR"] = "Read RGB sensor"; // untranslated
Blockly.Msg["LEAPHY_SERIAL_PRINT"] = "Show on screen"; // untranslated
Blockly.Msg["LEAPHY_SITUATION_CATEGORY"] = "Thinkflow"; // untranslated
Blockly.Msg["LEAPHY_SONAR_READ_ECHO"] = "Echo"; // untranslated
Blockly.Msg["LEAPHY_SONAR_READ_TRIG"] = "Sonar Trig"; // untranslated
Blockly.Msg["LEAPHY_START"] = "Leaphy"; // untranslated
Blockly.Msg["LEAPHY_STOMACH_SENSOR_TYPE1"] = "Type 1"; // untranslated
Blockly.Msg["LEAPHY_STOMACH_SENSOR_TYPE2"] = "Type 2"; // untranslated
Blockly.Msg["LEAPHY_UNO_CATEGORY"] = "Arduino Uno"; // untranslated
Blockly.Msg["LEAPHY_VARIABLES_CATEGORY"] = "Variables"; // untranslated
Blockly.Msg["LISTS_CREATE_EMPTY_HELPURL"] = "https://github.com/google/blockly/wiki/Lists#create-empty-list"; // untranslated
Blockly.Msg["LISTS_CREATE_EMPTY_TITLE"] = "建立空的清單";
Blockly.Msg["LISTS_CREATE_EMPTY_TOOLTIP"] = "返回一個長度(項目數量)為 0 的清單,不包含任何資料記錄";
Blockly.Msg["LISTS_CREATE_WITH_CONTAINER_TITLE_ADD"] = "清單";
Blockly.Msg["LISTS_CREATE_WITH_CONTAINER_TOOLTIP"] = "添加、刪除或重新排列各部份以重新配置這個清單區塊。";
Blockly.Msg["LISTS_CREATE_WITH_HELPURL"] = "https://github.com/google/blockly/wiki/Lists#create-list-with"; // untranslated
Blockly.Msg["LISTS_CREATE_WITH_INPUT_WITH"] = "使用這些值建立清單";
Blockly.Msg["LISTS_CREATE_WITH_ITEM_TOOLTIP"] = "添加一個項目到清單裡。";
Blockly.Msg["LISTS_CREATE_WITH_TOOLTIP"] = "建立一個具備任意數量項目的清單。";
Blockly.Msg["LISTS_GET_INDEX_FIRST"] = "第一筆";
Blockly.Msg["LISTS_GET_INDEX_FROM_END"] = "倒數第 # 筆";
Blockly.Msg["LISTS_GET_INDEX_FROM_START"] = "#"; // untranslated
Blockly.Msg["LISTS_GET_INDEX_GET"] = "取得";
Blockly.Msg["LISTS_GET_INDEX_GET_REMOVE"] = "取得並移除";
Blockly.Msg["LISTS_GET_INDEX_LAST"] = "最後一筆";
Blockly.Msg["LISTS_GET_INDEX_RANDOM"] = "隨機";
Blockly.Msg["LISTS_GET_INDEX_REMOVE"] = "移除";
Blockly.Msg["LISTS_GET_INDEX_TAIL"] = ""; // untranslated
Blockly.Msg["LISTS_GET_INDEX_TOOLTIP_GET_FIRST"] = "返回清單中的第一個項目。";
Blockly.Msg["LISTS_GET_INDEX_TOOLTIP_GET_FROM"] = "返回在清單中指定位置的項目。";
Blockly.Msg["LISTS_GET_INDEX_TOOLTIP_GET_LAST"] = "返回清單中的最後一個項目。";
Blockly.Msg["LISTS_GET_INDEX_TOOLTIP_GET_RANDOM"] = "返回清單中隨機一個項目。";
Blockly.Msg["LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_FIRST"] = "移除並返回清單中的第一個項目。";
Blockly.Msg["LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_FROM"] = "移除並返回清單中的指定位置的項目。";
Blockly.Msg["LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_LAST"] = "移除並返回清單中的最後一個項目。";
Blockly.Msg["LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_RANDOM"] = "移除並返回清單中的隨機項目。";
Blockly.Msg["LISTS_GET_INDEX_TOOLTIP_REMOVE_FIRST"] = "移除清單中的第一個項目。";
Blockly.Msg["LISTS_GET_INDEX_TOOLTIP_REMOVE_FROM"] = "移除在清單中指定位置的項目。";
Blockly.Msg["LISTS_GET_INDEX_TOOLTIP_REMOVE_LAST"] = "移除清單中的最後一個項目。";
Blockly.Msg["LISTS_GET_INDEX_TOOLTIP_REMOVE_RANDOM"] = "移除清單中隨機一個項目。";
Blockly.Msg["LISTS_GET_SUBLIST_END_FROM_END"] = "到 # 倒數";
Blockly.Msg["LISTS_GET_SUBLIST_END_FROM_START"] = "到 #";
Blockly.Msg["LISTS_GET_SUBLIST_END_LAST"] = "到 最後面";
Blockly.Msg["LISTS_GET_SUBLIST_HELPURL"] = "https://github.com/google/blockly/wiki/Lists#getting-a-sublist"; // untranslated
Blockly.Msg["LISTS_GET_SUBLIST_START_FIRST"] = "取得子清單 從 最前面";
Blockly.Msg["LISTS_GET_SUBLIST_START_FROM_END"] = "取得子清單 從 # 倒數";
Blockly.Msg["LISTS_GET_SUBLIST_START_FROM_START"] = "取得子清單 從 #";
Blockly.Msg["LISTS_GET_SUBLIST_TAIL"] = ""; // untranslated
Blockly.Msg["LISTS_GET_SUBLIST_TOOLTIP"] = "複製清單中指定的部分。";
Blockly.Msg["LISTS_INDEX_FROM_END_TOOLTIP"] = "%1 是最後一個項目。";
Blockly.Msg["LISTS_INDEX_FROM_START_TOOLTIP"] = "%1 是第一個項目。";
Blockly.Msg["LISTS_INDEX_OF_FIRST"] = "從 最前面 索引項目";
Blockly.Msg["LISTS_INDEX_OF_HELPURL"] = "https://github.com/google/blockly/wiki/Lists#getting-items-from-a-list"; // untranslated
Blockly.Msg["LISTS_INDEX_OF_LAST"] = "從 最後面 索引項目";
Blockly.Msg["LISTS_INDEX_OF_TOOLTIP"] = "在清單中檢索是否有包含項目,如果有,返回從頭/倒數算起的索引值。如果沒有則返回 %1。";
Blockly.Msg["LISTS_INLIST"] = "自清單";
Blockly.Msg["LISTS_ISEMPTY_HELPURL"] = "https://github.com/google/blockly/wiki/Lists#is-empty"; // untranslated
Blockly.Msg["LISTS_ISEMPTY_TITLE"] = "%1 值為空";
Blockly.Msg["LISTS_ISEMPTY_TOOLTIP"] = "如果該清單為空,則返回 true。";
Blockly.Msg["LISTS_LENGTH_HELPURL"] = "https://github.com/google/blockly/wiki/Lists#length-of"; // untranslated
Blockly.Msg["LISTS_LENGTH_TITLE"] = "長度 %1";
Blockly.Msg["LISTS_LENGTH_TOOLTIP"] = "返回清單的長度(項目數)。";
Blockly.Msg["LISTS_REPEAT_HELPURL"] = "https://github.com/google/blockly/wiki/Lists#create-list-with"; // untranslated
Blockly.Msg["LISTS_REPEAT_TITLE"] = "建立清單使用項目 %1 重複 %2 次";
Blockly.Msg["LISTS_REPEAT_TOOLTIP"] = "建立一個清單,項目中包含指定重複次數的值。";
Blockly.Msg["LISTS_REVERSE_HELPURL"] = "https://github.com/google/blockly/wiki/Lists#reversing-a-list";
Blockly.Msg["LISTS_REVERSE_MESSAGE0"] = "反轉%1";
Blockly.Msg["LISTS_REVERSE_TOOLTIP"] = "反轉清單的複製內容。";
Blockly.Msg["LISTS_SET_INDEX_HELPURL"] = "https://github.com/google/blockly/wiki/Lists#in-list--set"; // untranslated
Blockly.Msg["LISTS_SET_INDEX_INPUT_TO"] = "為";
Blockly.Msg["LISTS_SET_INDEX_INSERT"] = "添加";
Blockly.Msg["LISTS_SET_INDEX_SET"] = "設定";
Blockly.Msg["LISTS_SET_INDEX_TOOLTIP_INSERT_FIRST"] = "添加一個項目到清單中的第一個位置。";
Blockly.Msg["LISTS_SET_INDEX_TOOLTIP_INSERT_FROM"] = "添加一個項目到清單中的指定位置。";
Blockly.Msg["LISTS_SET_INDEX_TOOLTIP_INSERT_LAST"] = "添加一個項目到清單中的最後一個位置。";
Blockly.Msg["LISTS_SET_INDEX_TOOLTIP_INSERT_RANDOM"] = "添加一個項目到清單中的隨機位置。";
Blockly.Msg["LISTS_SET_INDEX_TOOLTIP_SET_FIRST"] = "設定清單中的第一個項目。";
Blockly.Msg["LISTS_SET_INDEX_TOOLTIP_SET_FROM"] = "設定清單中指定位置的項目。";
Blockly.Msg["LISTS_SET_INDEX_TOOLTIP_SET_LAST"] = "設定清單中的最後一個項目。";
Blockly.Msg["LISTS_SET_INDEX_TOOLTIP_SET_RANDOM"] = "設定清單中隨機一個項目。";
Blockly.Msg["LISTS_SORT_HELPURL"] = "https://github.com/google/blockly/wiki/Lists#sorting-a-list";
Blockly.Msg["LISTS_SORT_ORDER_ASCENDING"] = "升序";
Blockly.Msg["LISTS_SORT_ORDER_DESCENDING"] = "降序";
Blockly.Msg["LISTS_SORT_TITLE"] = "排列 %1 %2 %3";
Blockly.Msg["LISTS_SORT_TOOLTIP"] = "排序清單的複製內容。";
Blockly.Msg["LISTS_SORT_TYPE_IGNORECASE"] = "依字母排序,忽略大小寫";
Blockly.Msg["LISTS_SORT_TYPE_NUMERIC"] = "依數字";
Blockly.Msg["LISTS_SORT_TYPE_TEXT"] = "依字母";
Blockly.Msg["LISTS_SPLIT_HELPURL"] = "https://github.com/google/blockly/wiki/Lists#splitting-strings-and-joining-lists"; // untranslated
Blockly.Msg["LISTS_SPLIT_LIST_FROM_TEXT"] = "從文本製作清單";
Blockly.Msg["LISTS_SPLIT_TEXT_FROM_LIST"] = "從清單拆出文本";
Blockly.Msg["LISTS_SPLIT_TOOLTIP_JOIN"] = "串起清單項目成一個文本,並用分隔符號分開。";
Blockly.Msg["LISTS_SPLIT_TOOLTIP_SPLIT"] = "將文本變成清單項目,按分隔符號拆分。";
Blockly.Msg["LISTS_SPLIT_WITH_DELIMITER"] = "用分隔符";
Blockly.Msg["LOGIC_BOOLEAN_FALSE"] = "假";
Blockly.Msg["LOGIC_BOOLEAN_HELPURL"] = "https://github.com/google/blockly/wiki/Logic#values"; // untranslated
Blockly.Msg["LOGIC_BOOLEAN_TOOLTIP"] = "返回真或假。";
Blockly.Msg["LOGIC_BOOLEAN_TRUE"] = "真";
Blockly.Msg["LOGIC_COMPARE_HELPURL"] = "https://zh.wikipedia.org/wiki/不等";
Blockly.Msg["LOGIC_COMPARE_TOOLTIP_EQ"] = "如果這兩個輸入區塊的結果相等,返回 true。";
Blockly.Msg["LOGIC_COMPARE_TOOLTIP_GT"] = "如果第一個輸入結果大於第二個,返回 true。";
Blockly.Msg["LOGIC_COMPARE_TOOLTIP_GTE"] = "如果第一個輸入結果大於或等於第二個,返回 true。";
Blockly.Msg["LOGIC_COMPARE_TOOLTIP_LT"] = "如果第一個輸入結果比第二個小,返回 true。";
Blockly.Msg["LOGIC_COMPARE_TOOLTIP_LTE"] = "如果第一個輸入結果小於或等於第二個,返回 true。";
Blockly.Msg["LOGIC_COMPARE_TOOLTIP_NEQ"] = "如果這兩個輸入區塊的結果不相等,返回 true。";
Blockly.Msg["LOGIC_NEGATE_HELPURL"] = "https://github.com/google/blockly/wiki/Logic#not"; // untranslated
Blockly.Msg["LOGIC_NEGATE_TITLE"] = "%1 不成立";
Blockly.Msg["LOGIC_NEGATE_TOOLTIP"] = "如果輸入結果是 false,則返回 true。如果輸入結果是 true,則返回 false。";
Blockly.Msg["LOGIC_NULL"] = "空";
Blockly.Msg["LOGIC_NULL_HELPURL"] = "https://en.wikipedia.org/wiki/Nullable_type"; // untranslated
Blockly.Msg["LOGIC_NULL_TOOLTIP"] = "返回空值。";
Blockly.Msg["LOGIC_OPERATION_AND"] = "和";
Blockly.Msg["LOGIC_OPERATION_HELPURL"] = "https://github.com/google/blockly/wiki/Logic#logical-operations"; // untranslated
Blockly.Msg["LOGIC_OPERATION_OR"] = "或";
Blockly.Msg["LOGIC_OPERATION_TOOLTIP_AND"] = "如果兩個輸入結果都為 true,則返回 true。";
Blockly.Msg["LOGIC_OPERATION_TOOLTIP_OR"] = "如果至少一個輸入結果為 true,返回 true。";
Blockly.Msg["LOGIC_TERNARY_CONDITION"] = "測試";
Blockly.Msg["LOGIC_TERNARY_HELPURL"] = "https://zh.wikipedia.org/wiki/條件運算符";
Blockly.Msg["LOGIC_TERNARY_IF_FALSE"] = "如果為假";
Blockly.Msg["LOGIC_TERNARY_IF_TRUE"] = "如果為真";
Blockly.Msg["LOGIC_TERNARY_TOOLTIP"] = "檢查「測試」中的條件。如果條件為真,將返回「如果為真」的值;否則,返回「如果為假」的值。";
Blockly.Msg["MATH_ADDITION_SYMBOL"] = "+"; // untranslated
Blockly.Msg["MATH_ARITHMETIC_HELPURL"] = "https://zh.wikipedia.org/wiki/算術";
Blockly.Msg["MATH_ARITHMETIC_TOOLTIP_ADD"] = "返回兩個數字的總和。";
Blockly.Msg["MATH_ARITHMETIC_TOOLTIP_DIVIDE"] = "返回兩個數字的商。";
Blockly.Msg["MATH_ARITHMETIC_TOOLTIP_MINUS"] = "返回兩個數字的差。";
Blockly.Msg["MATH_ARITHMETIC_TOOLTIP_MULTIPLY"] = "返回兩個數字的乘積。";
Blockly.Msg["MATH_ARITHMETIC_TOOLTIP_POWER"] = "返回第二個數字的指數的第一個數字。";
Blockly.Msg["MATH_ATAN2_HELPURL"] = "https://zh.wikipedia.org/wiki/Atan2";
Blockly.Msg["MATH_ATAN2_TITLE"] = "X:%1 Y:%2 的 Atan2";
Blockly.Msg["MATH_ATAN2_TOOLTIP"] = "回傳點(X,Y)從 -180 至 180 度的反正切值。";
Blockly.Msg["MATH_CHANGE_HELPURL"] = "https://zh.wikipedia.org/wiki/加法";
Blockly.Msg["MATH_CHANGE_TITLE"] = "修改 %1 自 %2";
Blockly.Msg["MATH_CHANGE_TOOLTIP"] = "將數字加到變數「%1」。";
Blockly.Msg["MATH_CONSTANT_HELPURL"] = "https://zh.wikipedia.org/wiki/數學常數";
Blockly.Msg["MATH_CONSTANT_TOOLTIP"] = "返回一個的常見常量: π (3.141......),e (2.718...)、 φ (1.618...)、 開方(2) (1.414......)、 開方(½) (0.707......) 或 ∞ (無窮大)。";
Blockly.Msg["MATH_CONSTRAIN_HELPURL"] = "https://en.wikipedia.org/wiki/Clamping_(graphics)"; // untranslated
Blockly.Msg["MATH_CONSTRAIN_TITLE"] = "限制數字 %1 介於(低)%2 到(高)%3";
Blockly.Msg["MATH_CONSTRAIN_TOOLTIP"] = "限制數字介於兩個指定的數字之間(包含)。";
Blockly.Msg["MATH_DIVISION_SYMBOL"] = "÷"; // untranslated
Blockly.Msg["MATH_IS_DIVISIBLE_BY"] = "可被整除";
Blockly.Msg["MATH_IS_EVEN"] = "是偶數";
Blockly.Msg["MATH_IS_NEGATIVE"] = "是負數";
Blockly.Msg["MATH_IS_ODD"] = "是奇數";
Blockly.Msg["MATH_IS_POSITIVE"] = "是正值";
Blockly.Msg["MATH_IS_PRIME"] = "是質數";
Blockly.Msg["MATH_IS_TOOLTIP"] = "如果數字是偶數,奇數,非負整數,正數、 負數,或如果它是可被某數字整除,則返回 true 或 false。";
Blockly.Msg["MATH_IS_WHOLE"] = "是整數";
Blockly.Msg["MATH_MODULO_HELPURL"] = "https://zh.wikipedia.org/wiki/模除";
Blockly.Msg["MATH_MODULO_TITLE"] = "%1 除以 %2 的餘數";
Blockly.Msg["MATH_MODULO_TOOLTIP"] = "回傳兩個數字相除的餘數。";
Blockly.Msg["MATH_MULTIPLICATION_SYMBOL"] = "×"; // untranslated
Blockly.Msg["MATH_NUMBER_HELPURL"] = "https://zh.wikipedia.org/wiki/數";
Blockly.Msg["MATH_NUMBER_TOOLTIP"] = "一個數字。";
Blockly.Msg["MATH_ONLIST_HELPURL"] = ""; // untranslated
Blockly.Msg["MATH_ONLIST_OPERATOR_AVERAGE"] = "平均數 自清單";
Blockly.Msg["MATH_ONLIST_OPERATOR_MAX"] = "最大值 自清單";
Blockly.Msg["MATH_ONLIST_OPERATOR_MEDIAN"] = "中位數 自清單";
Blockly.Msg["MATH_ONLIST_OPERATOR_MIN"] = "最小值 自清單";
Blockly.Msg["MATH_ONLIST_OPERATOR_MODE"] = "比較眾數 自清單";
Blockly.Msg["MATH_ONLIST_OPERATOR_RANDOM"] = "隨機抽取 自清單";
Blockly.Msg["MATH_ONLIST_OPERATOR_STD_DEV"] = "標準差 自清單";
Blockly.Msg["MATH_ONLIST_OPERATOR_SUM"] = "數字總和 自清單";
Blockly.Msg["MATH_ONLIST_TOOLTIP_AVERAGE"] = "返回清單中數值的平均值(算術平均值)。";
Blockly.Msg["MATH_ONLIST_TOOLTIP_MAX"] = "返回清單項目中最大的數字。";
Blockly.Msg["MATH_ONLIST_TOOLTIP_MEDIAN"] = "返回清單中數值的中位數。";
Blockly.Msg["MATH_ONLIST_TOOLTIP_MIN"] = "返回清單項目中最小的數字。";
Blockly.Msg["MATH_ONLIST_TOOLTIP_MODE"] = "返回一個清單中的最常見的項目。";
Blockly.Msg["MATH_ONLIST_TOOLTIP_RANDOM"] = "從清單中返回一個隨機的項目。";
Blockly.Msg["MATH_ONLIST_TOOLTIP_STD_DEV"] = "返回清單中數字的標準差。";
Blockly.Msg["MATH_ONLIST_TOOLTIP_SUM"] = "返回清單中的所有數字的總和。";
Blockly.Msg["MATH_POWER_SYMBOL"] = "^"; // untranslated
Blockly.Msg["MATH_RANDOM_FLOAT_HELPURL"] = "https://zh.wikipedia.org/wiki/隨機數生成器";
Blockly.Msg["MATH_RANDOM_FLOAT_TITLE_RANDOM"] = "隨機取分數";
Blockly.Msg["MATH_RANDOM_FLOAT_TOOLTIP"] = "在 0.0(包含)和 1.0(不包含)之間隨機取一個數。";
Blockly.Msg["MATH_RANDOM_INT_HELPURL"] = "https://zh.wikipedia.org/wiki/隨機數生成器";
Blockly.Msg["MATH_RANDOM_INT_TITLE"] = "隨機取數 %1 到 %2";
Blockly.Msg["MATH_RANDOM_INT_TOOLTIP"] = "在指定二個數之間隨機取一個數(包含)。";
Blockly.Msg["MATH_ROUND_HELPURL"] = "https://zh.wikipedia.org/wiki/數值簡化";
Blockly.Msg["MATH_ROUND_OPERATOR_ROUND"] = "四捨五入";
Blockly.Msg["MATH_ROUND_OPERATOR_ROUNDDOWN"] = "無條件捨去";
Blockly.Msg["MATH_ROUND_OPERATOR_ROUNDUP"] = "無條件進位";
Blockly.Msg["MATH_ROUND_TOOLTIP"] = "將數字無條件進位或無條件捨去。";
Blockly.Msg["MATH_SINGLE_HELPURL"] = "https://zh.wikipedia.org/wiki/平方根";
Blockly.Msg["MATH_SINGLE_OP_ABSOLUTE"] = "絕對值";
Blockly.Msg["MATH_SINGLE_OP_ROOT"] = "開根號";
Blockly.Msg["MATH_SINGLE_TOOLTIP_ABS"] = "返回指定數字的絕對值。";
Blockly.Msg["MATH_SINGLE_TOOLTIP_EXP"] = "返回指定數字指數的e的冪次。";
Blockly.Msg["MATH_SINGLE_TOOLTIP_LN"] = "返回指定數字的自然對數。";
Blockly.Msg["MATH_SINGLE_TOOLTIP_LOG10"] = "返回指定數字的對數。";
Blockly.Msg["MATH_SINGLE_TOOLTIP_NEG"] = "返回指定數字的相反數。";
Blockly.Msg["MATH_SINGLE_TOOLTIP_POW10"] = "返回指定數字指數的10的冪次。";
Blockly.Msg["MATH_SINGLE_TOOLTIP_ROOT"] = "返回指定數字的平方根。";
Blockly.Msg["MATH_SUBTRACTION_SYMBOL"] = "-"; // untranslated
Blockly.Msg["MATH_TRIG_ACOS"] = "acos"; // untranslated
Blockly.Msg["MATH_TRIG_ASIN"] = "asin"; // untranslated
Blockly.Msg["MATH_TRIG_ATAN"] = "atan"; // untranslated
Blockly.Msg["MATH_TRIG_COS"] = "cos"; // untranslated
Blockly.Msg["MATH_TRIG_HELPURL"] = "https://zh.wikipedia.org/wiki/三角函數";
Blockly.Msg["MATH_TRIG_SIN"] = "sin"; // untranslated
Blockly.Msg["MATH_TRIG_TAN"] = "tan"; // untranslated
Blockly.Msg["MATH_TRIG_TOOLTIP_ACOS"] = "返回指定角度的反餘弦值(非弧度)。";
Blockly.Msg["MATH_TRIG_TOOLTIP_ASIN"] = "返回指定角度的反正弦值(非弧度)。";
Blockly.Msg["MATH_TRIG_TOOLTIP_ATAN"] = "返回指定角度的反正切值。";
Blockly.Msg["MATH_TRIG_TOOLTIP_COS"] = "返回指定角度的餘弦值(非弧度)。";
Blockly.Msg["MATH_TRIG_TOOLTIP_SIN"] = "返回指定角度的正弦值(非弧度)。";
Blockly.Msg["MATH_TRIG_TOOLTIP_TAN"] = "返回指定角度的正切值(非弧度)。";
Blockly.Msg["NEW_COLOUR_VARIABLE"] = "建立顏色變數…";
Blockly.Msg["NEW_NUMBER_VARIABLE"] = "建立數值變數……";
Blockly.Msg["NEW_STRING_VARIABLE"] = "建立字串變數……";
Blockly.Msg["NEW_VARIABLE"] = "建立變數…";
Blockly.Msg["NEW_VARIABLE_TITLE"] = "新變數名稱:";
Blockly.Msg["NEW_VARIABLE_TYPE_TITLE"] = "新變數類型:";
Blockly.Msg["ORDINAL_NUMBER_SUFFIX"] = ""; // untranslated
Blockly.Msg["PROCEDURES_ALLOW_STATEMENTS"] = "允許陳述式";
Blockly.Msg["PROCEDURES_BEFORE_PARAMS"] = "與:";
Blockly.Msg["PROCEDURES_CALLNORETURN_HELPURL"] = "https://zh.wikipedia.org/wiki/子程式";
Blockly.Msg["PROCEDURES_CALLNORETURN_TOOLTIP"] = "執行使用者定義的函式「%1」。";
Blockly.Msg["PROCEDURES_CALLRETURN_HELPURL"] = "https://zh.wikipedia.org/wiki/%E5%AD%90%E7%A8%8B%E5%BA%8F";
Blockly.Msg["PROCEDURES_CALLRETURN_TOOLTIP"] = "執行使用者定義的函式「%1」,並使用它的回傳值。";
Blockly.Msg["PROCEDURES_CALL_BEFORE_PARAMS"] = "與:";
Blockly.Msg["PROCEDURES_CREATE_DO"] = "建立「%1」";
Blockly.Msg["PROCEDURES_DEFNORETURN_COMMENT"] = "描述此函式...";
Blockly.Msg["PROCEDURES_DEFNORETURN_DO"] = ""; // untranslated
Blockly.Msg["PROCEDURES_DEFNORETURN_HELPURL"] = "https://zh.wikipedia.org/wiki/子程式";
Blockly.Msg["PROCEDURES_DEFNORETURN_PROCEDURE"] = "做些什麼";
Blockly.Msg["PROCEDURES_DEFNORETURN_TITLE"] = "到";
Blockly.Msg["PROCEDURES_DEFNORETURN_TOOLTIP"] = "創建一個無回傳值的函式。";
Blockly.Msg["PROCEDURES_DEFRETURN_HELPURL"] = "https://zh.wikipedia.org/wiki/子程式";
Blockly.Msg["PROCEDURES_DEFRETURN_RETURN"] = "返回";
Blockly.Msg["PROCEDURES_DEFRETURN_TITLE"] = "Subprogram"; // untranslated
Blockly.Msg["PROCEDURES_DEFRETURN_TOOLTIP"] = "創建一個有回傳值的的函式。";
Blockly.Msg["PROCEDURES_DEF_DUPLICATE_WARNING"] = "警告: 此函式中有重複的參數。";
Blockly.Msg["PROCEDURES_HIGHLIGHT_DEF"] = "反白顯示函式定義";
Blockly.Msg["PROCEDURES_IFRETURN_HELPURL"] = "http://c2.com/cgi/wiki?GuardClause"; // untranslated
Blockly.Msg["PROCEDURES_IFRETURN_TOOLTIP"] = "如果值為 true,則返回第二個值。";
Blockly.Msg["PROCEDURES_IFRETURN_WARNING"] = "警告:這個區塊只可以在定義函式時使用。";
Blockly.Msg["PROCEDURES_MUTATORARG_TITLE"] = "輸入名稱:";
Blockly.Msg["PROCEDURES_MUTATORARG_TOOLTIP"] = "添加一個輸入區塊到函式。";
Blockly.Msg["PROCEDURES_MUTATORCONTAINER_TITLE"] = "輸入";
Blockly.Msg["PROCEDURES_MUTATORCONTAINER_TOOLTIP"] = "添加、刪除或重新排列此函式的輸入。";
Blockly.Msg["REDO"] = "重做";
Blockly.Msg["REMOVE_COMMENT"] = "移除註解";
Blockly.Msg["RENAME_VARIABLE"] = "重新命名變數...";
Blockly.Msg["RENAME_VARIABLE_TITLE"] = "將所有「%1」變數重新命名為:";
Blockly.Msg["TEXT_APPEND_HELPURL"] = "https://github.com/google/blockly/wiki/Text#text-modification"; // untranslated
Blockly.Msg["TEXT_APPEND_TITLE"] = "至 %1 套用文字 %2";
Blockly.Msg["TEXT_APPEND_TOOLTIP"] = "添加一些文字到變數「%1」之後。";
Blockly.Msg["TEXT_CHANGECASE_HELPURL"] = "https://github.com/google/blockly/wiki/Text#adjusting-text-case"; // untranslated
Blockly.Msg["TEXT_CHANGECASE_OPERATOR_LOWERCASE"] = "轉成英文小寫";
Blockly.Msg["TEXT_CHANGECASE_OPERATOR_TITLECASE"] = "轉成 英文首字大寫";
Blockly.Msg["TEXT_CHANGECASE_OPERATOR_UPPERCASE"] = "轉成英文大寫";
Blockly.Msg["TEXT_CHANGECASE_TOOLTIP"] = "使用不同的大小寫複製這段文字。";
Blockly.Msg["TEXT_CHARAT_FIRST"] = "取得 第一個字元";
Blockly.Msg["TEXT_CHARAT_FROM_END"] = "取得 倒數第 # 個字元";
Blockly.Msg["TEXT_CHARAT_FROM_START"] = "取得 字元 #";
Blockly.Msg["TEXT_CHARAT_HELPURL"] = "https://github.com/google/blockly/wiki/Text#extracting-text"; // untranslated
Blockly.Msg["TEXT_CHARAT_LAST"] = "取得 最後一個字元";
Blockly.Msg["TEXT_CHARAT_RANDOM"] = "取得 任意字元";
Blockly.Msg["TEXT_CHARAT_TAIL"] = ""; // untranslated
Blockly.Msg["TEXT_CHARAT_TITLE"] = "在文字 %1 %2";
Blockly.Msg["TEXT_CHARAT_TOOLTIP"] = "返回位於指定位置的字元。";
Blockly.Msg["TEXT_COUNT_HELPURL"] = "https://github.com/google/blockly/wiki/Text#counting-substrings";
Blockly.Msg["TEXT_COUNT_MESSAGE0"] = "在%2計算%1";
Blockly.Msg["TEXT_COUNT_TOOLTIP"] = "計算某些文字在內容裡的出現次數。";
Blockly.Msg["TEXT_CREATE_JOIN_ITEM_TOOLTIP"] = "添加一個項目到字串中。";
Blockly.Msg["TEXT_CREATE_JOIN_TITLE_JOIN"] = "加入";
Blockly.Msg["TEXT_CREATE_JOIN_TOOLTIP"] = "添加、刪除或重新排列各部份以重新配置這個文字區塊。";
Blockly.Msg["TEXT_GET_SUBSTRING_END_FROM_END"] = "到 倒數第 # 個字元";
Blockly.Msg["TEXT_GET_SUBSTRING_END_FROM_START"] = "到 字元 #";
Blockly.Msg["TEXT_GET_SUBSTRING_END_LAST"] = "到 最後一個字元";
Blockly.Msg["TEXT_GET_SUBSTRING_HELPURL"] = "https://github.com/google/blockly/wiki/Text#extracting-a-region-of-text"; // untranslated
Blockly.Msg["TEXT_GET_SUBSTRING_INPUT_IN_TEXT"] = "在字串";
Blockly.Msg["TEXT_GET_SUBSTRING_START_FIRST"] = "取得 第一個字元";
Blockly.Msg["TEXT_GET_SUBSTRING_START_FROM_END"] = "取得 倒數第 # 個字元";
Blockly.Msg["TEXT_GET_SUBSTRING_START_FROM_START"] = "取得 字元 #";
Blockly.Msg["TEXT_GET_SUBSTRING_TAIL"] = ""; // untranslated
Blockly.Msg["TEXT_GET_SUBSTRING_TOOLTIP"] = "返回指定的部分文字。";
Blockly.Msg["TEXT_INDEXOF_HELPURL"] = "https://github.com/google/blockly/wiki/Text#finding-text"; // untranslated
Blockly.Msg["TEXT_INDEXOF_OPERATOR_FIRST"] = "從 最前面 索引字串";
Blockly.Msg["TEXT_INDEXOF_OPERATOR_LAST"] = "從 最後面 索引字串";
Blockly.Msg["TEXT_INDEXOF_TITLE"] = "在文字 %1 %2 %3";
Blockly.Msg["TEXT_INDEXOF_TOOLTIP"] = "在字串1中檢索是否有包含字串2,如果有,返回從頭/倒數算起的索引值。如果沒有則返回 %1。";
Blockly.Msg["TEXT_ISEMPTY_HELPURL"] = "https://github.com/google/blockly/wiki/Text#checking-for-empty-text"; // untranslated
Blockly.Msg["TEXT_ISEMPTY_TITLE"] = "%1 為空";
Blockly.Msg["TEXT_ISEMPTY_TOOLTIP"] = "如果提供的字串為空,則返回 true。";
Blockly.Msg["TEXT_JOIN_HELPURL"] = "https://github.com/google/blockly/wiki/Text#text-creation"; // untranslated
Blockly.Msg["TEXT_JOIN_TITLE_CREATEWITH"] = "字串組合";
Blockly.Msg["TEXT_JOIN_TOOLTIP"] = "通過連接任意數量的項目來建立一串文字。";
Blockly.Msg["TEXT_LENGTH_HELPURL"] = "https://github.com/google/blockly/wiki/Text#text-modification"; // untranslated
Blockly.Msg["TEXT_LENGTH_TITLE"] = "長度 %1";
Blockly.Msg["TEXT_LENGTH_TOOLTIP"] = "返回這串文字的字元數(包含空格)。";
Blockly.Msg["TEXT_PRINT_HELPURL"] = "https://github.com/google/blockly/wiki/Text#printing-text"; // untranslated
Blockly.Msg["TEXT_PRINT_TITLE"] = "輸出 %1";
Blockly.Msg["TEXT_PRINT_TOOLTIP"] = "輸出指定的文字、 數字或其他值。";
Blockly.Msg["TEXT_PROMPT_HELPURL"] = "https://github.com/google/blockly/wiki/Text#getting-input-from-the-user"; // untranslated
Blockly.Msg["TEXT_PROMPT_TOOLTIP_NUMBER"] = "輸入數字";
Blockly.Msg["TEXT_PROMPT_TOOLTIP_TEXT"] = "輸入文字";
Blockly.Msg["TEXT_PROMPT_TYPE_NUMBER"] = "輸入 數字 並顯示提示訊息";
Blockly.Msg["TEXT_PROMPT_TYPE_TEXT"] = "輸入 文字 並顯示提示訊息";
Blockly.Msg["TEXT_REPLACE_HELPURL"] = "https://github.com/google/blockly/wiki/Text#replacing-substrings";
Blockly.Msg["TEXT_REPLACE_MESSAGE0"] = "在%3以%2取代%1";
Blockly.Msg["TEXT_REPLACE_TOOLTIP"] = "取代在內容裡的全部某些文字。";
Blockly.Msg["TEXT_REVERSE_HELPURL"] = "https://github.com/google/blockly/wiki/Text#reversing-text";
Blockly.Msg["TEXT_REVERSE_MESSAGE0"] = "反轉%1";
Blockly.Msg["TEXT_REVERSE_TOOLTIP"] = "反轉排序在文字裡的字元。"; | Blockly.Msg["TEXT_TRIM_OPERATOR_LEFT"] = "消除左側空格";
Blockly.Msg["TEXT_TRIM_OPERATOR_RIGHT"] = "消除右側空格";
Blockly.Msg["TEXT_TRIM_TOOLTIP"] = "複製這段文字,同時刪除兩端多餘的空格。";
Blockly.Msg["TODAY"] = "今天";
Blockly.Msg["UNDO"] = "還原";
Blockly.Msg["UNNAMED_KEY"] = "未命名";
Blockly.Msg["VARIABLES_DEFAULT_NAME"] = "項目";
Blockly.Msg["VARIABLES_GET_CREATE_SET"] = "建立「賦值 %1」";
Blockly.Msg["VARIABLES_GET_HELPURL"] = "https://github.com/google/blockly/wiki/Variables#get"; // untranslated
Blockly.Msg["VARIABLES_GET_TOOLTIP"] = "返回此變數的值。";
Blockly.Msg["VARIABLES_SET"] = "賦值 %1 成 %2";
Blockly.Msg["VARIABLES_SET_CREATE_GET"] = "建立「取得 %1」";
Blockly.Msg["VARIABLES_SET_HELPURL"] = "https://github.com/google/blockly/wiki/Variables#set"; // untranslated
Blockly.Msg["VARIABLES_SET_TOOLTIP"] = "設定此變數,好和輸入結果相等。";
Blockly.Msg["VARIABLE_ALREADY_EXISTS"] = "已存在變數「%1」。";
Blockly.Msg["VARIABLE_ALREADY_EXISTS_FOR_ANOTHER_TYPE"] = "已存在名為「%1」,且用於其它類型「%2」的變數。";
Blockly.Msg["WORKSPACE_ARIA_LABEL"] = "Blockly工作區";
Blockly.Msg["WORKSPACE_COMMENT_DEFAULT_TEXT"] = "來說些事情...";
Blockly.Msg["CONTROLS_IF_ELSEIF_TITLE_ELSEIF"] = Blockly.Msg["CONTROLS_IF_MSG_ELSEIF"];
Blockly.Msg["CONTROLS_IF_ELSE_TITLE_ELSE"] = Blockly.Msg["CONTROLS_IF_MSG_ELSE"];
Blockly.Msg["CONTROLS_IF_IF_TITLE_IF"] = Blockly.Msg["CONTROLS_IF_MSG_IF"];
Blockly.Msg["LISTS_CREATE_WITH_ITEM_TITLE"] = Blockly.Msg["VARIABLES_DEFAULT_NAME"];
Blockly.Msg["LISTS_GET_INDEX_HELPURL"] = Blockly.Msg["LISTS_INDEX_OF_HELPURL"];
Blockly.Msg["LISTS_GET_INDEX_INPUT_IN_LIST"] = Blockly.Msg["LISTS_INLIST"];
Blockly.Msg["LISTS_GET_SUBLIST_INPUT_IN_LIST"] = Blockly.Msg["LISTS_INLIST"];
Blockly.Msg["LISTS_INDEX_OF_INPUT_IN_LIST"] = Blockly.Msg["LISTS_INLIST"];
Blockly.Msg["LISTS_SET_INDEX_INPUT_IN_LIST"] = Blockly.Msg["LISTS_INLIST"];
Blockly.Msg["MATH_CHANGE_TITLE_ITEM"] = Blockly.Msg["VARIABLES_DEFAULT_NAME"];
Blockly.Msg["PROCEDURES_DEFRETURN_COMMENT"] = Blockly.Msg["PROCEDURES_DEFNORETURN_COMMENT"];
Blockly.Msg["PROCEDURES_DEFRETURN_DO"] = Blockly.Msg["PROCEDURES_DEFNORETURN_DO"];
Blockly.Msg["PROCEDURES_DEFRETURN_PROCEDURE"] = Blockly.Msg["PROCEDURES_DEFNORETURN_PROCEDURE"];
Blockly.Msg["TEXT_APPEND_VARIABLE"] = Blockly.Msg["VARIABLES_DEFAULT_NAME"];
Blockly.Msg["TEXT_CREATE_JOIN_ITEM_TITLE_ITEM"] = Blockly.Msg["VARIABLES_DEFAULT_NAME"];
Blockly.Msg["LEAPHY_HUE"] = "188";
Blockly.Msg["LOGIC_HUE"] = "210";
Blockly.Msg["LOOPS_HUE"] = "120";
Blockly.Msg["MATH_HUE"] = "230";
Blockly.Msg["TEXTS_HUE"] = "160";
Blockly.Msg["LISTS_HUE"] = "260";
Blockly.Msg["COLOUR_HUE"] = "20";
Blockly.Msg["VARIABLES_HUE"] = "330";
Blockly.Msg["VARIABLES_DYNAMIC_HUE"] = "310";
Blockly.Msg["PROCEDURES_HUE"] = "290"; | Blockly.Msg["TEXT_TEXT_HELPURL"] = "https://zh.wikipedia.org/wiki/字串";
Blockly.Msg["TEXT_TEXT_TOOLTIP"] = "一個字元、一個單詞,或一串文字。";
Blockly.Msg["TEXT_TRIM_HELPURL"] = "https://github.com/google/blockly/wiki/Text#trimming-removing-spaces"; // untranslated
Blockly.Msg["TEXT_TRIM_OPERATOR_BOTH"] = "消除兩側空格"; |
lib.rs | //! Platform agnostic, effortless writable outputs generation
//!
//! # Example
//!
//! ```
//! // Run Screen for `ttyACM0` with 115200 baud rate
//! // $ screen /dev/ttyACM0 115200
//!
//! # struct Serial;
//! #
//! # use embedded_hal::serial::Write;
//! #
//! # impl<'a> Write<u8> for Serial {
//! # type Error = ();
//! #
//! # fn flush(&mut self) -> nb::Result<(), Self::Error> {
//! # Ok(())
//! # }
//! #
//! # fn write(&mut self, word: u8) -> nb::Result<(), Self::Error> {
//! # Ok(())
//! # }
//! # } | //! use ufmt::{derive::uDebug, uwrite};
//!
//! # let mut serial = Serial;
//! #[derive(uDebug)]
//! struct Pair { x: u32, y: u32 }
//!
//! let mut out = serial.output();
//! let pair = Pair { x: 1, y: 2 };
//! uwrite!(out, "{:?}", pair).unwrap(); // prints: Pair { x: 1, y: 2 }
//! ```
#![no_std]
/// Serial (USART) implementation
pub mod serial;
/// Generic trait for output generation
pub trait Output<'a, T> {
/// Returns writable output
fn output(&'a mut self) -> T;
} | //! #
//! use uout::Output; |
controller_test.go | package deployment
import (
"context"
"os"
"testing"
"github.com/equinor/radix-operator/pkg/apis/defaults"
"github.com/equinor/radix-operator/pkg/apis/kube"
"github.com/equinor/radix-operator/pkg/apis/test"
"github.com/equinor/radix-operator/pkg/apis/utils"
radixclient "github.com/equinor/radix-operator/pkg/client/clientset/versioned"
fakeradix "github.com/equinor/radix-operator/pkg/client/clientset/versioned/fake"
informers "github.com/equinor/radix-operator/pkg/client/informers/externalversions"
prometheusclient "github.com/prometheus-operator/prometheus-operator/pkg/client/versioned"
prometheusfake "github.com/prometheus-operator/prometheus-operator/pkg/client/versioned/fake"
"github.com/stretchr/testify/assert"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
kubeinformers "k8s.io/client-go/informers"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/kubernetes/fake"
"k8s.io/client-go/tools/record"
)
const (
clusterName = "AnyClusterName"
containerRegistry = "any.container.registry"
)
func setupTest() (*test.Utils, kubernetes.Interface, *kube.Kube, radixclient.Interface, prometheusclient.Interface) {
client := fake.NewSimpleClientset()
radixClient := fakeradix.NewSimpleClientset()
kubeUtil, _ := kube.New(client, radixClient)
prometheusclient := prometheusfake.NewSimpleClientset()
handlerTestUtils := test.NewTestUtils(client, radixClient)
handlerTestUtils.CreateClusterPrerequisites(clusterName, containerRegistry)
return &handlerTestUtils, client, kubeUtil, radixClient, prometheusclient
}
func teardownTest() |
func Test_Controller_Calls_Handler(t *testing.T) {
anyAppName := "test-app"
anyEnvironment := "qa"
// Setup
tu, client, kubeUtil, radixClient, prometheusclient := setupTest()
client.CoreV1().Namespaces().Create(
context.TODO(),
&corev1.Namespace{
ObjectMeta: metav1.ObjectMeta{
Name: utils.GetEnvironmentNamespace(anyAppName, anyEnvironment),
Labels: map[string]string{
kube.RadixAppLabel: anyAppName,
kube.RadixEnvLabel: anyEnvironment,
},
},
},
metav1.CreateOptions{})
stop := make(chan struct{})
synced := make(chan bool)
defer close(stop)
defer close(synced)
radixInformerFactory := informers.NewSharedInformerFactory(radixClient, 0)
kubeInformerFactory := kubeinformers.NewSharedInformerFactory(client, 0)
deploymentHandler := NewHandler(
client,
kubeUtil,
radixClient,
prometheusclient,
WithHasSyncedCallback(func(syncedOk bool) { synced <- syncedOk }),
)
go startDeploymentController(client, kubeUtil, radixClient, radixInformerFactory, kubeInformerFactory, deploymentHandler, stop)
// Test
// Create deployment should sync
rd, _ := tu.ApplyDeployment(
utils.ARadixDeployment().
WithAppName(anyAppName).
WithEnvironment(anyEnvironment))
op, ok := <-synced
assert.True(t, ok)
assert.True(t, op)
syncedRd, _ := radixClient.RadixV1().RadixDeployments(rd.ObjectMeta.Namespace).Get(context.TODO(), rd.GetName(), metav1.GetOptions{})
lastReconciled := syncedRd.Status.Reconciled
assert.Truef(t, !lastReconciled.Time.IsZero(), "Reconciled on status should have been set")
// Update deployment should sync. Only actual updates will be handled by the controller
noReplicas := 0
rd.Spec.Components[0].Replicas = &noReplicas
radixClient.RadixV1().RadixDeployments(rd.ObjectMeta.Namespace).Update(context.TODO(), rd, metav1.UpdateOptions{})
op, ok = <-synced
assert.True(t, ok)
assert.True(t, op)
syncedRd, _ = radixClient.RadixV1().RadixDeployments(rd.ObjectMeta.Namespace).Get(context.TODO(), rd.GetName(), metav1.GetOptions{})
assert.Truef(t, !lastReconciled.Time.IsZero(), "Reconciled on status should have been set")
assert.NotEqual(t, lastReconciled, syncedRd.Status.Reconciled)
lastReconciled = syncedRd.Status.Reconciled
// Delete service should sync
services, _ := client.CoreV1().Services(rd.ObjectMeta.Namespace).List(
context.TODO(),
metav1.ListOptions{
LabelSelector: "radix-app=test-app",
})
for _, aservice := range services.Items {
client.CoreV1().Services(rd.ObjectMeta.Namespace).Delete(context.TODO(), aservice.Name, metav1.DeleteOptions{})
op, ok = <-synced
assert.True(t, ok)
assert.True(t, op)
}
syncedRd, _ = radixClient.RadixV1().RadixDeployments(rd.ObjectMeta.Namespace).Get(context.TODO(), rd.GetName(), metav1.GetOptions{})
assert.Truef(t, !lastReconciled.Time.IsZero(), "Reconciled on status should have been set")
assert.NotEqual(t, lastReconciled, syncedRd.Status.Reconciled)
lastReconciled = syncedRd.Status.Reconciled
teardownTest()
}
func startDeploymentController(client kubernetes.Interface,
kubeutil *kube.Kube,
radixClient radixclient.Interface,
radixInformerFactory informers.SharedInformerFactory,
kubeInformerFactory kubeinformers.SharedInformerFactory,
handler *Handler, stop chan struct{}) {
eventRecorder := &record.FakeRecorder{}
waitForChildrenToSync := false
controller := NewController(
client, kubeutil, radixClient, handler,
kubeInformerFactory,
radixInformerFactory,
waitForChildrenToSync,
eventRecorder)
kubeInformerFactory.Start(stop)
radixInformerFactory.Start(stop)
controller.Run(1, stop)
}
| {
os.Unsetenv(defaults.OperatorRollingUpdateMaxUnavailable)
os.Unsetenv(defaults.OperatorRollingUpdateMaxSurge)
os.Unsetenv(defaults.OperatorReadinessProbeInitialDelaySeconds)
os.Unsetenv(defaults.OperatorReadinessProbePeriodSeconds)
} |
vm.rs | use std::collections::HashMap;
use thiserror::Error;
use crate::scripting::lua4::{Lua4Function, Lua4Instruction, Lua4Value};
#[derive(Debug, Clone, Error)]
pub enum Lua4VMError {
#[error("Missing value in stack")]
MissingStackValue,
#[error("Global {0} not found")]
GlobalNotFound(String),
#[error("Expected value to be a Closure")]
NotClosure,
#[error("Unimplemented instruction {0:?}")]
Unimplemented(Lua4Instruction),
}
pub trait Lua4VMRustClosures {
fn call_rust_closure(
&mut self,
name: &str,
parameters: Vec<Lua4Value>,
) -> Result<Vec<Lua4Value>, Lua4VMError>;
}
#[derive(Default)]
pub struct Lua4VM {
pub globals: HashMap<String, Lua4Value>,
}
impl Lua4VM {
pub fn new() -> Self {
Self::default()
}
pub fn set_global(&mut self, name: String, value: Lua4Value) {
self.globals.insert(name, value);
}
pub fn get_global(&mut self, name: &str) -> Option<&Lua4Value> {
self.globals.get(name)
}
pub fn call_lua_function<T: Lua4VMRustClosures>(
&mut self,
rust_closures: &mut T,
function: &Lua4Function,
parameters: &[Lua4Value],
) -> Result<Vec<Lua4Value>, anyhow::Error> {
let mut stack = Vec::with_capacity(function.max_stack_size as usize);
let local_stack_index = stack.len();
for i in 0..function.num_parameters as usize {
stack.push(parameters.get(i).cloned().unwrap_or(Lua4Value::Nil));
}
| pc += 1;
match instruction {
Lua4Instruction::OP_END => break,
Lua4Instruction::OP_RETURN(return_stack_index) => {
// Leave only results on stack
stack.drain(0..local_stack_index + return_stack_index as usize);
break;
}
Lua4Instruction::OP_CALL(parameter_stack_index, num_results) => {
let parameters =
stack.split_off(local_stack_index + parameter_stack_index as usize + 1);
let closure = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
let mut results = if let Lua4Value::Closure(function, _upvalues) = closure {
let function = function.clone();
self.call_lua_function(rust_closures, &function, ¶meters)?
} else if let Lua4Value::RustClosure(function_name) = closure {
log::debug!(target: "lua", "Call rust closure: {}", function_name);
rust_closures.call_rust_closure(&function_name, parameters)?
} else {
return Err(Lua4VMError::NotClosure.into());
};
results.reverse();
for _ in 0..num_results {
stack.push(results.pop().unwrap_or(Lua4Value::Nil));
}
}
// TODO: Lua4Instruction::OP_TAILCALL(u32, u32)
Lua4Instruction::OP_PUSHNIL(count) => {
for _ in 0..count {
stack.push(Lua4Value::Nil);
}
}
Lua4Instruction::OP_POP(count) => {
for _ in 0..count {
stack.pop();
}
}
Lua4Instruction::OP_PUSHINT(value) => {
stack.push(Lua4Value::Number(value as f64));
}
Lua4Instruction::OP_PUSHSTRING(kstr) => {
stack.push(Lua4Value::String(
function.constant_strings[kstr as usize].clone(),
));
}
Lua4Instruction::OP_PUSHNUM(knum) => {
stack.push(Lua4Value::Number(function.constant_numbers[knum as usize]));
}
Lua4Instruction::OP_PUSHNEGNUM(knum) => {
stack.push(Lua4Value::Number(-function.constant_numbers[knum as usize]));
}
// TODO: Lua4Instruction::OP_PUSHUPVALUE(u32)
Lua4Instruction::OP_GETLOCAL(index) => {
let value = stack
.get(local_stack_index + index as usize)
.ok_or(Lua4VMError::MissingStackValue)?
.clone();
stack.push(value);
}
Lua4Instruction::OP_GETGLOBAL(kstr) => {
let name = &function.constant_strings[kstr as usize];
let value = self
.get_global(name)
.ok_or_else(|| Lua4VMError::GlobalNotFound(name.into()))?
.clone();
stack.push(value);
}
// TODO: Lua4Instruction::OP_GETTABLE
// TODO: Lua4Instruction::OP_GETDOTTED(u32)
// TODO: Lua4Instruction::OP_GETINDEXED(u32)
// TODO: Lua4Instruction::OP_PUSHSELF(u32)
// TODO: Lua4Instruction::OP_CREATETABLE(u32)
Lua4Instruction::OP_SETLOCAL(index) => {
stack[local_stack_index + index as usize] =
stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
}
Lua4Instruction::OP_SETGLOBAL(kstr) => {
self.set_global(
function.constant_strings[kstr as usize].clone(),
stack.pop().ok_or(Lua4VMError::MissingStackValue)?,
);
}
// TODO: Lua4Instruction::OP_SETTABLE(u32, u32)
// TODO: Lua4Instruction::OP_SETLIST(u32, u32)
// TODO: Lua4Instruction::OP_SETMAP(u32)
// TODO: Lua4Instruction::OP_ADD
// TODO: Lua4Instruction::OP_ADDI(i32)
// TODO: Lua4Instruction::OP_SUB
// TODO: Lua4Instruction::OP_MULT
// TODO: Lua4Instruction::OP_DIV
// TODO: Lua4Instruction::OP_POW
// TODO: Lua4Instruction::OP_CONCAT(u32)
// TODO: Lua4Instruction::OP_MINUS
// TODO: Lua4Instruction::OP_NOT
Lua4Instruction::OP_JMPNE(target) => {
let rhs = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
let lhs = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
if lhs != rhs {
pc = (pc as i32 + target) as usize;
}
}
Lua4Instruction::OP_JMPEQ(target) => {
let rhs = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
let lhs = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
if lhs == rhs {
pc = (pc as i32 + target) as usize;
}
}
Lua4Instruction::OP_JMPLT(target) => {
let rhs = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
let lhs = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
if lhs < rhs {
pc = (pc as i32 + target) as usize;
}
}
Lua4Instruction::OP_JMPLE(target) => {
let rhs = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
let lhs = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
if lhs <= rhs {
pc = (pc as i32 + target) as usize;
}
}
Lua4Instruction::OP_JMPGT(target) => {
let rhs = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
let lhs = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
if lhs > rhs {
pc = (pc as i32 + target) as usize;
}
}
Lua4Instruction::OP_JMPGE(target) => {
let rhs = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
let lhs = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
if lhs >= rhs {
pc = (pc as i32 + target) as usize;
}
}
Lua4Instruction::OP_JMPT(target) => {
let value = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
if !matches!(value, Lua4Value::Nil) {
pc = (pc as i32 + target) as usize;
}
}
Lua4Instruction::OP_JMPF(target) => {
let value = stack.pop().ok_or(Lua4VMError::MissingStackValue)?;
if matches!(value, Lua4Value::Nil) {
pc = (pc as i32 + target) as usize;
}
}
Lua4Instruction::OP_JMPONT(target) => {
// If value on top of stack is Nil then pop it, else jump
let peek_value = stack.last().ok_or(Lua4VMError::MissingStackValue)?;
if matches!(peek_value, Lua4Value::Nil) {
stack.pop();
} else {
pc = (pc as i32 + target) as usize;
}
}
Lua4Instruction::OP_JMPONF(target) => {
// If value on top of stack is not Nil then pop it, else jump
let peek_value = stack.last().ok_or(Lua4VMError::MissingStackValue)?;
if !matches!(peek_value, Lua4Value::Nil) {
stack.pop();
} else {
pc = (pc as i32 + target) as usize;
}
}
Lua4Instruction::OP_JMP(target) => {
pc = (pc as i32 + target) as usize;
}
Lua4Instruction::OP_PUSHNILJMP => {
stack.push(Lua4Value::Nil);
pc = (pc as i32 + 1) as usize;
}
// TODO: Lua4Instruction::OP_FORPREP(i32)
// TODO: Lua4Instruction::OP_FORLOOP(i32)
// TODO: Lua4Instruction::OP_LFORPREP(i32)
// TODO: Lua4Instruction::OP_LFORLOOP(i32)
Lua4Instruction::OP_CLOSURE(kproto, b) => {
let upvalues = stack.split_off(stack.len() - b as usize);
stack.push(Lua4Value::Closure(
function.constant_functions[kproto as usize].clone(),
upvalues,
));
}
_ => {
anyhow::bail!(Lua4VMError::Unimplemented(instruction))
}
}
}
Ok(stack)
}
pub fn call_global_closure<T: Lua4VMRustClosures>(
&mut self,
rust_closures: &mut T,
name: &str,
parameters: &[Lua4Value],
) -> Result<Vec<Lua4Value>, anyhow::Error> {
let global_value = self
.get_global(name)
.ok_or_else(|| Lua4VMError::GlobalNotFound(name.into()))?;
if let Lua4Value::Closure(function, _upvalues) = global_value {
let function = function.clone();
self.call_lua_function(rust_closures, &function, parameters)
} else {
Err(Lua4VMError::NotClosure.into())
}
}
} | let mut pc = 0;
loop {
let instruction = function.instructions[pc]; |
api_surface.go | // Copyright 2021 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package multitree
import (
"android/soong/android"
"fmt"
"github.com/google/blueprint"
)
var (
pctx = android.NewPackageContext("android/soong/multitree")
)
func init() {
RegisterApiSurfaceBuildComponents(android.InitRegistrationContext)
}
var PrepareForTestWithApiSurface = android.FixtureRegisterWithContext(RegisterApiSurfaceBuildComponents)
func RegisterApiSurfaceBuildComponents(ctx android.RegistrationContext) {
ctx.RegisterModuleType("api_surface", ApiSurfaceFactory)
}
type ApiSurface struct {
android.ModuleBase
ExportableModuleBase
properties apiSurfaceProperties
allOutputs android.Paths
taggedOutputs map[string]android.Paths | }
func ApiSurfaceFactory() android.Module {
module := &ApiSurface{}
module.AddProperties(&module.properties)
android.InitAndroidModule(module)
InitExportableModule(module)
return module
}
func (surface *ApiSurface) DepsMutator(ctx android.BottomUpMutatorContext) {
if surface.properties.Contributions != nil {
ctx.AddVariationDependencies(nil, nil, surface.properties.Contributions...)
}
}
func (surface *ApiSurface) GenerateAndroidBuildActions(ctx android.ModuleContext) {
contributionFiles := make(map[string]android.Paths)
var allOutputs android.Paths
ctx.WalkDeps(func(child, parent android.Module) bool {
if contribution, ok := child.(ApiContribution); ok {
copied := contribution.CopyFilesWithTag(ctx)
for tag, files := range copied {
contributionFiles[child.Name()+"#"+tag] = files
}
for _, paths := range copied {
allOutputs = append(allOutputs, paths...)
}
return false // no transitive dependencies
}
return false
})
// phony target
ctx.Build(pctx, android.BuildParams{
Rule: blueprint.Phony,
Output: android.PathForPhony(ctx, ctx.ModuleName()),
Inputs: allOutputs,
})
surface.allOutputs = allOutputs
surface.taggedOutputs = contributionFiles
}
func (surface *ApiSurface) OutputFiles(tag string) (android.Paths, error) {
if tag != "" {
return nil, fmt.Errorf("unknown tag: %q", tag)
}
return surface.allOutputs, nil
}
func (surface *ApiSurface) TaggedOutputs() map[string]android.Paths {
return surface.taggedOutputs
}
func (surface *ApiSurface) Exportable() bool {
return true
}
var _ android.OutputFileProducer = (*ApiSurface)(nil)
var _ Exportable = (*ApiSurface)(nil)
type ApiContribution interface {
// copy files necessaryt to construct an API surface
// For C, it will be map.txt and .h files
// For Java, it will be api.txt
CopyFilesWithTag(ctx android.ModuleContext) map[string]android.Paths // output paths
// Generate Android.bp in out/ to use the exported .txt files
// GenerateBuildFiles(ctx ModuleContext) Paths //output paths
} | }
type apiSurfaceProperties struct {
Contributions []string |
blocklogic.py | from hashlib import sha256
import json
import time
import multiprocessing
import time
import numpy as np
class Block:
def __init__(
self, depth, transactions, timestamp,
previous_hash, nonce=0
):
self.depth = depth
self.transactions = transactions
self.timestamp = timestamp
self.previous_hash = previous_hash
self.nonce = nonce
def compute_hash(self):
'''
A function that return the hash of the block contents.
'''
block_str = json.dumps(self.__dict__, sort_keys=True)
return sha256(block_str.encode()).hexdigest()
def __eq__(self, other):
'''
Overloading the equality operator
'''
return self.__dict__ == other.__dict__
class Blockchain:
'''
Blockchain class;
Inspired from IBM version at the moment.
'''
difficulty = 4
block_capacity = 3
def __init__(self):
'''
Choose initial difficulty and
create the genesis block
[1] They are the orphans and stale blocks
It's a list of lists where we also
store the block leading to the orphan.
That block is stored multiple time (also
in the longest chain)
'''
# Transactions to be mined
self.outstanding_transactions = []
# Consensus chain and extensions, see [1]
self.chain = []
self.extensions = []
# Create genesis block
self.create_genesis_block()
def create_genesis_block(self):
"""
A function to generate genesis block and appends it to
the chain. The block has index 0, previous_hash as 0, and
a valid hash.
"""
genesis_block = Block(0, [], 0, "0")
genesis_block.hash = genesis_block.compute_hash()
self.chain.append(genesis_block)
@property
def last_block(self):
return self.chain[-1]
def add_block_longest(self, block, proof):
"""
Attempt to add a block after checking the validity of
the provided proof. Append to longest chain.
"""
# Reject if previous hash not accurate
if self.last_block.hash != block.previous_hash:
return False
# Reject if proof is not valid hash
if not Blockchain.is_valid_proof(block, proof):
return False
block.hash = proof
self.chain.append(block)
return True
def add_block(
self, block, proof, base_block
):
"""
Attempt to add a block after checking the validity of
the provided proof. Append to longest chain.
:param base_block: the base block receiving the potential new block
[1] If base_block is not last block in longest chain,
check all extensions for their last block. If again, none
of the extensions have the base_block as their last, create
another extension. You could have nested extensions because of
this, but shouldn't care.
"""
# If the base block is the last block
# in longest chain, just use regular add
if base_block == self.last_block:
return self.add_block_longest(block, proof)
# Previous hash should be accurate, reject otherwise
if base_block.hash != block.previous_hash:
return False
# Reject if proof is not valid hash of block
if not Blockchain.is_valid_proof(block, proof):
return False
# If checks passed, update the block's hash
block.hash = proof
# Check all extensions for the base block
# See add_block.[1]
for ext_idx in range(self.extensions):
# Check each last block in extensions
if base_block == self.extensions[ext_idx][-1]:
# If found, proceed there
self.extensions[ext_idx].append(block)
return True
# If not found there, create extension
self.extensions.append([base_block, block])
return True
def internal_consensus(self):
'''
Method to update to longest chain using possibly
larger extensions. So it checks if any extension
is longer than current chain. In case of a change,
the tail of the current chain becomes a new extension.
[1] If any update happens, return True and stop
since another one is impossible. This is because
we are calling this at each mine, so changes are
continuously updated.
'''
for ext in self.extensions:
if ext[-1].depth > self.last_block.depth:
fork_depth = ext[0].depth
# Create new extension with chain to be
# dumped
self.extensions.append(
self.chain[fork_depth:]
)
# Remove and store chain tail until
# depth of fork node, then add extension
# tail to now have longest chain
while self.last_block.depth >= fork_depth:
self.chain.pop()
self.chain = self.chain + ext
# See internal_consensus.[1]
return True
# If no internal consensus update, return False
return False
@staticmethod
def proof_of_work(block, work_time = None):
"""
Do proof of work and stop after a work_time seconds.
:param starting_nonce: can store progress
:param work_time: storing progress requires early stopping
and we're using a potentially pre-set time
| if work_time is None:
work_time = float('inf')
start = time.time()
# Start from 0, flexibility here to be debated
block.nonce = 0
# Do computational work
computed_hash = block.compute_hash()
while not computed_hash.startswith('0' * Blockchain.difficulty):
block.nonce += 1
computed_hash = block.compute_hash()
# Return if out of time
if (time.time() - start) > work_time:
return
# Return good hash
return computed_hash
def add_new_transaction(self, transaction):
self.outstanding_transactions.append(transaction)
def remove_front_transactions(self):
self.outstanding_transactions = self.outstanding_transactions[Blockchain.block_capacity:]
def get_outstanding_transactions(self):
return self.outstanding_transactions
@classmethod
def is_valid_proof(cls, block, block_hash):
"""
Check if block_hash is valid hash of block and satisfies
the difficulty criteria.
"""
return (block_hash.startswith('0' * Blockchain.difficulty) and
block_hash == block.compute_hash()) | """
# Parse work_time None to inf
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.