<|file_name|>Interface.js<|end_file_name|><|fim▁begin|>/* ************************************************************************ <|fim▁hole|> http://qooxdoo.org Copyright: 2004-2008 1&1 Internet AG, Germany, http://www.1und1.de License: LGPL: http://www.gnu.org/licenses/lgpl.html EPL: http://www.eclipse.org/org/documents/epl-v10.php See the LICENSE file in the project's top-level directory for details. Authors: * Sebastian Werner (wpbasti) * Andreas Ecker (ecker) ************************************************************************ */ /** * This class is used to define interfaces (similar to Java interfaces). * * See the description of the {@link #define} method how an interface is * defined. */ qx.Bootstrap.define("qx.Interface", { statics : { /* --------------------------------------------------------------------------- PUBLIC API --------------------------------------------------------------------------- */ /** * Define a new interface. Interface definitions look much like class definitions. * * The main difference is that the bodies of functions defined in <code>members</code> * and <code>statics</code> are called before the original function with the * same arguments. This can be used to check the passed arguments. If the * checks fail, an exception should be thrown. It is convenient to use the * method defined in {@link qx.core.MAssert} to check the arguments. * * In the <code>build</code> version the checks are omitted. * * For properties only the names are required so the value of the properties * can be empty maps. * * Example: * <pre class='javascript'> * qx.Interface.define("name", * { * extend: [SuperInterfaces], * * statics: * { * PI : 3.14 * }, * * properties: {"color": {}, "name": {} }, * * members: * { * meth1: function() {}, * meth2: function(a, b) { this.assertArgumentsCount(arguments, 2, 2); }, * meth3: function(c) { this.assertInterface(c.constructor, qx.some.Interface); } * }, * * events : * { * keydown : "qx.event.type.KeySequence" * } * }); * </pre> * * @param name {String} name of the interface * @param config {Map ? null} Interface definition structure. The configuration map has the following keys: * <table> * <tr><th>Name</th><th>Type</th><th>Description</th></tr> * <tr><th>extend</th><td>Interface |<br>Interface[]</td><td>Single interface or array of interfaces this interface inherits from.</td></tr> * <tr><th>members</th><td>Map</td><td>Map of members of the interface.</td></tr> * <tr><th>statics</th><td>Map</td><td> * Map of statics of the interface. The statics will not get copied into the target class. * This is the same behaviour as statics in mixins ({@link qx.Mixin#define}). * </td></tr> * <tr><th>properties</th><td>Map</td><td>Map of properties and their definitions.</td></tr> * <tr><th>events</th><td>Map</td><td>Map of event names and the corresponding event class name.</td></tr> * </table> */ define : function(name, config) { if (config) { // Normalize include if (config.extend && !(qx.Bootstrap.getClass(config.extend) === "Array")) { config.extend = [config.extend]; } // Validate incoming data if (qx.core.Environment.get("qx.debug")) { this.__validateConfig(name, config); } // Create interface from statics var iface = config.statics ? 
config.statics : {}; // Attach configuration if (config.extend) { iface.$$extends = config.extend; } if (config.properties) { iface.$$properties = config.properties; } if (config.members) { iface.$$members = config.members; } if (config.events) { iface.$$events = config.events; } } else { // Create empty interface var iface = {}; } // Add Basics iface.$$type = "Interface"; iface.name = name; // Attach toString iface.toString = this.genericToString; // Assign to namespace iface.basename = qx.Bootstrap.createNamespace(name, iface); // Add to registry qx.Interface.$$registry[name] = iface; // Return final interface return iface; }, /** * Returns an interface by name * * @param name {String} class name to resolve * @return {Class} the class */ getByName : function(name) { return this.$$registry[name]; }, /** * Determine if interface exists * * @param name {String} Interface name to check * @return {Boolean} true if interface exists */ isDefined : function(name) { return this.getByName(name) !== undefined; }, /** * Determine the number of interfaces which are defined * * @return {Number} the number of interfaces */ getTotalNumber : function() { return qx.Bootstrap.objectGetLength(this.$$registry); }, /** * Generates a list of all interfaces including their super interfaces * (resolved recursively) * * @param ifaces {Interface[] ? []} List of interfaces to be resolved * @return {Array} List of all interfaces */ flatten : function(ifaces) { if (!ifaces) { return []; } // we need to create a copy and not to modify the existing array var list = ifaces.concat(); for (var i=0, l=ifaces.length; i<l; i++) { if (ifaces[i].$$extends) { list.push.apply(list, this.flatten(ifaces[i].$$extends)); } } return list; }, /** * Assert members * * @param object {qx.core.Object} The object, which contains the methods * @param clazz {Class} class of the object * @param iface {Interface} the interface to verify * @param wrap {Boolean ? false} wrap functions required by interface to * check parameters etc. */ __assertMembers : function(object, clazz, iface, wrap) { // Validate members var members = iface.$$members; if (members) { for (var key in members) { if (qx.Bootstrap.isFunction(members[key])) { var isPropertyMethod = this.__isPropertyMethod(clazz, key); var hasMemberFunction = isPropertyMethod || qx.Bootstrap.isFunction(object[key]); if (!hasMemberFunction) { throw new Error( 'Implementation of method "' + key + '" is missing in class "' + clazz.classname + '" required by interface "' + iface.name + '"' ); } // Only wrap members if the interface was not been applied yet. This // can easily be checked by the recursive hasInterface method. var shouldWrapFunction = wrap === true && !isPropertyMethod && !qx.util.OOUtil.hasInterface(clazz, iface); if (shouldWrapFunction) { object[key] = this.__wrapInterfaceMember( iface, object[key], key, members[key] ); } } else { // Other members are not checked more detailed because of // JavaScript's loose type handling if (typeof object[key] === undefined) { if (typeof object[key] !== "function") { throw new Error( 'Implementation of member "' + key + '" is missing in class "' + clazz.classname + '" required by interface "' + iface.name + '"' ); } } } } } }, /** * Internal helper to detect if the method will be generated by the * property system. * * @param clazz {Class} The current class. * @param methodName {String} The name of the method. * * @return {Boolean} true, if the method will be generated by the property * system. 
*/ __isPropertyMethod: function(clazz, methodName) { var match = methodName.match(/^(is|toggle|get|set|reset)(.*)$/); if (!match) { return false; } var propertyName = qx.Bootstrap.firstLow(match[2]); var isPropertyMethod = qx.util.OOUtil.getPropertyDefinition(clazz, propertyName); if (!isPropertyMethod) { return false; } var isBoolean = match[0] == "is" || match[0] == "toggle"; if (isBoolean) { return qx.util.OOUtil.getPropertyDefinition(clazz, propertyName).check == "Boolean"; } return true; }, /** * Assert properties * * @param clazz {Class} class to check interface for * @param iface {Interface} the interface to verify */ __assertProperties : function(clazz, iface) { if (iface.$$properties) { for (var key in iface.$$properties) { if (!qx.util.OOUtil.getPropertyDefinition(clazz, key)) { throw new Error( 'The property "' + key + '" is not supported by Class "' + clazz.classname + '"!' ); } } } }, /** * Assert events * * @param clazz {Class} class to check interface for * @param iface {Interface} the interface to verify */ __assertEvents : function(clazz, iface) { if (iface.$$events) { for (var key in iface.$$events) { if (!qx.util.OOUtil.supportsEvent(clazz, key)) { throw new Error( 'The event "' + key + '" is not supported by Class "' + clazz.classname + '"!' ); } } } }, /** * Asserts that the given object implements all the methods defined in the * interface. This method throws an exception if the object does not * implement the interface. * * @param object {qx.core.Object} Object to check interface for * @param iface {Interface} The interface to verify */ assertObject : function(object, iface) { var clazz = object.constructor; this.__assertMembers(object, clazz, iface, false); this.__assertProperties(clazz, iface); this.__assertEvents(clazz, iface); // Validate extends, recursive var extend = iface.$$extends; if (extend) { for (var i=0, l=extend.length; i<l; i++) { this.assertObject(object, extend[i]); } } }, /** * Checks if an interface is implemented by a class * * @param clazz {Class} class to check interface for * @param iface {Interface} the interface to verify * @param wrap {Boolean ? false} wrap functions required by interface to * check parameters etc. */ assert : function(clazz, iface, wrap) { this.__assertMembers(clazz.prototype, clazz, iface, wrap); this.__assertProperties(clazz, iface); this.__assertEvents(clazz, iface); // Validate extends, recursive var extend = iface.$$extends; if (extend) { for (var i=0, l=extend.length; i<l; i++) { this.assert(clazz, extend[i], wrap); } } }, /* --------------------------------------------------------------------------- PRIVATE/INTERNAL API --------------------------------------------------------------------------- */ /** * This method will be attached to all interface to return * a nice identifier for them. * * @internal * @return {String} The interface identifier */ genericToString : function() { return "[Interface " + this.name + "]"; }, /** Registry of all defined interfaces */ $$registry : {}, /** * Wrap a method with a precondition check. * * @signature function(iface, origFunction, functionName, preCondition) * @param iface {String} Name of the interface, where the pre condition * was defined. (Used in error messages). * @param origFunction {Function} function to wrap. * @param functionName {String} name of the function. (Used in error messages). * @param preCondition {Function}. This function gets called with the arguments of the * original function. If this function return true the original function is called. 
* Otherwise an exception is thrown. * @return {Function} wrapped function */ __wrapInterfaceMember : qx.core.Environment.select("qx.debug", { "true": function(iface, origFunction, functionName, preCondition) { function wrappedFunction() { // call precondition preCondition.apply(this, arguments); // call original function return origFunction.apply(this, arguments); } origFunction.wrapper = wrappedFunction; return wrappedFunction; }, "default" : function() {} }), /** {Map} allowed keys in interface definition */ __allowedKeys : qx.core.Environment.select("qx.debug", { "true": { "extend" : "object", // Interface | Interface[] "statics" : "object", // Map "members" : "object", // Map "properties" : "object", // Map "events" : "object" // Map }, "default" : null }), /** * Validates incoming configuration and checks keys and values * * @signature function(name, config) * @param name {String} The name of the class * @param config {Map} Configuration map */ __validateConfig : qx.core.Environment.select("qx.debug", { "true": function(name, config) { if (qx.core.Environment.get("qx.debug")) { // Validate keys var allowed = this.__allowedKeys; for (var key in config) { if (allowed[key] === undefined) { throw new Error('The configuration key "' + key + '" in class "' + name + '" is not allowed!'); } if (config[key] == null) { throw new Error("Invalid key '" + key + "' in interface '" + name + "'! The value is undefined/null!"); } if (allowed[key] !== null && typeof config[key] !== allowed[key]) { throw new Error('Invalid type of key "' + key + '" in interface "' + name + '"! The type of the key must be "' + allowed[key] + '"!'); } } // Validate maps var maps = [ "statics", "members", "properties", "events" ]; for (var i=0, l=maps.length; i<l; i++) { var key = maps[i]; if (config[key] !== undefined && ([ "Array", "RegExp", "Date" ].indexOf(qx.Bootstrap.getClass(config[key])) != -1 || config[key].classname !== undefined)) { throw new Error('Invalid key "' + key + '" in interface "' + name + '"! The value needs to be a map!'); } } // Validate extends if (config.extend) { for (var i=0, a=config.extend, l=a.length; i<l; i++) { if (a[i] == null) { throw new Error("Extends of interfaces must be interfaces. The extend number '" + i+1 + "' in interface '" + name + "' is undefined/null!"); } if (a[i].$$type !== "Interface") { throw new Error("Extends of interfaces must be interfaces. The extend number '" + i+1 + "' in interface '" + name + "' is not an interface!"); } } } // Validate statics if (config.statics) { for (var key in config.statics) { if (key.toUpperCase() !== key) { throw new Error('Invalid key "' + key + '" in interface "' + name + '"! Static constants must be all uppercase.'); } switch(typeof config.statics[key]) { case "boolean": case "string": case "number": break; default: throw new Error('Invalid key "' + key + '" in interface "' + name + '"! Static constants must be all of a primitive type.') } } } } }, "default" : function() {} }) } });<|fim▁end|>
qooxdoo - the new era of web development
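// A minimal usage sketch for the qx.Interface API above. The interface name
// "demo.IDuck" and the hand-written Duck constructor are hypothetical; a real
// qooxdoo class would be built with qx.Class.define({implement: ...}), which
// is not shown in this file. assert() is fed a plain constructor here on the
// assumption that it only needs .prototype and .classname, as the error
// messages above suggest.
var IDuck = qx.Interface.define("demo.IDuck", {
  members: {
    // Pre-condition body: in debug builds this runs before the real quack().
    quack: function (volume) {
      if (typeof volume !== "number") {
        throw new Error("volume must be a number");
      }
    }
  }
});

function Duck() {}
Duck.classname = "demo.Duck";
Duck.prototype.quack = function (volume) {
  return "quack at volume " + volume;
};

qx.Interface.assert(Duck, IDuck, true);             // throws if quack() were missing
console.log(qx.Interface.isDefined("demo.IDuck"));  // -> true
console.log(String(IDuck));                         // -> "[Interface demo.IDuck]"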
<|file_name|>basic-worker.js<|end_file_name|>
'use strict'

const getNamespace = require('continuation-local-storage').getNamespace
const Promise = require('bluebird')
const WorkerStopError = require('error-cat/errors/worker-stop-error')

const Ponos = require('../')

/**
 * A simple worker that will publish a message to a queue.
 * @param {object} job Object describing the job.
 * @param {string} job.queue Queue on which the message will be published.
 * @returns {promise} Resolved when the message is put on the queue.
 */
function basicWorker (job) {
  return Promise.try(() => {
    const tid = getNamespace('ponos').get('tid')
    if (!job.message) {
      throw new WorkerStopError('message is required', { tid: tid })
    }
    console.log(`hello world: ${job.message}. tid: ${tid}`)
  })
}

const server = new Ponos.Server({
  tasks: {
    'basic-queue-worker': basicWorker
  },
  events: {
    'basic-event-worker': basicWorker
  }
})

server.start()
  .then(() => { console.log('server started') })
  .catch((err) => { console.error('server error:', err.stack || err.message || err) })

process.on('SIGINT', () => {
  server.stop()
    .then(() => { console.log('server stopped') })
    .catch((err) => { console.error('server error:', err.stack || err.message || err) })
})
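// A standalone sketch of driving basicWorker() without RabbitMQ. The worker
// reads `tid` from the 'ponos' CLS namespace, which Ponos.Server normally
// creates; this harness sets one up by hand, so treat it as a test rig and
// not as documented ponos API.
const cls = require('continuation-local-storage')

const ns = cls.createNamespace('ponos')
ns.run(() => {
  ns.set('tid', 'local-test-tid')
  basicWorker({ message: 'hi' })    // logs "hello world: hi. tid: local-test-tid"
  basicWorker({})                   // no `message`, so the worker rejects
    .catch((err) => console.error(err.message))  // -> "message is required"
})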
<|file_name|>parallel_nanmeanGaussWeightedFilterOptimizedSize20.py<|end_file_name|>
import sys
sys.path.append("/mnt/moehlc/home/idaf_library")

#import mahotas
import vigra
import libidaf.idafIO as io
import numpy as np
from scipy import ndimage
from scipy.stats import nanmean
#import matplotlib.pyplot as plt
import time
import pickle
import os
import multiprocessing as mp


def gaussWeight(dat, sigma, mu):
    return 1. / np.sqrt(2 * np.pi * np.square(sigma)) * np.exp(-np.square(dat - mu) / (2 * np.square(sigma)))


def streaming3Dfilter(data, outdata, sigma):
    fsize = int(np.round(sigma * 3))  # filter size
    amax = np.array(data.shape) - 1  # max index
    amin = amax - amax  # min index
    xyz = np.array(data.nonzero())  # coordinates
    x = xyz[0, :]
    y = xyz[1, :]
    z = xyz[2, :]
    datxyz = np.array(data[x, y, z])
    for i in range(amax[0] + 1):  # x dim
        for j in range(amax[1] + 1):  # y dim
            for k in range(amax[2] + 1):  # z dim
                dist = np.sqrt(np.square(i - x) + np.square(j - y) + np.square(k - z))
                ind = dist <= fsize
                weight = gaussWeight(dist[ind], sigma, 0)
                datsel = datxyz[ind]
                if datsel.size == 0:
                    outdata[i, j, k] = np.nan
                else:
                    outdata[i, j, k] = np.average(datsel, weights=weight)
        print('writing slice ' + str(i) + ' to ' + outdata.filename)
        print('progress: ' + str(i / float(amax[0]) * 100) + ' percent done')
        outdata.flush()  # write to disk


def importStack(path, fname, tmpStackDir):
    absname = path + fname
    zsize = vigra.impex.numberImages(absname)
    im = vigra.readImage(absname, index=0, dtype='FLOAT')
    #vol = np.zeros([im.height,im.width,zsize])
    try:
        os.makedirs(tmpStackDir)
    except:
        print(tmpStackDir + ' already exists')
    vol = np.memmap(tmpStackDir + fname[0:-4], dtype='float64', mode='w+', shape=(im.height, im.width, zsize))
    #raise('hallo')
    for i in range(zsize):
        print("importing slice " + str(i) + ' of file ' + fname)
        im = np.squeeze(vigra.readImage(absname, index=i, dtype='FLOAT'))
        vol[:, :, i] = im
    vol.flush()
    return vol


def filterAndSave(fname, path, savepath, filterSize, volpath):
    vol = importStack(path, fname, volpath)
    try:
        os.makedirs(savepath)
    except:
        print(savepath + ' already exists')
    res = np.memmap(savepath + 'filtered_Size_' + str(filterSize) + fname, dtype='float64', mode='w+', shape=vol.shape)
    streaming3Dfilter(vol, res, filterSize)


def filterAndSave_batch(pattern, path, savepath, filterSize, volpath):
    fnames = io.getFilelistFromDir(path, pattern)  # list of tiff stacks to be filtered
    for i in range(len(fnames)):
    #for i in range(1):
        print('start filter process for ' + fnames[i])
        mp.Process(target=filterAndSave, args=(fnames[i], path, savepath, filterSize, volpath)).start()  # parallel processing


def filterAndSave_batch_serial(pattern, path, savepath, filterSize, volpath):
    fnames = io.getFilelistFromDir(path, pattern)  # list of tiff stacks to be filtered
    for i in range(len(fnames)):
    #for i in range(1):
        print('start filter process for ' + fnames[i])
        filterAndSave(fnames[i], path, savepath, filterSize, volpath)  # serial processing


if __name__ == '__main__':
    path = '/home/moehlc/raman_bloodvessel_dat/segmented/angio_wt/'
    savepath = '/home/moehlc/raman_bloodvessel_dat/filteredVoldDatGauss1/angio_wt/'
    volpath = '/home/moehlc/raman_bloodvessel_dat/rawVoldat2/angio_wt/'
    filterSize = 20
    filterAndSave_batch('flowSkel', path, savepath, filterSize, volpath)
    filterAndSave_batch('distanceSkel', path, savepath, filterSize, volpath)
    #filterAndSave_batch_serial('flowSkel',path,savepath,filterSize)
    #filterAndSave_batch('distanceSkel',path,savepath,filterSize)
<|file_name|>lighthouse-report-viewer.js<|end_file_name|><|fim▁begin|>/** * @license Copyright 2017 The Lighthouse Authors. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ 'use strict'; /* global DOM, ViewerUIFeatures, ReportRenderer, DragAndDrop, GithubApi, PSIApi, logger, idbKeyval */ /** @typedef {import('./psi-api').PSIParams} PSIParams */ /**<|fim▁hole|> * @return {HTMLElement} */ function find(query, context) { /** @type {?HTMLElement} */ const result = context.querySelector(query); if (result === null) { throw new Error(`query ${query} not found`); } return result; } /** * Class that manages viewing Lighthouse reports. */ class LighthouseReportViewer { constructor() { this._onPaste = this._onPaste.bind(this); this._onSaveJson = this._onSaveJson.bind(this); this._onFileLoad = this._onFileLoad.bind(this); this._onUrlInputChange = this._onUrlInputChange.bind(this); this._dragAndDropper = new DragAndDrop(this._onFileLoad); this._github = new GithubApi(); this._psi = new PSIApi(); /** * Used for tracking whether to offer to upload as a gist. * @type {boolean} */ this._reportIsFromGist = false; this._reportIsFromPSI = false; this._reportIsFromJSON = false; this._addEventListeners(); this._loadFromDeepLink(); this._listenForMessages(); } static get APP_URL() { return `${location.origin}${location.pathname}`; } /** * Initialize event listeners. * @private */ _addEventListeners() { document.addEventListener('paste', this._onPaste); const gistUrlInput = find('.js-gist-url', document); gistUrlInput.addEventListener('change', this._onUrlInputChange); // Hidden file input to trigger manual file selector. const fileInput = find('#hidden-file-input', document); fileInput.addEventListener('change', e => { if (!e.target) { return; } const inputTarget = /** @type {HTMLInputElement} */ (e.target); if (inputTarget.files) { this._onFileLoad(inputTarget.files[0]); } inputTarget.value = ''; }); // A click on the visual placeholder will trigger the hidden file input. const placeholderTarget = find('.viewer-placeholder-inner', document); placeholderTarget.addEventListener('click', e => { const target = /** @type {?Element} */ (e.target); if (target && target.localName !== 'input' && target.localName !== 'a') { fileInput.click(); } }); } /** * Attempts to pull gist id from URL and render report from it. * @return {Promise<void>} * @private */ _loadFromDeepLink() { const params = new URLSearchParams(location.search); const gistId = params.get('gist'); const psiurl = params.get('psiurl'); const jsonurl = params.get('jsonurl'); if (!gistId && !psiurl && !jsonurl) return Promise.resolve(); this._toggleLoadingBlur(true); let loadPromise = Promise.resolve(); if (psiurl) { loadPromise = this._fetchFromPSI({ url: psiurl, category: params.has('category') ? 
params.getAll('category') : undefined, strategy: params.get('strategy') || undefined, locale: params.get('locale') || undefined, utm_source: params.get('utm_source') || undefined, }); } else if (gistId) { loadPromise = this._github.getGistFileContentAsJson(gistId).then(reportJson => { this._reportIsFromGist = true; this._replaceReportHtml(reportJson); }).catch(err => logger.error(err.message)); } else if (jsonurl) { const firebaseAuth = this._github.getFirebaseAuth(); loadPromise = firebaseAuth.getAccessTokenIfLoggedIn() .then(token => { return token ? Promise.reject(new Error('Can only use jsonurl when not logged in')) : null; }) .then(() => fetch(jsonurl)) .then(resp => resp.json()) .then(json => { this._reportIsFromJSON = true; this._replaceReportHtml(json); }) .catch(err => logger.error(err.message)); } return loadPromise.finally(() => this._toggleLoadingBlur(false)); } /** * Basic Lighthouse report JSON validation. * @param {LH.Result} reportJson * @private */ _validateReportJson(reportJson) { if (!reportJson.lighthouseVersion) { throw new Error('JSON file was not generated by Lighthouse'); } // Leave off patch version in the comparison. const semverRe = new RegExp(/^(\d+)?\.(\d+)?\.(\d+)$/); const reportVersion = reportJson.lighthouseVersion.replace(semverRe, '$1.$2'); const lhVersion = window.LH_CURRENT_VERSION.replace(semverRe, '$1.$2'); if (reportVersion < lhVersion) { // TODO: figure out how to handler older reports. All permalinks to older // reports will start to throw this warning when the viewer rev's its // minor LH version. // See https://github.com/GoogleChrome/lighthouse/issues/1108 logger.warn('Results may not display properly.\n' + 'Report was created with an earlier version of ' + `Lighthouse (${reportJson.lighthouseVersion}). The latest ` + `version is ${window.LH_CURRENT_VERSION}.`); } } /** * @param {LH.Result} json * @private */ // TODO: Really, `json` should really have type `unknown` and // we can have _validateReportJson verify that it's an LH.Result _replaceReportHtml(json) { // Allow users to view the runnerResult if ('lhr' in json) { json = /** @type {LH.RunnerResult} */ (json).lhr; } // Install as global for easier debugging // @ts-ignore window.__LIGHTHOUSE_JSON__ = json; // eslint-disable-next-line no-console console.log('window.__LIGHTHOUSE_JSON__', json); this._validateReportJson(json); // Redirect to old viewer if a v2 report. v3, v4, v5 handled by v5 viewer. if (json.lighthouseVersion.startsWith('2')) { this._loadInLegacyViewerVersion(json); return; } const dom = new DOM(document); const renderer = new ReportRenderer(dom); const container = find('main', document); try { renderer.renderReport(json, container); // Only give gist-saving callback if current report isn't from a gist. let saveCallback = null; if (!this._reportIsFromGist) { saveCallback = this._onSaveJson; } // Only clear query string if current report isn't from a gist or PSI. if (!this._reportIsFromGist && !this._reportIsFromPSI && !this._reportIsFromJSON) { history.pushState({}, '', LighthouseReportViewer.APP_URL); } const features = new ViewerUIFeatures(dom, saveCallback); features.initFeatures(json); } catch (e) { logger.error(`Error rendering report: ${e.message}`); dom.resetTemplates(); // TODO(bckenny): hack container.textContent = ''; throw e; } finally { this._reportIsFromGist = this._reportIsFromPSI = this._reportIsFromJSON = false; } // Remove the placeholder UI once the user has loaded a report. 
const placeholder = document.querySelector('.viewer-placeholder'); if (placeholder) { placeholder.remove(); } if (window.ga) { window.ga('send', 'event', 'report', 'view'); } } /** * Updates the page's HTML with contents of the JSON file passed in. * @param {File} file * @return {Promise<void>} * @throws file was not valid JSON generated by Lighthouse or an unknown file * type was used. * @private */ _onFileLoad(file) { return this._readFile(file).then(str => { let json; try { json = JSON.parse(str); } catch (e) { throw new Error('Could not parse JSON file.'); } this._replaceReportHtml(json); }).catch(err => logger.error(err.message)); } /** * Stores v2.x report in IDB, then navigates to legacy viewer in current tab. * @param {LH.Result} reportJson * @private */ _loadInLegacyViewerVersion(reportJson) { const warnMsg = `Version mismatch between viewer and JSON. Opening compatible viewer...`; logger.log(warnMsg, false); // Place report in IDB, then navigate current tab to the legacy viewer const viewerPath = new URL('../viewer2x/', location.href); idbKeyval.set('2xreport', reportJson).then(_ => { window.location.href = viewerPath.href; }); } /** * Reads a file and returns its content as a string. * @param {File} file * @return {Promise<string>} * @private */ _readFile(file) { return new Promise((resolve, reject) => { const reader = new FileReader(); reader.onload = function(e) { const readerTarget = /** @type {?FileReader} */ (e.target); const result = /** @type {?string} */ (readerTarget && readerTarget.result); if (!result) { reject('Could not read file'); return; } resolve(result); }; reader.onerror = reject; reader.readAsText(file); }); } /** * Saves the current report by creating a gist on GitHub. * @param {LH.Result} reportJson * @return {Promise<string|void>} id of the created gist. * @private */ _onSaveJson(reportJson) { if (window.ga) { window.ga('send', 'event', 'report', 'share'); } // TODO: find and reuse existing json gist if one exists. return this._github.createGist(reportJson).then(id => { if (window.ga) { window.ga('send', 'event', 'report', 'created'); } history.pushState({}, '', `${LighthouseReportViewer.APP_URL}?gist=${id}`); return id; }).catch(err => logger.log(err.message)); } /** * Enables pasting a JSON report or gist URL on the page. * @param {ClipboardEvent} e * @private */ _onPaste(e) { if (!e.clipboardData) return; e.preventDefault(); // Try paste as gist URL. try { const url = new URL(e.clipboardData.getData('text')); this._loadFromGistURL(url.href); if (window.ga) { window.ga('send', 'event', 'report', 'paste-link'); } } catch (err) { // noop } // Try paste as json content. try { const json = JSON.parse(e.clipboardData.getData('text')); this._replaceReportHtml(json); if (window.ga) { window.ga('send', 'event', 'report', 'paste'); } } catch (err) { } } /** * Handles changes to the gist url input. * @param {Event} e * @private */ _onUrlInputChange(e) { e.stopPropagation(); if (!e.target) { return; } const inputElement = /** @type {HTMLInputElement} */ (e.target); try { this._loadFromGistURL(inputElement.value); } catch (err) { logger.error('Invalid URL'); } } /** * Loads report json from gist URL, if valid. Updates page URL with gist ID * and loads from github. * @param {string} urlStr Gist URL. 
* @private */ _loadFromGistURL(urlStr) { try { const url = new URL(urlStr); if (url.origin !== 'https://gist.github.com') { logger.error('URL was not a gist'); return; } const match = url.pathname.match(/[a-f0-9]{5,}/); if (match) { history.pushState({}, '', `${LighthouseReportViewer.APP_URL}?gist=${match[0]}`); this._loadFromDeepLink(); } } catch (err) { logger.error('Invalid URL'); } } /** * Initializes of a `message` listener to respond to postMessage events. * @private */ _listenForMessages() { window.addEventListener('message', e => { if (e.source === self.opener && e.data.lhresults) { this._replaceReportHtml(e.data.lhresults); if (self.opener && !self.opener.closed) { self.opener.postMessage({rendered: true}, '*'); } if (window.ga) { window.ga('send', 'event', 'report', 'open in viewer'); } } }); // If the page was opened as a popup, tell the opening window we're ready. if (self.opener && !self.opener.closed) { self.opener.postMessage({opened: true}, '*'); } } /** * @param {PSIParams} params */ _fetchFromPSI(params) { logger.log('Waiting for Lighthouse results ...'); return this._psi.fetchPSI(params).then(response => { logger.hide(); if (!response.lighthouseResult) { if (response.error) { // eslint-disable-next-line no-console console.error(response.error); logger.error(response.error.message); } else { logger.error('PSI did not return a Lighthouse Result'); } return; } this._reportIsFromPSI = true; this._replaceReportHtml(response.lighthouseResult); }); } /** * @param {boolean} force */ _toggleLoadingBlur(force) { const placeholder = document.querySelector('.viewer-placeholder-inner'); if (placeholder) placeholder.classList.toggle('lh-loading', force); } } // node export for testing. if (typeof module !== 'undefined' && module.exports) { module.exports = LighthouseReportViewer; }<|fim▁end|>
* Guaranteed context.querySelector. Always returns an element or throws if * nothing matches query. * @param {string} query * @param {ParentNode} context
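// Opener-side counterpart to _listenForMessages() above, sketched from the
// handshake the viewer implements: the viewer posts {opened: true} once
// ready, accepts a Lighthouse result as `lhresults`, and replies with
// {rendered: true}. The viewer URL is an assumption for the example; `lhr`
// is whatever LH.Result object the caller already holds.
function sendToViewer(lhr) {
  const viewer = window.open('https://googlechrome.github.io/lighthouse/viewer/')

  window.addEventListener('message', (e) => {
    if (e.source !== viewer) return
    if (e.data.opened) {
      viewer.postMessage({ lhresults: lhr }, '*')
    } else if (e.data.rendered) {
      console.log('viewer finished rendering the report')
    }
  })
}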
<|file_name|>udp.py<|end_file_name|><|fim▁begin|>''' Created on 9 jan. 2013 @author: sander ''' from bitstring import BitStream, ConstBitStream, Bits from ipaddress import IPv4Address, IPv6Address from pylisp.packet.ip import protocol_registry from pylisp.packet.ip.protocol import Protocol from pylisp.utils import checksum import numbers class UDPMessage(Protocol): header_type = 17 def __init__(self, source_port=0, destination_port=0, checksum=0, payload=''): # Call the superclass constructor super(UDPMessage, self).__init__(payload=payload) self.source_port = source_port self.destination_port = destination_port self.checksum = checksum def sanitize(self): ''' Check if the current settings conform to the RFC and fix where possible ''' # Check ports if not isinstance(self.source_port, numbers.Integral) \ or self.source_port < 0 \ or self.source_port >= 2 ** 16: raise ValueError('Invalid source port') if not isinstance(self.destination_port, numbers.Integral) \ or self.destination_port < 0 \ or self.destination_port >= 2 ** 16: raise ValueError('Invalid destination port') # We can't calculate the checksum because we don't know enough by # ourself to construct the pseudo-header def generate_pseudo_header(self, source, destination): # Calculate the length of the UDP layer udp_length = 8 + len(bytes(self.payload)) if isinstance(source, IPv4Address) \ and isinstance(destination, IPv4Address): # Generate an IPv4 pseudo-header header = BitStream('uint:32=%d, ' 'uint:32=%d, ' 'uint:16=17, ' 'uint:16=%d' % (int(source), int(destination), udp_length)) elif isinstance(source, IPv6Address) \ and isinstance(destination, IPv6Address): # Generate an IPv6 pseudo-header header = BitStream('uint:128=%d, ' 'uint:128=%d, ' 'uint:32=%d, ' 'uint:32=17' % (int(source), int(destination), udp_length)) else: raise ValueError('Source and destination must belong to the same ' 'IP version') # Return the header bytes return header.bytes def calculate_checksum(self, source, destination): # Calculate the pseudo-header for the checksum calculation pseudo_header = self.generate_pseudo_header(source, destination) # Remember the current checksum, generate a message and restore the # original checksum old_checksum = self.checksum self.checksum = 0 message = self.to_bytes() self.checksum = old_checksum # Calculate the checksum my_checksum = checksum.ones_complement(pseudo_header + message) # If the computed checksum is zero, it is transmitted as all ones (the # equivalent in one's complement arithmetic). An all zero transmitted # checksum value means that the transmitter generated no checksum (for # debugging or for higher level protocols that don't care). if my_checksum == 0: my_checksum = 0xffff return my_checksum def verify_checksum(self, source, destination): # An all zero transmitted checksum value means that the transmitter # generated no checksum (for debugging or for higher level protocols # that don't care). 
if self.checksum == 0: return True return self.checksum == self.calculate_checksum(source, destination) def get_lisp_message(self, only_data=False, only_control=False): # Check the UDP ports lisp_data = (self.source_port == 4341 or self.destination_port == 4341) lisp_control = (self.source_port == 4342 or self.destination_port == 4342) if lisp_data and lisp_control: raise ValueError("Cannot mix LISP data and control ports") from pylisp.packet.lisp.control.base import ControlMessage from pylisp.packet.lisp.data import DataPacket<|fim▁hole|> if not isinstance(self.payload, DataPacket): raise ValueError("Payload is not a LISP data packet") return self.payload elif lisp_control or only_control: if not isinstance(self.payload, ControlMessage): raise ValueError("Payload is not a LISP control message") return self.payload else: raise ValueError("No LISP content found") def get_lisp_data_packet(self): return self.get_lisp_message(only_data=True) def get_lisp_control_message(self): return self.get_lisp_message(only_control=True) @classmethod def from_bytes(cls, bitstream): ''' Parse the given packet and update properties accordingly ''' packet = cls() # Convert to ConstBitStream (if not already provided) if not isinstance(bitstream, ConstBitStream): if isinstance(bitstream, Bits): bitstream = ConstBitStream(auto=bitstream) else: bitstream = ConstBitStream(bytes=bitstream) # Read the source and destination ports (packet.source_port, packet.destination_port) = bitstream.readlist('2*uint:16') # Store the length length = bitstream.read('uint:16') if length < 8: raise ValueError('Invalid UDP length') # Read the checksum packet.checksum = bitstream.read('uint:16') # And the rest is payload payload_bytes = length - 8 packet.payload = bitstream.read('bytes:%d' % payload_bytes) # LISP-specific handling if packet.source_port == 4341 or packet.destination_port == 4341: # Payload is a LISP data packet from pylisp.packet.lisp.data import DataPacket packet.payload = DataPacket.from_bytes(packet.payload) elif packet.source_port == 4342 or packet.destination_port == 4342: # Payload is a LISP control message from pylisp.packet.lisp.control.base import ControlMessage packet.payload = ControlMessage.from_bytes(packet.payload) # There should be no remaining bits if bitstream.pos != bitstream.len: raise ValueError('Bits remaining after processing packet') # Verify that the properties make sense packet.sanitize() return packet def to_bytes(self): ''' Create bytes from properties ''' # Verify that the properties make sense self.sanitize() # Write the source and destination ports bitstream = BitStream('uint:16=%d, ' 'uint:16=%d' % (self.source_port, self.destination_port)) # Write the length payload_bytes = bytes(self.payload) length = len(payload_bytes) + 8 bitstream += BitStream('uint:16=%d' % length) # Write the checksum bitstream += BitStream('uint:16=%d' % self.checksum) return bitstream.bytes + payload_bytes # Register this header type protocol_registry.register_type_class(UDPMessage)<|fim▁end|>
if lisp_data or only_data:
<|file_name|>AnalysisWinPIRALog_LINUX.py<|end_file_name|><|fim▁begin|># coding=utf-8 from __future__ import absolute_import, division, print_function, unicode_literals """ Name: AnalysisWinPIRALog_LINUX Author: Andy Liu Email : [email protected] Created: 3/24/2015 Copyright: Copyright ©Intel Corporation. All rights reserved. Licence: This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import argparse import logging import os import re import sys import xlwt from copy import deepcopy from pprint import pformat from AnalysisWinPIRALog.MyLog import init_logger from encoder import XML2Dict class AnalysisLog: def __init__(self, _config_file, _log_file): self._config_file = _config_file self._log_file = _log_file self.config_dict = dict() self.log_dict = dict() self.result_list = list() self.start = re.compile(r'^[0-9a-f]{2}:[0-9a-f]{2}\.\d') self._key_word = 'DEV_NAME' self.return_value = True def parse_config_file(self): logging.debug('Into function parse_config_file') with open(self._config_file, 'r') as f: _xml_str = f.read() try: _obj = XML2Dict(coding='utf-8') self.config_dict = _obj.parse(_xml_str) logging.debug('config_dict : %s' % pformat(self.config_dict)) logging.info('Parse config file done') return self.config_dict except Exception, e: logging.error("Can't parse as XML!") logging.exception(e) sys.exit(1) # def warning_duplicate_dev_add(self): # logging.debug('Into warning_duplicate_dev_add') # _verify_list = list() # for _dev_type, _expect_values in self.config_dict.get('xml').iteritems(): # if isinstance(_expect_values, list): # for _expect_value in _expect_values: # if _expect_value.get(self._key_word) in _verify_list: # logging.error('Duplicate device address : %s' % _expect_value.get(self._key_word)) # sys.exit(1) # else: # _verify_list.append(_expect_value.get(self._key_word)) # elif isinstance(_expect_values, dict): # if _expect_values.get(self._key_word) in _verify_list: # logging.error('Duplicate device address : %s' % _expect_values.get(self._key_word)) # sys.exit(1) # else: # _verify_list.append(_expect_values.get(self._key_word)) # if len(_verify_list) == 0: # logging.error("Can't find key word <%s>" % self._key_word) # sys.exit(1) # logging.info('Verify duplicate device address done') # return True def parse_log_file(self): logging.debug('Into parse_log_file') _record = dict() _dev_name = '' with open(self._log_file, 'r') as f: # remove header and footer in log file for _line in f.readlines(): _line = _line.strip() if _line and ':' in _line: if re.findall(self.start, _line): if _record: self.log_dict.update({_dev_name.strip(): deepcopy(_record)}) _record.clear() _bus_no, _dev_name = _line.split(' ', 1) _record.update({'BUS_NO': _bus_no.strip(), 'DEV_NAME': _dev_name.strip()}) else: _key, _value = _line.split(':', 1) _record.update({_key.strip(): _value.strip()}) else: self.log_dict.update({_dev_name.strip(): deepcopy(_record)}) pass logging.debug('log_dict : %s' % pformat(self.log_dict)) logging.info('Parse log file 
done') return self.log_dict def verify_result(self): for _dev_type, _expect_values in self.config_dict.get('xml').iteritems(): if isinstance(_expect_values, list): logging.debug('_expect_values is list') for _expect_value in _expect_values: _key_word = _expect_value.get(self._key_word) if _key_word in self.log_dict: _record = self.log_dict.get(_key_word) if self.compare_result(_expect_value, _record): _record.update({'Result': 'PASSED'}) else: _record.update({'Result': 'FAILED'}) self.return_value = False self.result_list.append(_record) else: self.result_list.append({self._key_word: _key_word, 'Result': 'Not Found'}) self.return_value = False<|fim▁hole|> if _key_word in self.log_dict: _record = self.log_dict.get(_key_word) if self.compare_result(_expect_values, _record): _record.update({'Result': 'PASSED'}) else: _record.update({'Result': 'FAILED'}) self.return_value = False self.result_list.append(_record) else: self.result_list.append({self._key_word: _key_word, 'Result': 'Not Found'}) self.return_value = False logging.debug('result_list : %s' % pformat(self.result_list)) logging.info('Verify result done') @staticmethod def compare_result(_expect_value, _record): """ expect_value: {'DEV_NAME': 'PCI bridge: Intel Corporation Haswell-E PCI Express Root Port 1 (rev 02) (prog-if 00 [Normal decode])'} _record: {'ACSCap': 'SrcValid+ TransBlk+ ReqRedir+ CmpltRedir+ UpstreamFwd+ EgressCtrl- DirectTrans-', 'ACSCtl': 'SrcValid- TransBlk- ReqRedir- CmpltRedir- UpstreamFwd- EgressCtrl- DirectTrans-', 'AERCap': 'First Error Pointer: 00, GenCap- CGenEn- ChkCap- ChkEn-', 'Address': 'fee00438 Data: 0000', 'BUS_NO': '00:01.0', 'BridgeCtl': 'Parity+ SERR+ NoISA- VGA- MAbort- >Reset- FastB2B-', 'Bus': 'primary=00, secondary=01, subordinate=01, sec-latency=0', 'CEMsk': 'RxErr- BadTLP- BadDLLP- Rollover- Timeout- NonFatalErr-', 'CESta': 'RxErr- BadTLP- BadDLLP- Rollover- Timeout- NonFatalErr-', 'Capabilities': '[300 v1] Vendor Specific Information: ID=0008 Rev=0 Len=038 <?>', 'Changed': 'MRL- PresDet- LinkState+', 'Compliance De-emphasis': '-6dB', 'Control': 'AttnInd Off, PwrInd Off, Power- Interlock-', 'DEV_NAME': 'PCI bridge: Intel Corporation Haswell-E PCI Express Root Port 1 (rev 02) (prog-if 00 [Normal decode])', 'DevCap': 'MaxPayload 256 bytes, PhantFunc 0, Latency L0s <64ns, L1 <1us', 'DevCap2': 'Completion Timeout: Range BCD, TimeoutDis+, LTR-, OBFF Not Supported ARIFwd+', 'DevCtl': 'Report errors: Correctable- Non-Fatal+ Fatal+ Unsupported-', 'DevCtl2': 'Completion Timeout: 260ms to 900ms, TimeoutDis-, LTR-, OBFF Disabled ARIFwd+', 'DevSta': 'CorrErr- UncorrErr- FatalErr- UnsuppReq- AuxPwr- TransPend-', 'Flags': 'PMEClk- DSI- D1- D2- AuxCurrent=0mA PME(D0+,D1-,D2-,D3hot+,D3cold+)', 'I/O behind bridge': '0000f000-00000fff', 'Kernel driver in use': 'pcieport', 'Kernel modules': 'shpchp', 'Latency': '0', 'LnkCap': 'Port #1, Speed 8GT/s, Width x8, ASPM L1, Latency L0 <512ns, L1 <16us', 'LnkCtl': 'ASPM Disabled; RCB 64 bytes Disabled- Retrain- CommClk+', 'LnkCtl2': 'Target Link Speed: 8GT/s, EnterCompliance- SpeedDis-', 'LnkSta': 'Speed 8GT/s, Width x8, TrErr- Train- SlotClk+ DLActive+ BWMgmt- ABWMgmt-', 'LnkSta2': 'Current De-emphasis Level: -6dB, EqualizationComplete+, EqualizationPhase1+', 'Masking': '00000003 Pending: 00000000', 'Memory behind bridge': '91c00000-91cfffff', 'Prefetchable memory behind bridge': '0000383ffc000000-0000383ffdffffff', 'RootCap': 'CRSVisible-', 'RootCtl': 'ErrCorrectable- ErrNon-Fatal+ ErrFatal+ PMEIntEna- CRSVisible-', 'RootSta': 'PME ReqID 0000, PMEStatus- PMEPending-', 
'Secondary status': '66MHz- FastB2B- ParErr- DEVSEL=fast >TAbort- <TAbort- <MAbort+ <SERR- <PERR-', 'SltCap': 'AttnBtn- PwrCtrl- MRL- AttnInd- PwrInd- HotPlug- Surprise-', 'SltCtl': 'Enable: AttnBtn- PwrFlt- MRL- PresDet- CmdCplt- HPIrq- LinkChg-', 'SltSta': 'Status: AttnBtn- PowerFlt- MRL- CmdCplt- PresDet+ Interlock-', 'Status': 'D0 NoSoftRst+ PME-Enable- DSel=0 DScale=0 PME-', 'Transmit Margin': 'Normal Operating Range, EnterModifiedCompliance- ComplianceSOS-', 'UEMsk': 'DLP- SDES- TLP- FCP- CmpltTO- CmpltAbrt+ UnxCmplt- RxOF- MalfTLP- ECRC- UnsupReq+ ACSViol-', 'UESta': 'DLP- SDES- TLP- FCP- CmpltTO- CmpltAbrt- UnxCmplt- RxOF- MalfTLP- ECRC- UnsupReq- ACSViol-', 'UESvrt': 'DLP+ SDES+ TLP+ FCP+ CmpltTO+ CmpltAbrt+ UnxCmplt+ RxOF+ MalfTLP+ ECRC- UnsupReq- ACSViol+'} """ _return_value = True _reason = list() _pattern = re.compile(r'Speed\s*(.*),\s*Width\s*(\w*),') if 'LnkCap' in _record: if 'LnkSta' in _record: logging.debug('the key word LnkCap in log : %s' % (pformat(_record.get('LnkCap')))) logging.debug('the key word LnkSta in log : %s' % (pformat(_record.get('LnkSta')))) l_LnkCap = _pattern.findall(_record.get('LnkCap'))[0] logging.debug('l_LnkCap : %s' % pformat(l_LnkCap)) l_LnkSta = _pattern.findall(_record.get('LnkSta'))[0] logging.debug('l_LnkSta : %s' % pformat(l_LnkSta)) if l_LnkCap == l_LnkSta: logging.debug('Speed and Width compare PASSED') else: _reason.append('Speed and Width compare FAILED') logging.debug('Speed and Width compare FAILED') _return_value = False else: _reason.append('the key word <LnkSta> is not include in log %s' % (pformat(_record))) logging.debug('the key word LnkSta is not include in log %s' % (pformat(_record))) _return_value = False else: _reason.append('the key word <LnkCap> is not include in log %s' % (pformat(_record))) logging.debug('the key word LnkCap is not include in log %s' % (pformat(_record))) _return_value = False _record.update({'Reason': _reason}) return _return_value def output_detail_result(self, output_file): _show_list = ['Result', 'Reason', 'BUS_NO', 'DEV_NAME'] fp = xlwt.Workbook() table = fp.add_sheet('Detail Result') for _idx, _title in enumerate(_show_list): table.write(0, _idx, _title) for _row, _record in enumerate(self.result_list): for _column, _title in enumerate(_show_list): if _title in _record: if isinstance(_record.get(_title), list): _text = '\n'.join(_record.get(_title)) else: _text = _record.get(_title) else: _text = '' table.write(_row + 1, _column, _text) fp.save(output_file) def parse_command_line(): """ parse command line """ parser = argparse.ArgumentParser() parser.add_argument('--logfile', '-l', action="store", dest="log_file", help="log file path") parser.add_argument('--configfile', '-c', action="store", dest="config_file", help="config file path") parser.add_argument('--outputfile', '-o', action="store", dest="output_file", help="output file path") parser.add_argument('--resultfile', '-r', action="store", dest="result_file", help="result file path") parser.add_argument("--debug", '-d', action="store_true", dest="debug", default=False, help="Show debug info") args = parser.parse_args() config_file = args.config_file log_file = args.log_file output_file = args.output_file result_file = args.result_file if config_file is None: config_file = 'config.xml' if not os.path.exists(config_file): logging.error("Can't find config file!") logging.error("Please input config file path!") parser.print_help() sys.exit(1) args.config_file = config_file if log_file is None: log_file = 'log.txt' if not os.path.exists(log_file): 
logging.error("Can't find log file!") logging.error("Please input log file path!") parser.print_help() sys.exit(1) args.log_file = log_file if output_file is None: args.output_file = 'output.xls' if result_file is None: args.result_file = 'result.txt' return args def main(): args = parse_command_line() logger = init_logger(args.debug) logger.info('================== Start ==================') al = AnalysisLog(_config_file=args.config_file, _log_file=args.log_file) al.parse_config_file() # if al.warning_duplicate_dev_add(): al.parse_log_file() al.verify_result() if al.return_value: with open(args.result_file, 'w') as f: f.write(b'PASSED') logger.info('PASSED') else: with open(args.result_file, 'w') as f: f.write(b'FAILED') logger.info('FAILED') al.output_detail_result(args.output_file) logger.info('Detail log please check the %s' % args.output_file) logger.info('=================== End ===================') if __name__ == '__main__': main()<|fim▁end|>
elif isinstance(_expect_values, dict): logging.debug('_expect_values is dict') _key_word = _expect_values.get(self._key_word)
<|file_name|>menu.component.ts<|end_file_name|>
import { Component, Input, Output, EventEmitter } from '@angular/core';

import { EventsService } from '../services/events.service';
import { Game } from '../models/game.model'

import { Store } from '@ngrx/store';
import * as MenuActions from './menu.actions';
import * as fromRoot from '../reducers';
import { Observable } from 'rxjs/Observable';

@Component({
  selector: 'kypo-menu',
  templateUrl: './menu.component.html',
  styleUrls: ['./menu.component.css'],
  providers: [EventsService]
})
export class MenuComponent {
  games: Observable<Game[]>;
  selectedGame: Observable<Game>;

  constructor(
    private eventsService: EventsService,
    private store: Store<fromRoot.State>
  ) {
    this.selectedGame = store.select(fromRoot.selectSelectedGame);
    this.games = store.select(fromRoot.selectGames);
  }

  ngOnInit() {
    this.eventsService.getGames().subscribe(games => {
      // console.log(games);
      // this.games = games;
      this.store.dispatch(new MenuActions.LoadGames(games));
    });
  }

  onClick(game: Game) {
    // console.log(game);
    this.store.dispatch(new MenuActions.SelectGame(game));
  }
}
<|file_name|>static.test.js<|end_file_name|>
import {
  findPort,
  killApp,
  nextBuild,
  nextStart,
  renderViaHTTP,
  File,
  waitFor,
} from 'next-test-utils'
import webdriver from 'next-webdriver'
import { join } from 'path'

const appDir = join(__dirname, '../')

let appPort
let app
let browser
let html

const indexPage = new File(join(appDir, 'pages/static-img.js'))

const runTests = () => {
  it('Should allow an image with a static src to omit height and width', async () => {
    expect(await browser.elementById('basic-static')).toBeTruthy()
    expect(await browser.elementById('blur-png')).toBeTruthy()
    expect(await browser.elementById('blur-webp')).toBeTruthy()
    expect(await browser.elementById('blur-avif')).toBeTruthy()
    expect(await browser.elementById('blur-jpg')).toBeTruthy()
    expect(await browser.elementById('static-svg')).toBeTruthy()
    expect(await browser.elementById('static-gif')).toBeTruthy()
    expect(await browser.elementById('static-bmp')).toBeTruthy()
    expect(await browser.elementById('static-ico')).toBeTruthy()
    expect(await browser.elementById('static-unoptimized')).toBeTruthy()
  })
  it('Should use immutable cache-control header for static import', async () => {
    await browser.eval(
      `document.getElementById("basic-static").scrollIntoView()`
    )
    await waitFor(1000)
    const url = await browser.eval(
      `document.getElementById("basic-static").src`
    )
    const res = await fetch(url)
    expect(res.headers.get('cache-control')).toBe(
      'public, max-age=315360000, immutable'
    )
  })
  it('Should use immutable cache-control header even when unoptimized', async () => {
    await browser.eval(
      `document.getElementById("static-unoptimized").scrollIntoView()`
    )
    await waitFor(1000)
    const url = await browser.eval(
      `document.getElementById("static-unoptimized").src`
    )
    const res = await fetch(url)
    expect(res.headers.get('cache-control')).toBe(
      'public, max-age=31536000, immutable'
    )
  })
  it('Should automatically provide an image height and width', async () => {
    expect(html).toContain('width:400px;height:300px')
  })
  it('Should allow provided width and height to override intrinsic', async () => {
    expect(html).toContain('width:200px;height:200px')
    expect(html).not.toContain('width:400px;height:400px')
  })
  it('Should add a blur placeholder to statically imported jpg', async () => {
    expect(html).toContain(
      `style="position:absolute;top:0;left:0;bottom:0;right:0;box-sizing:border-box;padding:0;border:none;margin:auto;display:block;width:0;height:0;min-width:100%;max-width:100%;min-height:100%;max-height:100%;filter:blur(20px);background-size:cover;background-image:url(&quot;data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAoKCgoKCgsMDAsPEA4QDxYUExMUFiIYGhgaGCIzICUgICUgMy03LCksNy1RQDg4QFFeT0pPXnFlZXGPiI+7u/sBCgoKCgoKCwwMCw8QDhAPFhQTExQWIhgaGBoYIjMgJSAgJSAzLTcsKSw3LVFAODhAUV5PSk9ecWVlcY+Ij7u7+//CABEIAAYACAMBIgACEQEDEQH/xAAnAAEBAAAAAAAAAAAAAAAAAAAABwEBAAAAAAAAAAAAAAAAAAAAAP/aAAwDAQACEAMQAAAAmgP/xAAcEAACAQUBAAAAAAAAAAAAAAASFBMAAQMFERX/2gAIAQEAAT8AZ1HjrKZX55JysIc4Ff/EABQRAQAAAAAAAAAAAAAAAAAAAAD/2gAIAQIBAT8Af//EABQRAQAAAAAAAAAAAAAAAAAAAAD/2gAIAQMBAT8Af//Z&quot;);background-position:0% 0%"`
    )
  })
  it('Should add a blur placeholder to statically imported png', async () => {
    expect(html).toContain(
      `style="position:absolute;top:0;left:0;bottom:0;right:0;box-sizing:border-box;padding:0;border:none;margin:auto;display:block;width:0;height:0;min-width:100%;max-width:100%;min-height:100%;max-height:100%;filter:blur(20px);background-size:cover;background-image:url(&quot;data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAgAAAAICAAAAADhZOFXAAAAOklEQVR42iWGsQkAIBDE0iuIdiLOJjiGIzjiL/Meb4okiNYIlLjK3hJMzCQG1/0qmXXOUkjAV+m9wAMe3QiV6Ne8VgAAAABJRU5ErkJggg==&quot;);background-position:0% 0%"`
    )
  })
}

describe('Build Error Tests', () => {
  it('should throw build error when import statement is used with missing file', async () => {
    await indexPage.replace(
      '../public/foo/test-rect.jpg',
      '../public/foo/test-rect-broken.jpg'
    )
    const { stderr } = await nextBuild(appDir, undefined, { stderr: true })
    await indexPage.restore()

    expect(stderr).toContain(
      "Module not found: Can't resolve '../public/foo/test-rect-broken.jpg"
    )
    // should contain the importing module
    expect(stderr).toContain('./pages/static-img.js')
    // should contain a import trace
    expect(stderr).not.toContain('Import trace for requested module')
  })
})

describe('Static Image Component Tests', () => {
  beforeAll(async () => {
    await nextBuild(appDir)
    appPort = await findPort()
    app = await nextStart(appDir, appPort)
    html = await renderViaHTTP(appPort, '/static-img')
    browser = await webdriver(appPort, '/static-img')
  })
  afterAll(() => {
    killApp(app)
  })

  runTests()
})
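// A sketch of the fixture page the suite above renders (pages/static-img.js
// in the test app). The ids and file names mirror the assertions; the real
// fixture contains many more cases, so this is illustrative only.
import Image from 'next/image'
import testImg from '../public/foo/test-rect.jpg'

export default function StaticImg() {
  return (
    <>
      {/* width/height omitted: derived from the static import */}
      <Image id="basic-static" src={testImg} />
      {/* static imports can also opt into the blur-up placeholder */}
      <Image id="blur-jpg" src={testImg} placeholder="blur" />
    </>
  )
}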
<|file_name|>update_collector_profile.test.js<|end_file_name|>
/* eslint-disable promise/always-return */
import { runAuthenticatedQuery, runQuery } from "schema/v1/test/utils"

describe("UpdateCollectorProfile", () => {
  it("updates and returns a collector profile", () => {
    /* eslint-disable max-len */
    const mutation = `
      mutation {
        updateCollectorProfile(input: { professional_buyer: true, loyalty_applicant: true, self_reported_purchases: "trust me i buy art", intents: [BUY_ART_AND_DESIGN] }) {
          id
          name
          email
          self_reported_purchases
          intents
        }
      }
    `
    /* eslint-enable max-len */

    const context = {
      updateCollectorProfileLoader: () =>
        Promise.resolve({
          id: "3",
          name: "Percy",
          email: "[email protected]",
          self_reported_purchases: "treats",
          intents: ["buy art & design"],
        }),
    }

    const expectedProfileData = {
      id: "3",
      name: "Percy",
      email: "[email protected]",
      self_reported_purchases: "treats",
      intents: ["buy art & design"],
    }

    expect.assertions(1)
    return runAuthenticatedQuery(mutation, context).then(
      ({ updateCollectorProfile }) => {
        expect(updateCollectorProfile).toEqual(expectedProfileData)
      }
    )
  })

  it("throws error when data loader is missing", () => {
    /* eslint-disable max-len */
    const mutation = `
      mutation {
        updateCollectorProfile(input: { professional_buyer: true, loyalty_applicant: true, self_reported_purchases: "trust me i buy art" }) {
          id
          name
          email
          self_reported_purchases
          intents
        }
      }
    `
    /* eslint-enable max-len */

    const errorResponse =
      "Missing Update Collector Profile Loader. Check your access token."

    expect.assertions(1)
    return runQuery(mutation)
      .then(() => {
        throw new Error("An error was not thrown but was expected.")
      })
      .catch(error => {
        expect(error.message).toEqual(errorResponse)
      })
  })
})
<|file_name|>threatfox.py<|end_file_name|>
import logging
from datetime import timedelta

from core import Feed
import pandas as pd
from core.observables import Ip, Observable
from core.errors import ObservableValidationError


class ThreatFox(Feed):
    default_values = {
        "frequency": timedelta(hours=1),
        "name": "ThreatFox",
        "source": "https://threatfox.abuse.ch/export/json/recent/",
        "description": "Feed ThreatFox by Abuse.ch",
    }

    def update(self):
        for index, line in self.update_json():
            self.analyze(line)

    def update_json(self):
        r = self._make_request(sort=False)
        if r:
            res = r.json()
            values = [r[0] for r in res.values()]
            df = pd.DataFrame(values)
            df["first_seen_utc"] = pd.to_datetime(df["first_seen_utc"])
            df["last_seen_utc"] = pd.to_datetime(df["last_seen_utc"])
            if self.last_run:
                df = df[df["first_seen_utc"] > self.last_run]
            df.fillna("-", inplace=True)
            return df.iterrows()

    def analyze(self, item):
        first_seen = item["first_seen_utc"]
        ioc_value = item["ioc_value"]
        ioc_type = item["ioc_type"]
        threat_type = item["threat_type"]
        malware_alias = item["malware_alias"]
        malware_printable = item["malware_printable"]
        last_seen_utc = item["last_seen_utc"]
        confidence_level = item["confidence_level"]
        reference = item["reference"]
        reporter = item["reporter"]
        tags = []

        context = {"source": self.name}
        context["first_seen"] = first_seen

        if reference:
            context["reference"] = reference
        else:
            context["reference"] = "Unknown"

        if reporter:
            context["reporter"] = reporter
        else:
            context["reporter"] = "Unknown"

        if threat_type:
            context["threat_type"] = threat_type

        if item["tags"]:
            tags.extend(item["tags"].split(","))

        if malware_printable:
            tags.append(malware_printable)

        if malware_alias:
            context["malware_alias"] = malware_alias

        if last_seen_utc:
            context["last_seen_utc"] = last_seen_utc

        if confidence_level:
            context["confidence_level"] = confidence_level

        value = None
        obs = None
        try:
            if "ip" in ioc_type:
                value, port = ioc_value.split(":")
                context["port"] = port
                obs = Ip.get_or_create(value=value)
            else:
                obs = Observable.add_text(ioc_value)
        except ObservableValidationError as e:
            logging.error(e)
            return

        if obs:
            obs.add_context(context)
            obs.add_source(self.name)
            if tags:
                obs.tag(tags)
            if malware_printable:
                obs.tags
var pagesize = tb_getPageSize(); var x = pagesize[0] - 150; var y = pagesize[1] - 150; var imageWidth = imgPreloader.width; var imageHeight = imgPreloader.height; if (imageWidth > x) { imageHeight = imageHeight * (x / imageWidth); imageWidth = x; if (imageHeight > y) { imageWidth = imageWidth * (y / imageHeight); imageHeight = y; } } else if (imageHeight > y) { imageWidth = imageWidth * (y / imageHeight); imageHeight = y; if (imageWidth > x) { imageHeight = imageHeight * (x / imageWidth); imageWidth = x; } } // End Resizing TB_WIDTH = imageWidth + 30; TB_HEIGHT = imageHeight + 60; $("#TB_window").append("<a href='' id='TB_ImageOff' title='Close'><img id='TB_Image' src='"+url+"' width='"+imageWidth+"' height='"+imageHeight+"' alt='"+caption+"'/></a>" + "<div id='TB_caption'>"+caption+"<div id='TB_secondLine'>" + TB_imageCount + TB_PrevHTML + TB_NextHTML + "</div></div><div id='TB_closeWindow'><a href='#' id='TB_closeWindowButton' title='Cerrar'>Cerrar</a></div>"); $("#TB_closeWindowButton").click(tb_remove); if (!(TB_PrevHTML === "")) { function goPrev(){ if($(document).unbind("click",goPrev)){$(document).unbind("click",goPrev);} $("#TB_window").remove(); $("body").append("<div id='TB_window'></div>"); tb_show(TB_PrevCaption, TB_PrevURL, imageGroup); return false; } $("#TB_prev").click(goPrev); } if (!(TB_NextHTML === "")) { function goNext(){ $("#TB_window").remove(); $("body").append("<div id='TB_window'></div>"); tb_show(TB_NextCaption, TB_NextURL, imageGroup); return false; } $("#TB_next").click(goNext); } document.onkeydown = function(e){ if (e == null) { // ie keycode = event.keyCode; } else { // mozilla keycode = e.which; } if(keycode == 27){ // close tb_remove(); } else if(keycode == 190){ // display previous image if(!(TB_NextHTML == "")){ document.onkeydown = ""; goNext(); } } else if(keycode == 188){ // display next image if(!(TB_PrevHTML == "")){ document.onkeydown = ""; goPrev(); } } }; tb_position(); $("#TB_load").remove(); $("#TB_ImageOff").click(tb_remove); $("#TB_window").css({display:"block"}); //for safari using css instead of show }; imgPreloader.src = url; }else{//code to show html var queryString = url.replace(/^[^\?]+\??/,''); var params = tb_parseQuery( queryString ); TB_WIDTH = (params['width']*1) + 30 || 630; //defaults to 630 if no paramaters were added to URL TB_HEIGHT = (params['height']*1) + 40 || 440; //defaults to 440 if no paramaters were added to URL ajaxContentW = TB_WIDTH - 30; ajaxContentH = TB_HEIGHT - 45; if(url.indexOf('TB_iframe') != -1){// either iframe or ajax window urlNoQuery = url.split('TB_'); $("#TB_iframeContent").remove(); if(params['modal'] != "true"){//iframe no modal $("#TB_window").append("<div id='TB_title'><div id='TB_ajaxWindowTitle'>"+caption+"</div><div id='TB_closeAjaxWindow'><a href='#' id='TB_closeWindowButton' title='Cerrar'>Cerrar</a></div></div><iframe frameborder='0' hspace='0' src='"+urlNoQuery[0]+"' id='TB_iframeContent' name='TB_iframeContent"+Math.round(Math.random()*1000)+"' onload='tb_showIframe()' style='width:"+(ajaxContentW + 29)+"px;height:"+(ajaxContentH + 17)+"px;' > </iframe>"); }else{//iframe modal $("#TB_overlay").unbind(); $("#TB_window").append("<iframe frameborder='0' hspace='0' src='"+urlNoQuery[0]+"' id='TB_iframeContent' name='TB_iframeContent"+Math.round(Math.random()*1000)+"' onload='tb_showIframe()' style='width:"+(ajaxContentW + 29)+"px;height:"+(ajaxContentH + 17)+"px;'> </iframe>"); } }else{// not an iframe, ajax if($("#TB_window").css("display") != "block"){ if(params['modal'] != "true"){//ajax no 
modal $("#TB_window").append("<div id='TB_title'><div id='TB_ajaxWindowTitle'>"+caption+"</div><div id='TB_closeAjaxWindow'><a href='#' id='TB_closeWindowButton'>Cerrar</a></div></div><div id='TB_ajaxContent' style='width:"+ajaxContentW+"px;height:"+ajaxContentH+"px'></div>"); }else{//ajax modal $("#TB_overlay").unbind(); $("#TB_window").append("<div id='TB_ajaxContent' class='TB_modal' style='width:"+ajaxContentW+"px;height:"+ajaxContentH+"px;'></div>"); } }else{//this means the window is already up, we are just loading new content via ajax $("#TB_ajaxContent")[0].style.width = ajaxContentW +"px"; $("#TB_ajaxContent")[0].style.height = ajaxContentH +"px"; $("#TB_ajaxContent")[0].scrollTop = 0; $("#TB_ajaxWindowTitle").html(caption); } } $("#TB_closeWindowButton").click(tb_remove); if(url.indexOf('TB_inline') != -1){ $("#TB_ajaxContent").append($('#' + params['inlineId']).children()); $("#TB_window").unload(function () { $('#' + params['inlineId']).append( $("#TB_ajaxContent").children() ); // move elements back when you're finished }); tb_position(); $("#TB_load").remove(); $("#TB_window").css({display:"block"}); }else if(url.indexOf('TB_iframe') != -1){ tb_position(); if($.browser.safari){//safari needs help because it will not fire iframe onload $("#TB_load").remove(); $("#TB_window").css({display:"block"}); } }else{ $("#TB_ajaxContent").load(url += "&random=" + (new Date().getTime()),function(){//to do a post change this load method tb_position(); $("#TB_load").remove(); tb_init("#TB_ajaxContent a.thickbox"); $("#TB_window").css({display:"block"}); }); } } if(!params['modal']){ document.onkeyup = function(e){ if (e == null) { // ie keycode = event.keyCode; } else { // mozilla keycode = e.which; } if(keycode == 27){ // close tb_remove(); } }; } } catch(e) { //nothing here } } //helper functions below function tb_showIframe(){ $("#TB_load").remove(); $("#TB_window").css({display:"block"}); } function tb_remove() { $("#TB_imageOff").unbind("click"); $("#TB_closeWindowButton").unbind("click"); $("#TB_window").fadeOut("fast",function(){$('#TB_window,#TB_overlay,#TB_HideSelect').trigger("unload").unbind().remove();}); $("#TB_load").remove(); if (typeof document.body.style.maxHeight == "undefined") {//if IE 6 $("body","html").css({height: "auto", width: "auto"}); $("html").css("overflow",""); } document.onkeydown = ""; document.onkeyup = ""; return false; } function tb_position() { $("#TB_window").css({marginLeft: '-' + parseInt((TB_WIDTH / 2),10) + 'px', width: TB_WIDTH + 'px'}); if ( !(jQuery.browser.msie && jQuery.browser.version < 7)) { // take away IE6 $("#TB_window").css({marginTop: '-' + parseInt((TB_HEIGHT / 2),10) + 'px'}); } } function tb_parseQuery ( query ) { var Params = {}; if ( ! query ) {return Params;}// return empty object var Pairs = query.split(/[;&]/); for ( var i = 0; i < Pairs.length; i++ ) { var KeyVal = Pairs[i].split('='); if ( ! 
KeyVal || KeyVal.length != 2 ) {continue;} var key = unescape( KeyVal[0] ); var val = unescape( KeyVal[1] ); val = val.replace(/\+/g, ' '); Params[key] = val; } return Params; } function tb_getPageSize(){ var de = document.documentElement; var w = window.innerWidth || self.innerWidth || (de&&de.clientWidth) || document.body.clientWidth; var h = window.innerHeight || self.innerHeight || (de&&de.clientHeight) || document.body.clientHeight; arrayPageSize = [w,h]; return arrayPageSize; } function tb_detectMacXFF() { var userAgent = navigator.userAgent.toLowerCase(); if (userAgent.indexOf('mac') != -1 && userAgent.indexOf('firefox')!=-1) { return true; } }<|fim▁end|>
<|file_name|>cities.server.routes.js<|end_file_name|><|fim▁begin|>'use strict'; /** * Module dependencies */ var citiesPolicy = require('../policies/cities.server.policy'), cities = require('../controllers/cities.server.controller'); module.exports = function (app) {<|fim▁hole|> .post(cities.create); // Single city routes app.route('/api/cities/:cityId').all(citiesPolicy.isAllowed) .get(cities.read) .put(cities.update) .delete(cities.delete); // Finish by binding the city middleware app.param('cityId', cities.cityByID); };<|fim▁end|>
// City collection routes app.route('/api/cities').all(citiesPolicy.isAllowed) .get(cities.list)
<|file_name|>lights.py<|end_file_name|><|fim▁begin|>import logging import requests HUE_IP = '192.168.86.32' HUE_USERNAME = '7KcxItfntdF0DuWV9t0GPMeToEBlvHTgqWNZqxu6' logger = logging.getLogger('hue') def getLights(): url = 'http://{0}/api/{1}/lights'.format(HUE_IP, HUE_USERNAME)<|fim▁hole|> logger.error('Failed getting status for all lights') return if r.status_code == 200: data = r.json() return data def getStatus(id): url = 'http://{0}/api/{1}/lights/{2}'.format(HUE_IP, HUE_USERNAME, id) try: r = requests.get(url) except: logger.error('Failed getting status for light {0}'.format (id)) return if r.status_code == 200: data = r.json() return data<|fim▁end|>
try: r = requests.get(url) except:
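A usage sketch follows; set_light is a hypothetical helper (not part of the module above) that drives the Hue bridge's standard /state endpoint, assuming the bridge answers at HUE_IP:

def set_light(id, on=True):
    # Hypothetical helper: toggle a light via the bridge's /state endpoint.
    url = 'http://{0}/api/{1}/lights/{2}/state'.format(HUE_IP, HUE_USERNAME, id)
    try:
        r = requests.put(url, json={'on': on})
    except Exception:
        logger.error('Failed setting state for light {0}'.format(id))
        return
    if r.status_code == 200:
        return r.json()

# lights = getLights()      # id -> metadata for every light on the bridge
# status = getStatus('1')   # full state blob for light 1
# set_light('1', on=False)  # hypothetical: switch light 1 off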
<|file_name|>CWE36_Absolute_Path_Traversal__wchar_t_file_fopen_45.cpp<|end_file_name|><|fim▁begin|>/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE36_Absolute_Path_Traversal__wchar_t_file_fopen_45.cpp Label Definition File: CWE36_Absolute_Path_Traversal.label.xml Template File: sources-sink-45.tmpl.cpp */ /* * @description * CWE: 36 Absolute Path Traversal * BadSource: file Read input from a file * GoodSource: Full path and file name * Sinks: fopen * BadSink : Open the file named in data using fopen() * Flow Variant: 45 Data flow: data passed as a static global variable from one function to another in the same source file * * */ #include "std_testcase.h" #ifndef _WIN32 #include <wchar.h> <|fim▁hole|>#endif #ifdef _WIN32 #define FILENAME "C:\\temp\\file.txt" #else #define FILENAME "/tmp/file.txt" #endif #ifdef _WIN32 #define FOPEN _wfopen #else #define FOPEN fopen #endif namespace CWE36_Absolute_Path_Traversal__wchar_t_file_fopen_45 { static wchar_t * badData; static wchar_t * goodG2BData; #ifndef OMITBAD static void badSink() { wchar_t * data = badData; { FILE *pFile = NULL; /* POTENTIAL FLAW: Possibly opening a file without validating the file name or path */ pFile = FOPEN(data, L"wb+"); if (pFile != NULL) { fclose(pFile); } } } void bad() { wchar_t * data; wchar_t dataBuffer[FILENAME_MAX] = L""; data = dataBuffer; { /* Read input from a file */ size_t dataLen = wcslen(data); FILE * pFile; /* if there is room in data, attempt to read the input from a file */ if (FILENAME_MAX-dataLen > 1) { pFile = fopen(FILENAME, "r"); if (pFile != NULL) { /* POTENTIAL FLAW: Read data from a file */ if (fgetws(data+dataLen, (int)(FILENAME_MAX-dataLen), pFile) == NULL) { printLine("fgetws() failed"); /* Restore NUL terminator if fgetws fails */ data[dataLen] = L'\0'; } fclose(pFile); } } } badData = data; badSink(); } #endif /* OMITBAD */ #ifndef OMITGOOD /* goodG2B() uses the GoodSource with the BadSink */ static void goodG2BSink() { wchar_t * data = goodG2BData; { FILE *pFile = NULL; /* POTENTIAL FLAW: Possibly opening a file without validating the file name or path */ pFile = FOPEN(data, L"wb+"); if (pFile != NULL) { fclose(pFile); } } } static void goodG2B() { wchar_t * data; wchar_t dataBuffer[FILENAME_MAX] = L""; data = dataBuffer; #ifdef _WIN32 /* FIX: Use a fixed, full path and file name */ wcscat(data, L"c:\\temp\\file.txt"); #else /* FIX: Use a fixed, full path and file name */ wcscat(data, L"/tmp/file.txt"); #endif goodG2BData = data; goodG2BSink(); } void good() { goodG2B(); } #endif /* OMITGOOD */ } /* close namespace */ /* Below is the main(). It is only used when building this testcase on its own for testing or for building a binary to use in testing binary analysis tools. It is not used when compiling all the testcases as one application, which is how source code analysis tools are tested. */ #ifdef INCLUDEMAIN using namespace CWE36_Absolute_Path_Traversal__wchar_t_file_fopen_45; /* so that we can use good and bad easily */ int main(int argc, char * argv[]) { /* seed randomness */ srand( (unsigned)time(NULL) ); #ifndef OMITGOOD printLine("Calling good()..."); good(); printLine("Finished good()"); #endif /* OMITGOOD */ #ifndef OMITBAD printLine("Calling bad()..."); bad(); printLine("Finished bad()"); #endif /* OMITBAD */ return 0; } #endif<|fim▁end|>
<|file_name|>test_tracking_logger.py<|end_file_name|><|fim▁begin|>import logging import pytest def test_tracing_by_function_if_enable(track_logger, handler): msg1 = 'TEST1' msg2 = 'TEST2' msg3 = 'TEST3' track_logger.setLevel(logging.INFO) track_logger.enable_tracking() track_logger.debug(msg1) track_logger.info(msg2) track_logger.disable_tracking() track_logger.debug(msg3) record = handler.pop() assert record.msg == msg2 assert record.levelname == logging.getLevelName(logging.INFO) with pytest.raises(IndexError): handler.pop() def test_tracing_by_function_if_enable_with_exc(track_logger, handler): msg1 = 'TEST1' msg2 = 'TEST2' msg3 = 'TEST3' track_logger.setLevel(logging.INFO) track_logger.enable_tracking() try: track_logger.debug(msg1) track_logger.info(msg2) raise Exception except Exception: track_logger.exit_with_exc() track_logger.debug(msg3) track_logger.disable_tracking() record_2 = handler.pop() record_1 = handler.pop() assert record_1.msg == msg1 assert record_1.levelname == logging.getLevelName(logging.DEBUG) assert record_2.msg == msg2 assert record_2.levelname == logging.getLevelName(logging.INFO) with pytest.raises(IndexError): handler.pop() def test_tracing_by_context(track_logger, handler): msg1 = 'TEST1' msg2 = 'TEST2' msg3 = 'TEST3' track_logger.setLevel(logging.INFO) with track_logger.trace: track_logger.debug(msg1) track_logger.info(msg2) track_logger.debug(msg3) record = handler.pop() assert record.msg == msg2 assert record.levelname == logging.getLevelName(logging.INFO) with pytest.raises(IndexError): handler.pop() def test_tracing_by_context_with_exc(track_logger, handler): msg1 = 'TEST1' msg2 = 'TEST2' msg3 = 'TEST3' track_logger.setLevel(logging.INFO) try: with track_logger.trace: track_logger.debug(msg1) track_logger.info(msg2) raise Exception except Exception: pass track_logger.debug(msg3) record_2 = handler.pop() record_1 = handler.pop() assert record_1.msg == msg1 assert record_1.levelname == logging.getLevelName(logging.DEBUG) assert record_2.msg == msg2 assert record_2.levelname == logging.getLevelName(logging.INFO) with pytest.raises(IndexError): handler.pop() def test_tracing_by_decorator(track_logger, handler): msg1 = 'TEST1' msg2 = 'TEST2' msg3 = 'TEST3' track_logger.setLevel(logging.INFO) @track_logger.trace def trace_func(): track_logger.debug(msg1) track_logger.info(msg2) trace_func() track_logger.debug(msg3) record = handler.pop() assert record.msg == msg2 assert record.levelname == logging.getLevelName(logging.INFO) with pytest.raises(IndexError): handler.pop() def test_tracing_by_decorator_with_exc(track_logger, handler): msg1 = 'TEST1' msg2 = 'TEST2' msg3 = 'TEST3' track_logger.setLevel(logging.INFO) @track_logger.trace def trace_func(): track_logger.debug(msg1) <|fim▁hole|> try: trace_func() except Exception: pass track_logger.debug(msg3) record_2 = handler.pop() record_1 = handler.pop() assert record_1.msg == msg1 assert record_1.levelname == logging.getLevelName(logging.DEBUG) assert record_2.msg == msg2 assert record_2.levelname == logging.getLevelName(logging.INFO) with pytest.raises(IndexError): handler.pop()<|fim▁end|>
track_logger.info(msg2) raise Exception
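The fixtures these tests lean on (track_logger, handler) come from a conftest that is not shown; one plausible shape for the handler side, assuming the tracking logger buffers records and flushes them to an attached handler:

# Hypothetical conftest sketch; the real fixtures are not in this file.
import logging
import pytest

class StackHandler(logging.Handler):
    def __init__(self):
        super().__init__()
        self.records = []
    def emit(self, record):
        self.records.append(record)
    def pop(self):
        # Tests read records newest-first and expect IndexError when empty.
        return self.records.pop()

@pytest.fixture
def handler():
    return StackHandler()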
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>use std::fs; use std::path; use littletest; use adapter::{Adapter}; pub struct RunOptions<'o> { pub ignore_todo: bool, pub engine: Box<Adapter + 'o> } pub struct TestCase<'o> { pub input_path: path::PathBuf, pub expected_path: path::PathBuf, pub opts: &'o RunOptions<'o> } impl<'o> TestCase<'o> { pub fn is_todo(&self) -> bool { match self.input_path.to_str() { Some(x) => x.contains("todo"),<|fim▁hole|> fn clean_output(css: &str) -> String { let despaced = regex!(r"\s+").replace_all(css, " "); let destarred = regex!(r" *\{").replace_all(despaced.as_ref(), " {\n"); let newlined = regex!(r"([;,]) *").replace_all(destarred.as_ref(), "$1\n"); let destarred2 = regex!(r" *\} *").replace_all(newlined.as_ref(), " }\n"); let trim: &[_] = &[' ', '\t', '\n', '\r']; destarred2.trim_matches(trim).to_string() } impl<'a> littletest::Runnable for TestCase<'a> { fn run(&self) -> littletest::TestResult { use littletest::{TestResult}; use std::io::Read; if self.opts.ignore_todo && self.is_todo() { return TestResult::Skipped } let result = self.opts.engine.compile(&self.input_path); if result.is_err() { return TestResult::Fail } let output = clean_output(result.unwrap().as_ref()); let expected_display = self.expected_path.display(); let mut expected_buffer = String::new(); let expected = match fs::File::open(&self.expected_path) { Err(why) => panic!("couldn't open {}: {}", expected_display, why), Ok(mut file) => match file.read_to_string(&mut expected_buffer) { Err(why) => panic!("couldn't read {}: {}", expected_display, why), Ok(_) => clean_output(expected_buffer.as_ref()) } }; if output != expected { return TestResult::Fail } TestResult::Pass } } pub fn load<'o>(spec_path: &path::Path, opts: &'o RunOptions) -> Vec<Box<littletest::Runnable + Sync + 'o>> { use glob::glob; let input_file = "input.scss"; let expected_file = "expected_output.css"; let mut pattern = spec_path.to_path_buf(); pattern.push("**"); pattern.push(input_file); glob(pattern.to_str().unwrap()) .unwrap() .map(|result| { let path = result.unwrap(); let dir = path.parent().unwrap().to_path_buf(); Box::new(TestCase { input_path: path, expected_path: dir.join(expected_file), opts: opts }) as Box<littletest::Runnable + Sync + 'o> }) .collect() }<|fim▁end|>
None => false } } }
<|file_name|>turbulentIntensityKineticEnergyInletFvPatchScalarField.H<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------*\ ========= | \\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / O peration | \\ / A nd | Copyright (C) 2006-2010 OpenCFD Ltd. \\/ M anipulation | ------------------------------------------------------------------------------- License This file is part of OpenFOAM. OpenFOAM is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. OpenFOAM is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>. Class Foam::turbulentIntensityKineticEnergyInletFvPatchScalarField Description Calculate turbulent kinetic energy from the intensity provided as a fraction of the mean velocity Example of the boundary condition specification: @verbatim inlet { type turbulentIntensityKineticEnergyInlet; intensity 0.05; // 5% turbulence value uniform 1; // placeholder } @endverbatim SourceFiles turbulentIntensityKineticEnergyInletFvPatchScalarField.C \*---------------------------------------------------------------------------*/ #ifndef turbulentIntensityKineticEnergyInletFvPatchScalarField_H #define turbulentIntensityKineticEnergyInletFvPatchScalarField_H #include <finiteVolume/inletOutletFvPatchFields.H> // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // namespace Foam { /*---------------------------------------------------------------------------*\ Class turbulentIntensityKineticEnergyInletFvPatch Declaration \*---------------------------------------------------------------------------*/ <|fim▁hole|>{ // Private data //- Turbulent intensity as fraction of mean velocity scalar intensity_; //- Name of the velocity field word UName_; //- Name of the flux field word phiName_; public: //- Runtime type information TypeName("turbulentIntensityKineticEnergyInlet"); // Constructors //- Construct from patch and internal field turbulentIntensityKineticEnergyInletFvPatchScalarField ( const fvPatch&, const DimensionedField<scalar, volMesh>& ); //- Construct from patch, internal field and dictionary turbulentIntensityKineticEnergyInletFvPatchScalarField ( const fvPatch&, const DimensionedField<scalar, volMesh>&, const dictionary& ); //- Construct by mapping given // turbulentIntensityKineticEnergyInletFvPatchScalarField // onto a new patch turbulentIntensityKineticEnergyInletFvPatchScalarField ( const turbulentIntensityKineticEnergyInletFvPatchScalarField&, const fvPatch&, const DimensionedField<scalar, volMesh>&, const fvPatchFieldMapper& ); //- Construct as copy turbulentIntensityKineticEnergyInletFvPatchScalarField ( const turbulentIntensityKineticEnergyInletFvPatchScalarField& ); //- Construct and return a clone virtual tmp<fvPatchScalarField> clone() const { return tmp<fvPatchScalarField> ( new turbulentIntensityKineticEnergyInletFvPatchScalarField ( *this ) ); } //- Construct as copy setting internal field reference turbulentIntensityKineticEnergyInletFvPatchScalarField ( const turbulentIntensityKineticEnergyInletFvPatchScalarField&, const 
DimensionedField<scalar, volMesh>& ); //- Construct and return a clone setting internal field reference virtual tmp<fvPatchScalarField> clone ( const DimensionedField<scalar, volMesh>& iF ) const { return tmp<fvPatchScalarField> ( new turbulentIntensityKineticEnergyInletFvPatchScalarField ( *this, iF ) ); } // Member functions //- Update the coefficients associated with the patch field virtual void updateCoeffs(); //- Write virtual void write(Ostream&) const; }; // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // } // End namespace Foam // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // #endif // ************************ vim: set sw=4 sts=4 et: ************************ //<|fim▁end|>
class turbulentIntensityKineticEnergyInletFvPatchScalarField : public inletOutletFvPatchScalarField
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod eval_functions; pub mod types; mod env; <|fim▁hole|>pub use self::eval_functions::*; pub use self::env::RollerEnv;<|fim▁end|>
<|file_name|>base.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*- # Copyright (c) 2013, Theo Crevon # Copyright (c) 2013, Greg Leclercq # # See the file LICENSE for copying permission. from itertools import groupby from swf.models.event import EventFactory, CompiledEventFactory from swf.models.event.workflow import WorkflowExecutionEvent from swf.utils import cached_property class History(object): """Execution events history container History object is an Event subclass objects container which can be built directly against an amazon json response using it's from_event_list method. It is iterable and exposes a list-like __getitem__ for easier manipulation. :param events: Events list to build History upon :type events: list Typical amazon response looks like: .. code-block:: json { "events": [ { 'eventId': 1, 'eventType': 'WorkflowExecutionStarted', 'workflowExecutionStartedEventAttributes': { 'taskList': { 'name': 'test' }, 'parentInitiatedEventId': 0, 'taskStartToCloseTimeout': '300', 'childPolicy': 'TERMINATE', 'executionStartToCloseTimeout': '6000', 'workflowType': { 'version': '0.1', 'name': 'test-1' }, }, 'eventTimestamp': 1365177769.585, }, { 'eventId': 2, 'eventType': 'DecisionTaskScheduled', 'decisionTaskScheduledEventAttributes': { 'startToCloseTimeout': '300', 'taskList': { 'name': 'test' } }, 'eventTimestamp': 1365177769.585 } ] } """ def __init__(self, *args, **kwargs): self.events = kwargs.pop('events', []) self.raw = kwargs.pop('raw', None) self.it_pos = 0 def __len__(self): return len(self.events) def __getitem__(self, val): if isinstance(val, int): return self.events[val] elif isinstance(val, slice): return History(events=self.events[val]) raise TypeError("Unknown slice format: %s" % type(val)) def __repr__(self): events_repr = '\n\t'.join( map(lambda e: e.__repr__(), self.events) ) repr_str = '<History\n\t%s\n>' % events_repr return repr_str def __iter__(self): return self def next(self): try: next_event = self.events[self.it_pos] self.it_pos += 1 except IndexError: self.it_pos = 0 raise StopIteration return next_event @property def last(self): """Returns the last stored event :rtype: swf.models.event.Event """ return self.events[-1] def latest(self, n): """Returns the n latest events stored in the History :param n: latest events count to return :type n: int :rtype: list """ end_pos = len(self.events) start_pos = len(self.events) - n return self.events[start_pos:end_pos] @property def first(self): """Returns the first stored event :rtype: swf.models.event.Event """ return self.events[0] @property def finished(self): """Checks if the History matches with a finished Workflow Execution history state. """ completion_states = ( 'completed', 'failed', 'canceled', 'terminated' ) if (isinstance(self.last, WorkflowExecutionEvent) and self.last.state in completion_states): return True return False def filter(self, **kwargs): """Filters the history based on kwargs events attributes Basically, allows to filter the history events upon their types and states. Can be used for example to retrieve every 'DecisionTask' in the history, to check the presence of a specific event and so on... example: .. 
code-block:: python >>> history_obj.filter(type='ActivityTask', state='completed') # doctest: +SKIP <History <Event 23 ActivityTask : completed> <Event 42 ActivityTask : completed> <Event 61 ActivityTask : completed> > >>> history_obj.filter(type='DecisionTask') # doctest: +SKIP <History <Event 2 DecisionTask : scheduled> <Event 3 DecisionTask : started> <Event 7 DecisionTask : scheduled> <Event 8 DecisionTask : started> <Event 20 DecisionTask : scheduled> <Event 21 DecisionTask : started> > :rtype: swf.models.history.History """ return filter( lambda e: all(getattr(e, k) == v for k, v in kwargs.iteritems()), self.events ) @property def reversed(self): for i in xrange(len(self.events) - 1, -1, -1): yield self.events[i] @property def distinct(self): """Extracts distinct history events based on their types :rtype: list of swf.models.event.Event """ distinct_events = [] for key, group in groupby(self.events, lambda e: e.type): g = list(group) # Merge every WorkflowExecution events into same group if (len(g) == 1 and len(distinct_events) >= 1 and g[0].type == "WorkflowExecution"): # WorfklowExecution group will always be in first position distinct_events[0].extend(g) else: distinct_events.append(list(g)) return distinct_events def compile(self): """Compiles history events into a stateful History based on events types and states transitions. Every events stored in the resulting history are stateful CompiledEvent subclasses instances then. :rtype: swf.models.history.History made of swf.models.event.CompiledEvent """ distinct_events = self.distinct compiled_history = [] for events_list in distinct_events: if len(events_list) > 0: compiled_event = CompiledEventFactory(events_list[0]) for event in events_list[1:]: compiled_event.transit(event) compiled_history.append(compiled_event) return History(events=compiled_history) @cached_property def compiled(self): """Compiled history version :rtype: swf.models.history.History made of swf.models.event.CompiledEvent """ return self.compile() @classmethod def from_event_list(cls, data): """Instantiates a new ``swf.models.history.History`` instance from amazon service response. Every member of the History are ``swf.models.event.Event`` subclasses instances, exposing their type, state, and so on to facilitate decisions according to the history. :param data: event history description (typically, an amazon response) :type data: dict :returns: History model instance built upon data description<|fim▁hole|> """ events_history = [] for index, d in enumerate(data): event = EventFactory(d) events_history.append(event) return cls(events=events_history, raw=data)<|fim▁end|>
:rtype: swf.models.history.History """ events_history = [] for index, d in enumerate(data): event = EventFactory(d) events_history.append(event) return cls(events=events_history, raw=data)<|fim▁end|>
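A decider-side usage sketch; the events list would come from a PollForDecisionTask response, and the names here are placeholders:

# history = History.from_event_list(response['events'])
# history.last                                      # most recent Event
# history.filter(type='ActivityTask', state='completed')
# compiled = history.compiled                       # stateful CompiledEvent view, cached
# if history.finished: ...                          # workflow reached a terminal state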
<|file_name|>interface_c_p_event.js<|end_file_name|><|fim▁begin|>var interface_c_p_event = [ [ "buttonNumber", "interface_c_p_event.html#a95f060acf8268fe7f3a16c42ea121466", null ], [ "characters", "interface_c_p_event.html#a1bf8cbf0c017151070a99236c957fc94", null ], [ "charactersIgnoringModifiers", "interface_c_p_event.html#aa1b99c685842a415f6736d776c8aba90", null ], [ "clickCount", "interface_c_p_event.html#a21ba312feb01ac26df9cab0eae85cdcc", null ], [ "currentTimestamp", "interface_c_p_event.html#a0370261e16fa1b67388c1a027efff02a", null ], [ "data1", "interface_c_p_event.html#a2129df252602ed408c8e9241d8e89aad", null ], [ "data2", "interface_c_p_event.html#a5a9e2ed2d7faffaef4648da3257ac31b", null ], [ "deltaX", "interface_c_p_event.html#a8bcf33e092b83a9bf10fc2aabae6c249", null ], [ "deltaY", "interface_c_p_event.html#a1804678959cc324a1af7386b71720cf2", null ], [ "deltaZ", "interface_c_p_event.html#adb216a38817ab6a03fc50276fb7abad9", null ], [ "description", "interface_c_p_event.html#af262b1f3b4aced3b2bd3044bd15af565", null ], [ "globalLocation", "interface_c_p_event.html#ad464fd6df3bf20110075dc81ce662a44", null ], [ "isARepeat", "interface_c_p_event.html#a0d6e1d0c4afa0c4231bedcdd9401ba87", null ], [ "keyCode", "interface_c_p_event.html#a5ecd01fadd6c4bc25a8508dc4bb7cca7", null ], [ "keyEventWithType:location:modifierFlags:timestamp:windowNumber:context:characters:charactersIgnoringModifiers:isARepeat:keyCode:", "interface_c_p_event.html#af5f96235d926dc455c492ece4d907744", null ], [ "locationInWindow", "interface_c_p_event.html#a5a57f4fa909de5d83e5402a8d8af4b94", null ], [ "modifierFlags", "interface_c_p_event.html#ae1990580a2f73b19009f30f27ff1e396", null ], [ "mouseEventWithType:location:modifierFlags:timestamp:windowNumber:context:eventNumber:clickCount:pressure:", "interface_c_p_event.html#ad6bbef973e2dd36732fe8b51be55e3e7", null ], [ "mouseLocation", "interface_c_p_event.html#a91d4b5abd148e3976056714f6cf658fc", null ], [ "otherEventWithType:location:modifierFlags:timestamp:windowNumber:context:subtype:data1:data2:", "interface_c_p_event.html#a90910bafcdc04fe2641c9350669d9810", null ], [ "pressure", "interface_c_p_event.html#acac4af271cc9cbeb999d0c148767fc99", null ],<|fim▁hole|> [ "type", "interface_c_p_event.html#abbbd5ff6550996fa3ef524ef27a1ef80", null ], [ "window", "interface_c_p_event.html#af9a0e45bbaba9bc6587a222e74bf0703", null ], [ "windowNumber", "interface_c_p_event.html#a15734c6e53f37894f69326ac46ec3600", null ] ];<|fim▁end|>
[ "startPeriodicEventsAfterDelay:withPeriod:", "interface_c_p_event.html#a474ff767ceaf957fbde1a662d01b4788", null ], [ "stopPeriodicEvents", "interface_c_p_event.html#a5071c8f5ce8fd7d8a08e6ae4756b1d2d", null ], [ "timestamp", "interface_c_p_event.html#ab290762686ceda8bb462e0e989f62e04", null ],
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(here, 'requirements.txt')) as fp: requires = fp.readlines() setup( name='cebulany manager', version='0.0.4', classifiers=[], author='Firemark', author_email='[email protected]', url='https://github.com/hackerspace-silesia/cebulany-manager',<|fim▁hole|> install_requires=requires, tests_require=requires, )<|fim▁end|>
packages=find_packages(),
<|file_name|>_virtual_router_peerings_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] class VirtualRouterPeeringsOperations(object): """VirtualRouterPeeringsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.network.v2019_09_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def _delete_initial( self, resource_group_name, # type: str virtual_router_name, # type: str peering_name, # type: str **kwargs # type: Any ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-09-01" accept = "application/json" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualRouterName': self._serialize.url("virtual_router_name", virtual_router_name, 'str'), 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualRouters/{virtualRouterName}/peerings/{peeringName}'} # type: ignore def begin_delete( self, resource_group_name, # type: str virtual_router_name, # type: str peering_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller[None] """Deletes the specified peering from a Virtual Router. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param virtual_router_name: The name of the Virtual Router. :type virtual_router_name: str :param peering_name: The name of the peering. :type peering_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._delete_initial( resource_group_name=resource_group_name, virtual_router_name=virtual_router_name, peering_name=peering_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualRouterName': self._serialize.url("virtual_router_name", virtual_router_name, 'str'), 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualRouters/{virtualRouterName}/peerings/{peeringName}'} # type: ignore def get( self, resource_group_name, # type: str virtual_router_name, # type: str peering_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.VirtualRouterPeering" """Gets the specified Virtual Router Peering. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param virtual_router_name: The name of the Virtual Router. :type virtual_router_name: str :param peering_name: The name of the Virtual Router Peering. 
:type peering_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: VirtualRouterPeering, or the result of cls(response) :rtype: ~azure.mgmt.network.v2019_09_01.models.VirtualRouterPeering :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualRouterPeering"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-09-01" accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualRouterName': self._serialize.url("virtual_router_name", virtual_router_name, 'str'), 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('VirtualRouterPeering', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualRouters/{virtualRouterName}/peerings/{peeringName}'} # type: ignore def _create_or_update_initial( self, resource_group_name, # type: str virtual_router_name, # type: str peering_name, # type: str parameters, # type: "_models.VirtualRouterPeering" **kwargs # type: Any ): # type: (...) 
-> "_models.VirtualRouterPeering" cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualRouterPeering"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._create_or_update_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualRouterName': self._serialize.url("virtual_router_name", virtual_router_name, 'str'), 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'VirtualRouterPeering') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('VirtualRouterPeering', pipeline_response) if response.status_code == 201: deserialized = self._deserialize('VirtualRouterPeering', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualRouters/{virtualRouterName}/peerings/{peeringName}'} # type: ignore def begin_create_or_update( self, resource_group_name, # type: str virtual_router_name, # type: str peering_name, # type: str parameters, # type: "_models.VirtualRouterPeering" **kwargs # type: Any ): # type: (...) -> LROPoller["_models.VirtualRouterPeering"] """Creates or updates the specified Virtual Router Peering. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param virtual_router_name: The name of the Virtual Router. :type virtual_router_name: str :param peering_name: The name of the Virtual Router Peering. :type peering_name: str :param parameters: Parameters supplied to the create or update Virtual Router Peering operation. :type parameters: ~azure.mgmt.network.v2019_09_01.models.VirtualRouterPeering :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either VirtualRouterPeering or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_09_01.models.VirtualRouterPeering] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualRouterPeering"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, virtual_router_name=virtual_router_name, peering_name=peering_name, parameters=parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('VirtualRouterPeering', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualRouterName': self._serialize.url("virtual_router_name", virtual_router_name, 'str'), 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualRouters/{virtualRouterName}/peerings/{peeringName}'} # type: ignore def list( self, resource_group_name, # type: str virtual_router_name, # type: str **kwargs # type: Any ): # type: (...) -> Iterable["_models.VirtualRouterPeeringListResult"] """Lists all Virtual Router Peerings in a Virtual Router resource. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param virtual_router_name: The name of the Virtual Router. 
:type virtual_router_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either VirtualRouterPeeringListResult or the result of cls(response)<|fim▁hole|> cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualRouterPeeringListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-09-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualRouterName': self._serialize.url("virtual_router_name", virtual_router_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('VirtualRouterPeeringListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualRouters/{virtualRouterName}/peerings'} # type: ignore<|fim▁end|>
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_09_01.models.VirtualRouterPeeringListResult] :raises: ~azure.core.exceptions.HttpResponseError """
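A client-side usage sketch; the resource names and peering body are placeholders, and the client is built the usual azure-mgmt way rather than by instantiating this operations class directly:

# Assumes azure-identity and azure-mgmt-network are installed.
from azure.identity import DefaultAzureCredential
from azure.mgmt.network import NetworkManagementClient

client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
poller = client.virtual_router_peerings.begin_create_or_update(
    "my-rg", "my-router", "my-peering",
    {"peer_asn": 65000, "peer_ip": "192.0.2.10"},  # illustrative peering body
)
peering = poller.result()  # block until the LRO finishes
for p in client.virtual_router_peerings.list("my-rg", "my-router"):
    print(p.name)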
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate hydromath; #[test] fn test_rmse() { let test_obs = &[13., 17., 18., 20., 24.]; let test_sim = &[12., 15., 20., 22., 24.]; let result = hydromath::rmse(test_obs, test_sim); assert_eq!(result, (2.6 as f64).sqrt()); } #[test] fn test_rmse_perfect() { let test_obs = &[1., 2., 3., 4., 5.]; let test_sim = &[1., 2., 3., 4., 5.]; let result = hydromath::rmse(test_obs, test_sim); assert_eq!(result, (0 as f64)); } #[test] fn test_rmse_bad() { let obs: [f64; 5] = [1., 2., 3., 4., 5.]; let sum: f64 = obs.iter().fold(0f64, std::ops::Add::add); let m = sum / (obs.len() as f64); let sim = [m, m, m, m, m]; let result = hydromath::rmse(&[1., 2., 3., 4., 5.], &sim); assert_eq!(result, (2.0 as f64).sqrt()); } #[test] fn test_mse() { let test_obs = &[13., 17., 18., 20., 24.]; let test_sim = &[12., 15., 20., 22., 24.]; let result = hydromath::mse(test_obs, test_sim); assert_eq!(result, 2.6); } #[test] fn test_mse_perfect() { let result = hydromath::mse(&[1., 2., 3., 4., 5.], &[1., 2., 3., 4., 5.]); assert_eq!(result, 0.0); } #[test] fn test_mse_bad() { let obs: [f64; 5] = [1., 2., 3., 4., 5.]; let sum: f64 = obs.iter().fold(0f64, std::ops::Add::add); let m = sum / (obs.len() as f64); let sim = [m, m, m, m, m]; let result = hydromath::mse(&[1., 2., 3., 4., 5.], &sim); assert_eq!(result, 2.0); } #[test] fn test_nse() { let test_obs = &[5., 4., 6., 1., 3., 6., 8., 1., 7., 3., 4., 0.5]; let test_sim = &[3., 4.5, 4., 2., 4., 5., 9., 2., 8., 3., 4., 0.8]; let result = hydromath::nse(test_obs, test_sim); assert_eq!(result, 0.783479081472161); } #[test] fn test_nse_perfect() { let result = hydromath::nse(&[1., 2., 3., 4., 5.], &[1., 2., 3., 4., 5.]); assert_eq!(result, 1.0); } #[test] fn test_nse_bad() { let obs: [f64; 5] = [1., 2., 3., 4., 5.]; let sum: f64 = obs.iter().fold(0f64, std::ops::Add::add); let m = sum / (obs.len() as f64); let sim = [m, m, m, m, m]; let result = hydromath::nse(&[1., 2., 3., 4., 5.], &sim); assert_eq!(result, 0.0); } #[test] fn test_kge_perfect() { let data: &[f64; 7] = &[1., 2., 3., 4., 5., 6., 7.];<|fim▁hole|>#[test] fn test_kge_climatology() { let obs_data: &[f64; 5] = &[1., 2., 3., 4., 5.]; let sim_data: &[f64; 5] = &[3., 3., 3., 3., 3.]; let s = hydromath::kge(obs_data, sim_data); assert_eq!(s, 1.0f64 - (2.0f64).sqrt()); } #[test] fn test_kge_biased_climatology() { let obs_data: &[f64; 5] = &[1., 2., 3., 4., 5.]; let sim_data: &[f64; 5] = &[6., 6., 6., 6., 6.]; let s = hydromath::kge(obs_data, sim_data); assert_eq!(s, 1.0f64 - (3.0f64).sqrt()); } #[test] fn test_kge() { let obs_data: &[f64; 13] = &[1., 2., 3., 4., 5., 6., 7., 6., 5., 4., 3., 2., 1.]; let sim_data: &[f64; 13] = &[1., 2., 3., 4., 5., 6., 6., 6., 5., 4., 3., 2., 1.]; let s = hydromath::kge(obs_data, sim_data); assert_eq!(s, 0.93444263181747966307000297092599794268608093261719); }<|fim▁end|>
let s = hydromath::kge(data, data); assert_eq!(s, 1.0); }
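For reference, plain-Python versions of the metrics these Rust tests pin down; the exact semantics (population statistics, r forced to 0 for a constant series) are assumptions back-checked against the expected values above, since hydromath's source is not shown:

def nse(obs, sim):
    # Nash-Sutcliffe efficiency: 1 - squared-error sum over variance sum around the obs mean.
    m = sum(obs) / len(obs)
    return 1 - sum((o - s) ** 2 for o, s in zip(obs, sim)) / sum((o - m) ** 2 for o in obs)

def kge(obs, sim):
    # Kling-Gupta efficiency, Gupta et al. (2009):
    # KGE = 1 - sqrt((r - 1)^2 + (alpha - 1)^2 + (beta - 1)^2)
    n = len(obs)
    mo, ms = sum(obs) / n, sum(sim) / n
    so = (sum((o - mo) ** 2 for o in obs) / n) ** 0.5
    ss = (sum((s - ms) ** 2 for s in sim) / n) ** 0.5
    cov = sum((o - mo) * (s - ms) for o, s in zip(obs, sim)) / n
    r = cov / (so * ss) if so and ss else 0.0  # constant series: r treated as 0
    alpha, beta = ss / so, ms / mo
    return 1 - ((r - 1) ** 2 + (alpha - 1) ** 2 + (beta - 1) ** 2) ** 0.5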
<|file_name|>config.py<|end_file_name|><|fim▁begin|>""" configuration module for awsu, contains two objects """ import boto3 import sqlite3 import logging import getpass import datetime import configparser import uuid import requests import json from dateutil.tz import tzutc from urllib.parse import urlencode, quote_plus from os import environ from bs4 import BeautifulSoup import base64 from lxml import etree class Credential(object): """ credential class """ def __init__(self): self.conn = sqlite3.connect(environ.get('HOME') + '/.aws/config.db') self.initialize_database('credentials') def initialize_database(self, table): cur = self.conn.cursor() tables = [t[0] for t in cur.execute( "SELECT name FROM sqlite_master WHERE type='table'").fetchall()] if table not in tables: stmt = '''CREATE TABLE %s( profile text, access_key text, secret_key text, session_token text, expiration text) ''' % table cur.execute(stmt) self.conn.commit() def get_session(self, profile="default"): if profile is None: profile = "default" cur = self.conn.cursor() self.session = cur.execute( "SELECT * FROM credentials WHERE profile=? LIMIT 1", (profile,)) self.session = self.session.fetchone() if self.session is None or self.is_expired(): if self.is_expired(): cur.execute("DELETE FROM credentials WHERE profile=?", (profile,)) self.conn.commit() creds = self.get_credentials(profile) cur.execute("INSERT INTO credentials VALUES(?,?,?,?,?)", creds) self.conn.commit() return { 'AWS_ACCESS_KEY_ID': creds[1], 'AWS_SECRET_ACCESS_KEY': creds[2], 'AWS_SESSION_TOKEN': creds[3], 'AWS_SECURITY_TOKEN': creds[3] } else: return { 'AWS_ACCESS_KEY_ID': self.session[1], 'AWS_SECRET_ACCESS_KEY': self.session[2], 'AWS_SESSION_TOKEN': self.session[3], 'AWS_SECURITY_TOKEN': self.session[3] } def get_credentials(self, profile="default"): """ return aws profile environment variables """ if profile is None: profile = 'default' # get session token if profile != 'saml': session = boto3.Session(profile_name=profile) sts = session.client('sts') user = User() token = getpass.getpass("Enter MFA Code : ") if profile == "default": res = sts.get_session_token( DurationSeconds=3600, SerialNumber=user.mfa, TokenCode=token ) elif profile == "saml": config_file = configparser.RawConfigParser() config_file.read(environ.get('HOME') + '/.aws/config') if not config_file.has_section(profile): config_file.add_section(profile) username = str(input("Google Email : ")) idp_id = str(input('IDP ID : ')) sp_id = str(input('SP ID : ')) else: username = config_file.get(profile, 'username') idp_id = config_file.get(profile, 'idpid') sp_id = config_file.get(profile, 'spid') passwd = getpass.getpass('Password : ') google = GoogleSAML(username, passwd, idp_id, sp_id) google.auth() saml_res = google.get_saml_response() doc = etree.fromstring(base64.b64decode(saml_res)) roles = google.parse_roles(doc) role_arn, provider = google.pick_one(roles) config_file.set(profile, 'username', google.username) config_file.set(profile, 'idpid', google.idp_id) config_file.set(profile, 'spid', google.sp_id) config_file.set(profile, 'role_arn', role_arn) config_file.set(profile, 'provider', provider) config_file.set(profile, 'durations', str(google.duration_seconds)) with open(environ.get('HOME') + '/.aws/config', 'w+') as f: try: config_file.write(f) finally: f.close() print("Assuming " + config_file.get(profile, 'role_arn')) sts = boto3.client('sts') res = sts.assume_role_with_saml( RoleArn=config_file.get(profile, 'role_arn'), PrincipalArn=config_file.get(profile, 'provider'), SAMLAssertion=saml_res,
DurationSeconds=int(config_file.get(profile, 'durations'))) else: config_file = configparser.RawConfigParser() config_file.read(environ.get('HOME') + '/.aws/credentials') role_arn = config_file.get(profile, 'role_arn') role_name = role_arn.split('/')[-1] random_identifier = str(uuid.uuid4())[4:] role_session = ''.join( [user.username, role_name, random_identifier]) res = sts.assume_role( RoleArn=role_arn, RoleSessionName=role_session, DurationSeconds=3600, SerialNumber=user.mfa, TokenCode=token ) return ( profile, res['Credentials']['AccessKeyId'], res['Credentials']['SecretAccessKey'], res['Credentials']['SessionToken'], res['Credentials']['Expiration'] ) def clean_environment(self): """ remove aws environment variables """ for var in list(environ.keys()): if var.startswith('AWS_'):<|fim▁hole|>
 del environ[var] def is_expired(self): try: stored_date = self.session[4] except (TypeError, IndexError): return False now = datetime.datetime.utcnow() session_time = datetime.datetime.strptime( stored_date, '%Y-%m-%d %H:%M:%S+00:00') return now > session_time class User(object): def __init__(self): sts = boto3.client('sts') caller = sts.get_caller_identity() self.arn = caller['Arn'] self.account_id = caller['Account'] self.username = self.get_username() self.mfa = self.get_mfa() def get_username(self): username = str(self.arn).split('/')[-1] return username def get_mfa(self): mfa = "arn:aws:iam::" + self.account_id + ":mfa/" + self.username return mfa class GoogleSAML(object): def __init__(self, username, passwd, idp_id, sp_id): """ method for google saml auth init""" self.username = username self.password = passwd self.idp_id = idp_id self.sp_id = sp_id self.duration_seconds = 3600 payload = { 'idpid': str(self.idp_id), 'spid': str(self.sp_id), 'forceauthn': 'false' } params = urlencode(payload, quote_via=quote_plus) self.url = "https://accounts.google.com/o/saml2/initsso?" 
+ params def auth(self): self.request = requests.Session() res = self.request.get(self.url) res.raise_for_status() page = BeautifulSoup(res.text, 'html.parser') gaia_loginform = page.find( 'form', {'id': 'gaia_loginform'}).get('action') payload = {} payload['gxf'] = page.find('input', {'name': 'gxf'}).get('value') payload['continue'] = page.find( 'input', {'name': 'continue'}).get('value') payload['ltmpl'] = page.find('input', {'name': 'ltmpl'}).get('value') payload['sarp'] = 1 payload['scc'] = 1 payload['oauth'] = page.find('input', {'name': 'oauth'}).get('value') payload['_utf8'] = page.find('input', {'name': '_utf8'}).get('value') payload['bgresponse'] = page.find( 'input', {'name': 'bgresponse'}).get('value') payload['Email'] = self.username payload['Passwd'] = self.password res = self.request.post(gaia_loginform, data=payload) res.raise_for_status() self.request.headers['Referer'] = res.url page = BeautifulSoup(res.text, 'html.parser') payload['ProfileInformation'] = page.find( 'input', {'name': 'ProfileInformation'}).get('value') payload['SessionState'] = page.find( 'input', {'name': 'SessionState'}).get('value') payload['Passwd'] = self.password passwd_challenge_url = page.find( 'form', {'id': 'gaia_loginform'}).get('action') res = self.request.post(passwd_challenge_url, data=payload) res.raise_for_status() self.request.headers['Referer'] = res.url if "challenge/az" in res.url: res = self.auth_prompt(res, payload) self.session_state = res def auth_prompt(self, session, payload): res = BeautifulSoup(session.text, 'html.parser') auth_url = session.url.split('?')[0] data_key = res.find('div', {'data-api-key': True}).get('data-api-key') data_tx_id = res.find('div', {'data-tx-id': True}).get('data-tx-id') params = { 'alt': 'json', 'key': data_key } params = urlencode(params, quote_via=quote_plus) prompt_url = "https://content.googleapis.com/cryptauth/v1/authzen/awaittx?" + params prompt_body = {'txId': data_tx_id} print("Open the Google App, and tap 'Yes' on the prompt to sign in ...") self.request.headers['Referer'] = session.url res_prompt = self.request.post(prompt_url, json=prompt_body) parsed = json.loads(res_prompt.text) payload = { 'challengeId': res.find('input', {'name': 'challengeId'}).get('value'), 'challengeType': res.find('input', {'name': 'challengeType'}).get('value'), 'continue': res.find('input', {'name': 'continue'}).get('value'), 'scc': res.find('input', {'name': 'scc'}).get('value'), 'sarp': res.find('input', {'name': 'sarp'}).get('value'), 'TL': res.find('input', {'name': 'TL'}).get('value'), 'gxf': res.find('input', {'name': 'gxf'}).get('value'), 'token': parsed['txToken'], 'action': res.find('input', {'name': 'action'}).get('value'), 'TrustDevice': 'on', } res = self.request.post(auth_url, data=payload) res.raise_for_status() return res def get_saml_response(self): res = BeautifulSoup(self.session_state.text, 'html.parser') saml_response = res.find( 'input', {'name': 'SAMLResponse'}).get('value') return saml_response def parse_roles(self, doc): roles = {} for x in doc.xpath('//*[@Name = "https://aws.amazon.com/SAML/Attributes/Role"]//text()'): if "arn:aws:iam:" not in x: continue res = x.split(',') roles[res[0]] = res[1] return roles def pick_one(self, roles): while True: for i, role in enumerate(roles): print("[{:>3d}] {}".format(i + 1, role)) prompt = 'Type the number (1 - {:d}) of the role to assume: '.format( len(roles)) choice = input(prompt) try: num = int(choice) return list(roles.items())[num - 1] except: print("Invalid choice, try again")<|fim▁end|>
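For context, a minimal sketch of how the Credential class above might be driven by a wrapper script. The real awsu entry point is not part of this row, so activate_profile and its flow are assumptions based on the class API alone.

# Hypothetical driver for Credential: refresh or reuse a cached session and
# export the returned AWS_* variables into the current process environment.
from os import environ

def activate_profile(profile="default"):
    cred = Credential()                        # opens ~/.aws/config.db
    cred.clean_environment()                   # drop stale AWS_* variables first
    session_vars = cred.get_session(profile)   # cached row or a fresh STS call
    environ.update(session_vars)               # visible to child processes
    return session_vars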
<|file_name|>clipread.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- ##Copyright (c) 2017 Benoit Valot and Panisa Treepong ##[email protected] ##UMR 6249 Chrono-Environnement, Besançon, France ##Licence GPL from . import variables class ClipRead(): """Clip read object""" def __init__(self, alignedsegment): self.read_seq = alignedsegment.query_sequence self.read_name = alignedsegment.query_name self.read_start = alignedsegment.query_alignment_start #0 left self.read_end = alignedsegment.query_alignment_end #exclusive self.read_len = alignedsegment.query_alignment_length self.ref_start = alignedsegment.reference_start #0 left self.ref_end = alignedsegment.reference_end # exclusive self.ref_len = alignedsegment.reference_length self.cigar = alignedsegment.cigarstring self.cigartuples = alignedsegment.cigartuples self.isreverse = alignedsegment.is_reverse def isstartclip(self): """Test if the read is clipped at the start (True) or at the end (False), looking at the CIGAR tuples""" if self.cigartuples is None: raise Exception("ClipRead must be aligned") if self.cigartuples[0][0] in variables.cigarclip: return True elif self.cigartuples[-1][0] in variables.cigarclip: return False else: raise Exception("ClipRead must contain clip part at start or end") def getdr(self, drstart, drend): """Return the dr sequence if complete or return None""" s = self.read_start + (drstart - self.ref_start) ##if < 0, incomplete dr if s < 0: return None e = self.read_end - (self.ref_end - drend) if e > len(self.read_seq): return None return self.read_seq[s:e]<|fim▁hole|>
 """Return the position of the clip""" if self.isstartclip(): return self.ref_start else: return self.ref_end def getclipseq(self): """return clip part of the read, except for hard clip return None""" if len(self.read_seq) == self.read_len: return None if self.isstartclip(): return self.read_seq[:self.read_start] else: return self.read_seq[self.read_end:] def __len__(self): return len(self.read_seq) def __repr__(self): return self.read_seq def __str__(self): return str(self.ref_start) + ": " + str(self.read_start) + self.read_seq + \ str(self.read_end) + " :" + str(self.ref_end) if __name__=='__main__': import doctest doctest.testmod()<|fim▁end|>
def getclippos(self):
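For context, a short sketch of ClipRead in use: iterating a BAM file with pysam (an assumed dependency; the class mirrors pysam's AlignedSegment attributes) and collecting clip positions plus direct-repeat sequences. The clip-operation codes 4 and 5 stand in for variables.cigarclip, which is not shown in this row.

# Hypothetical caller for ClipRead; BAM_CSOFT_CLIP = 4, BAM_CHARD_CLIP = 5.
import pysam

CLIP_OPS = {4, 5}  # assumed to mirror variables.cigarclip

def collect_clips(bam_path, dr_start, dr_end):
    clips = []
    with pysam.AlignmentFile(bam_path, "rb") as bam:
        for segment in bam:
            tuples = segment.cigartuples
            if tuples is None:
                continue  # unaligned read, no CIGAR
            if tuples[0][0] not in CLIP_OPS and tuples[-1][0] not in CLIP_OPS:
                continue  # ClipRead requires a clipped start or end
            read = ClipRead(segment)
            dr = read.getdr(dr_start, dr_end)
            if dr is not None:  # direct repeat fully covered by the read
                clips.append((read.getclippos(), read.getclipseq(), dr))
    return clips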
<|file_name|>item.ts<|end_file_name|><|fim▁begin|>export class Item { constructor(public title: string) { } <|fim▁hole|><|fim▁end|>
}
<|file_name|>onesignal-mock.ts<|end_file_name|><|fim▁begin|>import { ParsedUrlQuery } from 'querystring'; import * as nock from 'nock'; import * as response from './response'; import { APP1, APP1_DEVICE1, APP1_NOTIFICATION1, APP1_SEGMENT1, APP1_EXTERNAL_USER_ID1, MOCKED_API_HOST, MOCKED_FAILING_400_API_HOST, USER_AUTH_KEY, } from './constants'; import { APP_ID_FIELD_NAME, APP_ID_QUERY_NAME, APPS_PATH, APPS_SEGMENTS, APPS_USERS, DEVICES_CSVEXPORT, DEVICES_ONFOCUS, DEVICES_ONPURCHASE, DEVICES_ONSESSION, DEVICES_PATH, NOTIFICATIONS_HISTORY, NOTIFICATIONS_PATH, } from '../../src/constants'; // Client paths. const APP1_CREATE_NOTIFICATION_PATH = `/${NOTIFICATIONS_PATH}`; const APP1_CANCEL_NOTIFICATION1_PATH = `/${NOTIFICATIONS_PATH}/${APP1_NOTIFICATION1}`; const APP1_VIEW_NOTIFICATION1_PATH = `/${NOTIFICATIONS_PATH}/${APP1_NOTIFICATION1}`; const APP1_VIEW_NOTIFICATIONS_PATH = `/${NOTIFICATIONS_PATH}`; const APP1_NOTIFICATION_HISTORY_PATH = `/${NOTIFICATIONS_PATH}/${APP1_NOTIFICATION1}/${NOTIFICATIONS_HISTORY}`; const APP1_VIEW_DEVICES_PATH = `/${DEVICES_PATH}`; const APP1_VIEW_DEVICE1_PATH = `/${DEVICES_PATH}/${APP1_DEVICE1}`; const APP1_ADD_DEVICE_PATH = `/${DEVICES_PATH}`; const APP1_EDIT_DEVICE1_PATH = `/${DEVICES_PATH}/${APP1_DEVICE1}`; const APP1_EDIT_TAGS_WITH_EXTERNAL_USER_ID_PATH = `/${APPS_PATH}/${APP1.appId}/${APPS_USERS}/${APP1_EXTERNAL_USER_ID1}`; const APP1_DELETE_DEVICE1_PATH = `/${DEVICES_PATH}/${APP1_DEVICE1}`; const APP1_DEV1_NEW_SESSION_PATH = `/${DEVICES_PATH}/${APP1_DEVICE1}/${DEVICES_ONSESSION}`; const APP1_DEV1_NEW_PURCHASE_PATH = `/${DEVICES_PATH}/${APP1_DEVICE1}/${DEVICES_ONPURCHASE}`; const APP1_DEV1_INCREMENT_SESSION_LENGTH_PATH = `/${DEVICES_PATH}/${APP1_DEVICE1}/${DEVICES_ONFOCUS}`; const APP1_EXPORT_CSV_PATH = `/${DEVICES_PATH}/${DEVICES_CSVEXPORT}`; const APP1_CREATE_SEGMENT_PATH = `/${APPS_PATH}/${APP1.appId}/${APPS_SEGMENTS}`; const APP1_DELETE_SEGMENT1_PATH = `/${APPS_PATH}/${APP1.appId}/${APPS_SEGMENTS}/${APP1_SEGMENT1}`; // User Client paths. const VIEW_APPS_PATH = `/${APPS_PATH}`; const VIEW_APP1_PATH = `/${APPS_PATH}/${APP1.appId}`; const CREATE_APP_PATH = `/${APPS_PATH}`; const UPDATE_APP1_PATH = `/${APPS_PATH}/${APP1.appId}`; // APP1 options for nock. const app1NockOptions = { reqheaders: { authorization: `Basic ${APP1.apiKey}`, 'Content-Type': 'application/json; charset=utf-8', }, }; // User auth key options for nock. const userNockOptions = { reqheaders: { authorization: `Basic ${USER_AUTH_KEY}`, 'Content-Type': 'application/json; charset=utf-8', }, }; // expect app_id field to be in request query string. const expectAppIdInQuery = (appId: string) => { return (queryObject: ParsedUrlQuery): boolean => { return queryObject && queryObject[APP_ID_QUERY_NAME] === appId; }; }; // expect app_id field to be in request body. 
const expectAppIdInBody = (appId: string) => { return (body: any): boolean => { return body && body[APP_ID_FIELD_NAME] === appId; }; }; // Client - 200 OK responses for MOCKED_API_HOST and APP1 nock(MOCKED_API_HOST, app1NockOptions) .post(APP1_CREATE_NOTIFICATION_PATH, expectAppIdInBody(APP1.appId)) .reply(response.createNotificationResponse['200OK'].status, response.createNotificationResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .delete(APP1_CANCEL_NOTIFICATION1_PATH) .query(expectAppIdInQuery(APP1.appId)) .reply(response.cancelNotificationResponse['200OK'].status, response.cancelNotificationResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .get(APP1_VIEW_NOTIFICATION1_PATH) .query(expectAppIdInQuery(APP1.appId)) .reply(response.viewNotificationResponse['200OK'].status, response.viewNotificationResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .get(APP1_VIEW_NOTIFICATIONS_PATH) .query(expectAppIdInQuery(APP1.appId)) .reply(response.viewNotificationsResponse['200OK'].status, response.viewNotificationsResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .post(APP1_NOTIFICATION_HISTORY_PATH, expectAppIdInBody(APP1.appId)) .reply(response.notificationHistoryResponse['200OK'].status, response.notificationHistoryResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .get(APP1_VIEW_DEVICES_PATH) .query(expectAppIdInQuery(APP1.appId)) .reply(response.viewDevicesResponse['200OK'].status, response.viewDevicesResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions)<|fim▁hole|>
 nock(MOCKED_API_HOST, app1NockOptions) .post(APP1_ADD_DEVICE_PATH, expectAppIdInBody(APP1.appId)) .reply(response.addDeviceResponse['200OK'].status, response.addDeviceResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .put(APP1_EDIT_DEVICE1_PATH, expectAppIdInBody(APP1.appId)) .reply(response.editDeviceResponse['200OK'].status, response.editDeviceResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .put(APP1_EDIT_TAGS_WITH_EXTERNAL_USER_ID_PATH, expectAppIdInBody(APP1.appId)) .reply( response.editTagsWithExternalUserIdResponse['200OK'].status, response.editTagsWithExternalUserIdResponse['200OK'].response, ) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .delete(APP1_DELETE_DEVICE1_PATH) .query(expectAppIdInQuery(APP1.appId)) .reply(response.deleteDeviceResponse['200OK'].status, response.deleteDeviceResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .post(APP1_DEV1_NEW_SESSION_PATH, expectAppIdInBody(APP1.appId)) .reply(response.newSessionResponse['200OK'].status, response.newSessionResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .post(APP1_DEV1_NEW_PURCHASE_PATH, expectAppIdInBody(APP1.appId)) .reply(response.newPurchaseResponse['200OK'].status, response.newPurchaseResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .post(APP1_DEV1_INCREMENT_SESSION_LENGTH_PATH, expectAppIdInBody(APP1.appId)) .reply( response.incrementSessionLengthResponse['200OK'].status, response.incrementSessionLengthResponse['200OK'].response, ) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .post(APP1_EXPORT_CSV_PATH) 
.query(expectAppIdInQuery(APP1.appId)) .reply(response.exportCSVResponse['200OK'].status, response.exportCSVResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .post(APP1_CREATE_SEGMENT_PATH) .reply(response.createSegmentResponse['200OK'].status, response.createSegmentResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, app1NockOptions) .delete(APP1_DELETE_SEGMENT1_PATH) .reply(response.deleteSegmentResponse['200OK'].status, response.deleteSegmentResponse['200OK'].response) .persist(); // Client - 400 BAD_REQUEST responses for MOCKED_FAILING_400_API_HOST and APP1 nock(MOCKED_FAILING_400_API_HOST, app1NockOptions) .post(APP1_CREATE_NOTIFICATION_PATH, expectAppIdInBody(APP1.appId)) .reply(response.createNotificationResponse['400BAD'].status, response.createNotificationResponse['400BAD'].response) .persist(); // UserClient - 200 OK responses for MOCKED_API_HOST and APP1 nock(MOCKED_API_HOST, userNockOptions) .get(VIEW_APPS_PATH) .reply(response.viewAppsResponse['200OK'].status, response.viewAppsResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, userNockOptions) .get(VIEW_APP1_PATH) .reply(response.viewAppResponse['200OK'].status, response.viewAppResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, userNockOptions) .post(CREATE_APP_PATH) .reply(response.createAppResponse['200OK'].status, response.createAppResponse['200OK'].response) .persist(); nock(MOCKED_API_HOST, userNockOptions) .put(UPDATE_APP1_PATH) .reply(response.updateAppResponse['200OK'].status, response.updateAppResponse['200OK'].response) .persist(); // UserClient - 400 BAD_REQUEST responses for MOCKED_FAILING_400_API_HOST and APP1 nock(MOCKED_FAILING_400_API_HOST, userNockOptions) .post(CREATE_APP_PATH) .reply(response.createAppResponse['400BAD'].status, response.createAppResponse['400BAD'].response) .persist();<|fim▁end|>
.get(APP1_VIEW_DEVICE1_PATH) .query(expectAppIdInQuery(APP1.appId)) .reply(response.viewDeviceResponse['200OK'].status, response.viewDeviceResponse['200OK'].response) .persist();
<|file_name|>svm.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright (C) 2008-2015, Luis Pedro Coelho <[email protected]> # vim: set ts=4 sts=4 sw=4 expandtab smartindent: # # License: MIT. See COPYING.MIT file in the milk distribution from __future__ import division from .classifier import normaliselabels, ctransforms_model from .base import supervised_model import numpy import numpy as np from . import _svm __all__ = [ 'rbf_kernel', 'polynomial_kernel', 'precomputed_kernel', 'dot_kernel', 'svm_raw', 'svm_binary', 'svm_to_binary', 'svm_sigmoidal_correction', 'sigma_value_fisher', 'fisher_tuned_rbf_svm', ] def _svm_apply(SVM, q): ''' f_i = _svm_apply(SVM, q) @internal: This is mostly used for testing ''' X,Y,Alphas,b,C,kernel=SVM N = len(X) s = 0.0 for i in range(N): s += Alphas[i] * Y[i] * kernel(q, X[i]) return s - b def svm_learn_smo(X,Y,kernel,C,eps=1e-4,tol=1e-2,cache_size=(1<<20)): ''' Learn a svm classifier X: data Y: labels in SVM format (ie Y[i] in (1,-1)) This is a very raw interface. In general, you should use a class like svm_classifier. Implements the Sequential Minimum Optimisation Algorithm from Platt's "Fast training of support vector machines using sequential minimal optimization" in Advances in kernel methods: support vector learning Pages: 185 - 208 Year of Publication: 1999 ISBN:0-262-19416-3 ''' assert numpy.all(numpy.abs(Y) == 1) assert len(X) == len(Y) N = len(Y) Y = Y.astype(numpy.int32) params = numpy.array([0,C,1e-3,1e-5],numpy.double) Alphas0 = numpy.zeros(N, numpy.double) _svm.eval_SMO(X,Y,Alphas0,params,kernel,cache_size) return Alphas0, params[0] def svm_learn_libsvm(features, labels, kernel, C, eps=1e-4, tol=1e-2, cache_size=(1<<20), alphas=None): ''' Learn a svm classifier using LIBSVM optimiser This is a very raw interface. In general, you should use a class like svm_classifier. This uses the LIBSVM optimisation algorithm Parameters ---------- X : ndarray data Y : ndarray labels in SVM format (ie Y[i] in (1,-1)) kernel : kernel C : float eps : float, optional tol : float, optional cache_size : int, optional alphas : ndarray, optional Returns ------- alphas : ndarray b : float ''' if not np.all(np.abs(labels) == 1): raise ValueError('milk.supervised.svm.svm_learn_libsvm: Y[i] != (-1,+1)') assert len(features) == len(labels) n = len(labels) labels = labels.astype(np.int32) p = -np.ones(n, np.double) params = np.array([0,C,eps,tol], dtype=np.double) if alphas is None: alphas = np.zeros(n, np.double) elif alphas.dtype != np.double or len(alphas) != n: raise ValueError('milk.supervised.svm_learn_libsvm: alphas is in wrong format') _svm.eval_LIBSVM(features, labels, alphas, p, params, kernel, cache_size) return alphas, params[0] class preprocessed_rbf_kernel(object): def __init__(self, X, sigma, beta): self.X = X self.Xsum = (X**2).sum(1) self.sigma = sigma self.beta = beta def call_many(self, qs): from milk.unsupervised import pdist dists = pdist(self.X, qs, 'euclidean2') dists /= -self.sigma np.exp(dists, dists) dists *= self.beta return dists.T def __call__(self, q): minus_d2_sigma = np.dot(self.X,q) minus_d2_sigma *= 2. 
minus_d2_sigma -= self.Xsum minus_d2_sigma -= np.dot(q,q) minus_d2_sigma /= self.sigma return self.beta * np.exp(minus_d2_sigma) class rbf_kernel(object): ''' kernel = rbf_kernel(sigma,beta=1) Radial Basis Function kernel Returns a kernel (ie, a function that implements) beta * exp( - ||x1 - x2|| / sigma) ''' def __init__(self, sigma, beta=1): self.sigma = sigma self.beta = beta self.kernel_nr_ = 0 self.kernel_arg_ = float(sigma) def __call__(self, x1, x2): d2 = x1 - x2 d2 **= 2 d2 = d2.sum() res = self.beta*np.exp(-d2/self.sigma) return res def __str__(self): return 'rbf_kernel(%s, %s)' % (self.sigma, self.beta) __repr__ = __str__ def preprocess(self, X): return preprocessed_rbf_kernel(X, self.sigma, self.beta) class polynomial_kernel(object): ''' kernel = polynomial_kernel(d,c=1) returns a kernel (ie, a function) that implements: (<x1,x2>+c)**d ''' def __init__(self, d, c=1): self.d = d self.c = c def __call__(self,x1,x2): return (np.dot(x1,x2)+self.c)**self.d class precomputed_kernel(object): ''' kernel = precomputed_kernel(kmatrix) A "fake" kernel which is precomputed. ''' def __init__(self, kmatrix, copy=False): self.kmatrix = np.ascontiguousarray(kmatrix, np.double, copy=copy) self.kernel_nr_ = 1 self.kernel_arg_ = 0. def __call__(self, x0, x1): return self.kmatrix[x0,x1] class _call_kernel(object): def __init__(self, k, svs): self.svs = svs self.kernel = k def __call__(self, q): return np.array([self.kernel(s, q) for s in self.svs]) class preprocessed_dot_kernel(object): def __init__(self, svs): self.svs = svs def __call__(self, x1): return np.dot(self.svs, x1) class dot_kernel(object): def __init__(self): self.kernel_nr_ = 2 self.kernel_arg_ = 0. def __call__(self, x0, x1): return np.dot(x0, x1) def preprocess(self, svs): return preprocessed_dot_kernel(svs) class svm_raw_model(supervised_model): def __init__(self, svs, Yw, b, kernel): self.svs = svs self.Yw = Yw self.b = b self.kernel = kernel try: self.kernelfunction = self.kernel.preprocess(self.svs) except AttributeError: self.kernelfunction = _call_kernel(self.kernel, self.svs) def apply_many(self, qs): try: qs = self.kernelfunction.call_many(qs) except AttributeError: qs = np.array(list(map(self.kernelfunction, qs))) return np.dot(qs, self.Yw) - self.b def apply(self, q): Q = self.kernelfunction(q) return np.dot(Q, self.Yw) - self.b class svm_raw(object): ''' svm_raw: classifier classifier = svm_raw(kernel, C, eps=1e-3, tol=1e-8) Parameters ---------- kernel : callable the kernel to use. This should be a function that takes two data arguments see rbf_kernel and polynomial_kernel. C : float the C parameter eps : float, optional the precision to which to solve the problem (default 1e-3) tol : float, optional (|x| < tol) is considered zero ''' def __init__(self, kernel=None, C=1., eps=1e-3, tol=1e-8): self.C = C self.kernel = kernel self.eps = eps self.tol = tol self.algorithm = 'libsvm' def train(self, features, labels, normalisedlabels=False, **kwargs): assert self.kernel is not None, 'milk.supervised.svm_raw.train: kernel not set!' assert self.algorithm in ('libsvm','smo'), 'milk.supervised.svm_raw: unknown algorithm (%s)' % self.algorithm assert not (np.isinf(self.C) or np.isnan(self.C)), 'milk.supervised.svm_raw: setting C to NaN or Inf causes problems.' 
features = np.asanyarray(features) if normalisedlabels: Y = labels.copy() else: Y,_ = normaliselabels(labels) assert Y.max() == 1, 'milk.supervised.svm_raw can only handle binary problems' Y *= 2 Y -= 1 kernel = self.kernel try: kernel = (self.kernel.kernel_nr_, self.kernel.kernel_arg_) features = np.ascontiguousarray(features, np.double) except AttributeError: pass if self.algorithm == 'smo': alphas,b = svm_learn_smo(features,Y,kernel,self.C,self.eps,self.tol) else: alphas,b = svm_learn_libsvm(features,Y,kernel,self.C,self.eps,self.tol) svsi = (alphas != 0) svs = features[svsi] w = alphas[svsi] Y = Y[svsi] Yw = w * Y return svm_raw_model(svs, Yw, b, self.kernel) def get_params(self): return self.C, self.eps,self.tol def set_params(self,params): self.C,self.eps,self.tol = params def set_option(self, optname, value): setattr(self, optname, value) def learn_sigmoid_constants(F,Y, max_iters=None, min_step=1e-10, sigma=1e-12, eps=1e-5): ''' A,B = learn_sigmoid_constants(F,Y)<|fim▁hole|> This is a very low-level interface look into the svm_classifier class. Parameters ---------- F : Values of the function F Y : Labels (in boolean format, ie, in (0,1)) Other Parameters ---------------- max_iters : Maximum nr. of iterations min_step : Minimum step sigma : sigma eps : A small number Reference for Implementation ---------------------------- Implements the algorithm from "A Note on Platt's Probabilistic Outputs for Support Vector Machines" by Lin, Lin, and Weng. Machine Learning, Vol. 68, No. 3. (23 October 2007), pp. 267-276 ''' # Below we use safe constructs to avoid using the overflown values, but we # must compute them because of the way numpy works. errorstate = np.seterr(over='ignore') # the deci[i] array is called F in this code F = np.asanyarray(F) Y = np.asanyarray(Y) assert len(F) == len(Y) assert numpy.all( (Y == 1) | (Y == 0) ) if max_iters is None: max_iters = 1000 prior1 = Y.sum() prior0 = len(F)-prior1 hi_t = (prior1+1.)/(prior1+2.) lo_t = 1./(prior0+2.) T = Y*hi_t + (1-Y)*lo_t A = 0. B = np.log( (prior0+1.)/(prior1+1.) ) def target(A,B): fApB = F*A + B lef = np.log1p(np.exp(fApB)) lemf = np.log1p(np.exp(-fApB)) fvals = np.choose(fApB >= 0, ( T*fApB + lemf, (T-1.)*fApB + lef)) return np.sum(fvals) fval = target(A,B) for iter in range(max_iters): fApB = F*A + B ef = np.exp(fApB) emf = np.exp(-fApB) p = np.choose(fApB >= 0, ( emf/(1.+emf), 1./(1.+ef) )) q = np.choose(fApB >= 0, ( 1/(1.+emf), ef/(1.+ef) )) d2 = p * q h11 = np.dot(F*F,d2) + sigma h22 = np.sum(d2) + sigma h21 = np.dot(F,d2) d1 = T - p g1 = np.dot(F,d1) g2 = np.sum(d1) if abs(g1) < eps and abs(g2) < eps: # Stopping criteria break det = h11*h22 - h21*h21 dA = - (h22*g1 - h21*g2)/det dB = - (h21*g1 + h11*g2)/det gd = g1*dA + g2*dB stepsize = 1. while stepsize >= min_step: newA = A + stepsize*dA newB = B + stepsize*dB newf = target(newA,newB) if newf < fval+eps*stepsize*gd: A = newA B = newB fval = newf break stepsize /= 2 else: print('Line search fails') break np.seterr(**errorstate) return A,B class svm_binary_model(supervised_model): def __init__(self, classes): self.classes = classes self.raw = False def apply(self,f): if self.raw: return f return self.classes[f >= 0.] 
class svm_binary(object): ''' classifier = svm_binary() model = classifier.train(features, labels) assert model.apply(f) in labels ''' def train(self, features, labels, normalisedlabels=False, **kwargs): if normalisedlabels: return svm_binary_model( (0,1) ) assert len(labels) >= 2, 'Cannot train from a single example' names = sorted(set(labels)) assert len(names) == 2, 'milk.supervised.svm.svm_binary.train: Can only handle two class problems' return svm_binary_model(names) class svm_to_binary(object): ''' svm_to_binary(base_svm) A simple wrapper so that svm_to_binary(base_svm) is a model that takes the base_svm classifier and then binarises its model output. NOTE: This class does the same job as:: ctransforms(base_svm, svm_binary()) ''' def __init__(self, svm_base): ''' binclassifier = svm_to_binary(svm_base) a classifier that binarises the output of svm_base. ''' self.base = svm_base def train(self, features, labels, **kwargs): model = self.base.train(features, labels, **kwargs) binary = svm_binary() binary_model = binary.train(features, labels, **kwargs) return ctransforms_model([model, binary_model]) def set_option(self, opt, value): self.base.set_option(opt, value) class svm_sigmoidal_correction_model(supervised_model): def __init__(self, A, B): self.A = A self.B = B def apply(self,features): return 1./(1.+numpy.exp(features*self.A+self.B)) class svm_sigmoidal_correction(object): ''' svm_sigmoidal_correction : a classifier Sigmoidal approximation for obtaining a probability estimate out of the output of an SVM. ''' def __init__(self): self.max_iters = None def train(self, features, labels, **kwargs): A,B = learn_sigmoid_constants(features,labels,self.max_iters) return svm_sigmoidal_correction_model(A, B) def get_params(self): return self.max_iters def set_params(self,params): self.max_iters = params def sigma_value_fisher(features,labels): ''' f = sigma_value_fisher(features,labels) value_s = f(s) Computes a function which computes how good the value of sigma is for the features. This function should be *minimised* for a good value of sigma. Parameters ----------- features : features matrix as 2-ndarray. Returns ------- f : a function: float -> float this function should be minimised for a good `sigma` Reference ---------- Implements the measure in "Determination of the spread parameter in the Gaussian kernel for classification and regression" by Wenjian Wanga, Zongben Xua, Weizhen Luc, and Xiaoyun Zhanga ''' features = np.asanyarray(features) xij = np.dot(features,features.T) f2 = np.sum(features**2,1) d = f2-2*xij d = d.T + f2 N1 = (labels==0).sum() N2 = (labels==1).sum() C1 = -d[labels == 0][:,labels == 0] C2 = -d[labels == 1][:,labels == 1] C12 = -d[labels == 0][:,labels == 1] C1 = C1.copy() C2 = C2.copy() C12 = C12.copy() def f(sigma): sigma = float(sigma) N1 = C1.shape[0] N2 = C2.shape[0] if C12.shape != (N1,N2): raise ValueError C1v = np.sum(np.exp(C1/sigma))/N1 C2v = np.sum(np.exp(C2/sigma))/N2 C12v = np.sum(np.exp(C12/sigma))/N1/N2 return (N1 + N2 - C1v - C2v)/(C1v/N1+C2v/N2 - 2.*C12v) return f class fisher_tuned_rbf_svm(object): ''' F = fisher_tuned_rbf_svm(sigmas, base) Returns a wrapper classifier that uses RBF kernels automatically tuned using sigma_value_fisher. 
''' def __init__(self, sigmas, base): self.sigmas = sigmas self.base = base def train(self, features, labels, **kwargs): f = sigma_value_fisher(features, labels) fs = [f(s) for s in self.sigmas] self.sigma = self.sigmas[np.argmin(fs)] self.base.set_option('kernel',rbf_kernel(self.sigma)) return self.base.train(features, labels, **kwargs)<|fim▁end|>
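A toy run of the pieces above, on fabricated data: svm_raw learns the margin function and svm_to_binary wraps it so the model returns a class label. The data, sigma, and C values here are invented for illustration.

# Illustrative only: two Gaussian blobs, RBF kernel, then classify one point.
import numpy as np

def demo():
    rng = np.random.RandomState(0)
    features = np.vstack([rng.randn(20, 2) - 2, rng.randn(20, 2) + 2])
    labels = np.repeat([0, 1], 20)
    learner = svm_to_binary(svm_raw(kernel=rbf_kernel(sigma=1.0), C=1.0))
    model = learner.train(features, labels)
    return model.apply(np.array([2.5, 2.5]))  # expected: label 1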
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models class Music(models.Model): url = models.CharField('URL', max_length=255) title = models.CharField('título', max_length=200, blank=True)<|fim▁hole|> file = models.FileField(upload_to='')<|fim▁end|>
artist = models.CharField('artista', max_length=200, blank=True) genre = models.CharField('gênero', max_length=100, blank=True)
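For illustration, one way the Music model above could be populated; the surrounding Django project (settings, MEDIA_ROOT, migrations) is assumed, and the placeholder file content is invented.

# Hypothetical helper: create a Music row with an empty placeholder file.
from django.core.files.base import ContentFile

def save_track(url, title, artist="", genre=""):
    music = Music(url=url, title=title, artist=artist, genre=genre)
    music.file.save(title + ".mp3", ContentFile(b""), save=False)
    music.save()
    return music.pk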
<|file_name|>test_litemq.py<|end_file_name|><|fim▁begin|># Copyright (C) Ivan Kravets <[email protected]> # See LICENSE for details. # pylint: disable=W0212,W0613 from twisted.internet.defer import Deferred, DeferredList from twisted.python.failure import Failure from twisted.trial.unittest import TestCase import smartanthill.litemq.exchange as ex from smartanthill.exception import LiteMQResendFailed class LiteMQCase(TestCase): g_resent_nums = 0 def test_declare_exchange(self): for type_, class_ in {"direct": ex.ExchangeDirect, "fanout": ex.ExchangeFanout}.items(): self.assertIsInstance( ex.ExchangeFactory().newExchange("exchange_name", type_), class_ ) self.assertRaises( AttributeError, lambda: ex.ExchangeFactory().newExchange("exchange_name", "unknown-type") ) def test_queue_ack_success(self): message, properties = "Test message", {"foo": "bar"} def _callback(m, p): self.assertEqual(m, message) self.assertEqual(p, properties) return True def _resback(result): self.assertIsInstance(result, bool) self.assertEqual(result, True) q = ex.Queue("queue_name", "routing_key", _callback, ack=True) d = q.put(message, properties) self.assertIsInstance(d, Deferred) d.addCallbacks(_resback) return d def test_queue_ack_fails(self): self.g_resent_nums, resend_max = 0, 3 def _callback(m, p): self.g_resent_nums += 1 # test exception if self.g_resent_nums == 1: return 1/0 # test "ack-invalid" that is equl to False else: return False def _errback(result): self.assertIsInstance(result, Failure) self.assertTrue(result.check(LiteMQResendFailed)) self.assertEqual(resend_max, self.g_resent_nums) q = ex.Queue("queue_name", "routing_key", _callback, ack=True) q.RESEND_MAX = resend_max q.RESEND_DELAY = 0 d = q.put("Test message", {"foo": "bar"}) self.assertIsInstance(d, Deferred) d.addBoth(_errback) return d def test_queue_nonack(self): self.g_resent_nums, resend_max = 0, 3 def _callback(m, p): self.g_resent_nums += 1 return 1/0 def _errback(result): self.assertNotIsInstance(result, Failure) self.assertIsInstance(result, bool) self.assertEqual(result, False) self.assertEqual(self.g_resent_nums, 1) q = ex.Queue("queue_name", "routing_key", _callback, ack=False) q.RESEND_MAX = resend_max q.RESEND_DELAY = 0 d = q.put("Test message", {"foo": "bar"}) self.assertIsInstance(d, Deferred) d.addBoth(_errback) return d def test_exchange_direct(self): message, properties = "Test message", {"foo": "bar"} def _callback(m, p): self.assertEqual(m, message) self.assertEqual(p, properties) myex = ex.ExchangeFactory().newExchange("exchange_name", "direct") myex.bind_queue("queue_name", "routing_key", _callback) empty_result = myex.publish("invalid_routing_key", message, properties) self.assertEqual(empty_result, []) result = myex.publish("routing_key", message, properties) self.assertIsInstance(result, list) self.assertEqual(len(result), 1) d = result[0] def _resback(result): self.assertEqual(result, None) myex.unbind_queue("queue_name") self.assertEqual(len(myex._queues), 0) self.assertIsInstance(d, Deferred) d.addCallbacks(_resback) return d def test_exchange_fanout(self): self.g_resent_nums = 0 message, properties = "Test message", {"foo": "bar"} <|fim▁hole|> self.g_resent_nums += 1 self.assertEqual(m, message) self.assertEqual(p, properties) myex = ex.ExchangeFactory().newExchange("exchange_name", "fanout") myex.bind_queue("queue_name", "routing_key", _callback) result = myex.publish("invalid_routing_key", message, properties) self.assertIsInstance(result, list) self.assertEqual(len(result), 1) d1 = result[0] result = 
myex.publish("routing_key", message, properties) self.assertIsInstance(result, list) self.assertEqual(len(result), 1) d2 = result[0] self.assertIsInstance(d1, Deferred) self.assertIsInstance(d2, Deferred) dl = DeferredList([d1, d2]) def _resback(result): self.assertEqual(result, [(True, None), (True, None)]) dl.addCallbacks(_resback) return dl<|fim▁end|>
def _callback(m, p):
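Outside the test harness, a minimal sketch of the exchange API the tests above exercise; it assumes a running Twisted reactor, since Queue.put hands back Deferreds.

# Hypothetical round trip on a direct exchange, mirroring the tested API.
import smartanthill.litemq.exchange as ex

def wire_up():
    exchange = ex.ExchangeFactory().newExchange("sensors", "direct")

    def on_message(message, properties):
        print("got", message, properties)
        return True  # treated as an ack by queues created with ack=True

    exchange.bind_queue("readings", "temperature", on_message)
    # publish() returns a list of Deferreds, one per matching queue
    return exchange.publish("temperature", "21.5C", {"unit": "celsius"})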
<|file_name|>ls.go<|end_file_name|><|fim▁begin|>package cmd import ( "errors" "github.com/cretz/go-safeclient/client" "github.com/spf13/cobra" "log" "os" ) var lsShared bool var lsCmd = &cobra.Command{ Use: "ls [dir]", Short: "Fetch directory information", RunE: func(cmd *cobra.Command, args []string) error { if len(args) != 1 { return errors.New("One and only one argument allowed")<|fim▁hole|> c, err := getClient() if err != nil { log.Fatalf("Unable to obtain client: %v", err) } info := client.GetDirInfo{DirPath: args[0], Shared: lsShared} dir, err := c.GetDir(info) if err != nil { log.Fatalf("Failed to list dir: %v", err) } writeDirResponseTable(os.Stdout, dir) return nil }, } func init() { lsCmd.Flags().BoolVarP(&lsShared, "shared", "s", false, "Use shared area for user") RootCmd.AddCommand(lsCmd) }<|fim▁end|>
}
<|file_name|>PojoCollectionArtifactProvider.java<|end_file_name|><|fim▁begin|>package at.jku.sea.cloud.rest.pojo.stream.provider; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeInfo.Id; import com.fasterxml.jackson.annotation.JsonTypeName;<|fim▁hole|>import at.jku.sea.cloud.rest.pojo.PojoCollectionArtifact; @JsonTypeInfo(use = Id.NAME, property = "__type") @JsonTypeName(value = "CollectionArtifactProvider") public class PojoCollectionArtifactProvider extends PojoProvider { private PojoCollectionArtifact collectionArtifact; public PojoCollectionArtifactProvider() { } public PojoCollectionArtifactProvider(PojoCollectionArtifact collectionArtifact) { this.collectionArtifact = collectionArtifact; } public PojoCollectionArtifact getCollectionArtifact() { return collectionArtifact; } public void setCollectionArtifact(PojoCollectionArtifact collectionArtifact) { this.collectionArtifact = collectionArtifact; } }<|fim▁end|>
<|file_name|>bitcoin_es.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="es" version="2.1"> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About CoinAwesome</source> <translation>Acerca de CoinAwesome</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;CoinAwesome&lt;/b&gt; version</source> <translation>&lt;b&gt;CoinAwesome&lt;/b&gt; versión</translation> </message> <message> <location line="+41"/> <source>Copyright © 2009-2014 The Bitcoin developers Copyright © 2012-2014 The NovaCoin developers Copyright © 2014 The Blackcoin developers Copyright © 2015 The CoinAwesome developers</source> <translation>Copyright © 2009-2014 The Bitcoin developers Copyright © 2012-2014 The NovaCoin developers Copyright © 2014 The Blackcoin developers Copyright © 2015 The CoinAwesome developers</translation> </message> <message> <location line="+15"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or &lt;a href=&quot;http://www.opensource.org/licenses/mit-license.php&quot;&gt;http://www.opensource.org/licenses/mit-license.php&lt;/a&gt;. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (&lt;a href=&quot;https://www.openssl.org/&quot;&gt;https://www.openssl.org/&lt;/a&gt;) and cryptographic software written by Eric Young (&lt;a href=&quot;mailto:[email protected]&quot;&gt;[email protected]&lt;/a&gt;) and UPnP software written by Thomas Bernard.</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Libreta de Direcciones</translation> </message> <message> <location line="+22"/> <source>Double-click to edit address or label</source> <translation>Haga doble clic para editar una etiqueta o dirección </translation> </message> <message> <location line="+24"/> <source>Create a new address</source> <translation>Crear una nueva dirección</translation> </message> <message> <location line="+10"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Copiar la dirección seleccionada al portapapeles del sistema</translation> </message> <message> <location line="-7"/> <source>&amp;New Address</source> <translation>&amp;Nueva Dirección</translation> </message> <message> <location line="-43"/> <source>These are your CoinAwesome addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Estas son las direcciones de CoinAwesome para recibir pagos. 
Es posible que desee dar una diferente a cada remitente para que pueda realizar un seguimiento de quien te está pagando.</translation> </message> <message> <location line="+53"/> <source>&amp;Copy Address</source> <translation>&amp;Copiar dirección</translation> </message> <message> <location line="+7"/> <source>Show &amp;QR Code</source> <translation>Enseñar &amp;QR Code</translation> </message> <message> <location line="+7"/> <source>Sign a message to prove you own a CoinAwesome address</source> <translation>Firmar un mensaje para demostrar que es dueño de su dirección de CoinAwesome</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Firmar &amp;Mensaje</translation> </message> <message> <location line="+17"/> <source>Delete the currently selected address from the list</source> <translation>Borrar de la lista la dirección seleccionada</translation> </message> <message> <location line="-10"/> <source>Verify a message to ensure it was signed with a specified CoinAwesome address</source> <translation>Verifique el mensaje para asegurarse que fue firmado por una dirección específica de CoinAwesome</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Verifique Mensaje</translation> </message> <message> <location line="+10"/> <source>&amp;Delete</source> <translation>&amp;Eliminar</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+65"/> <source>Copy &amp;Label</source> <translation>Copiar &amp;etiqueta</translation> </message> <message> <location line="+2"/> <source>&amp;Edit</source> <translation>&amp;Editar</translation> </message> <message> <location line="+250"/> <source>Export Address Book Data</source> <translation>Exportar Data de Libro de Direcciones</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Archivos de columnas separadas por coma (*.csv)</translation><|fim▁hole|> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Error exportando</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>No se pudo escribir en el archivo %1</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+145"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Dirección</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(sin etiqueta)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Diálogo de contraseña</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Introducir contraseña</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Nueva contraseña</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Repita la nueva contraseña</translation> </message> <message> <location line="+33"/> <source>Serves to disable the trivial sendmoney when OS account compromised. 
Provides no real security.</source> <translation>Sirve para desactivar SendMoney cuando la cuenta del Sistema Operativo está comprometida. No ofrece seguridad real.</translation> </message> <message> <location line="+3"/> <source>For staking only</source> <translation>Para &quot;Staking&quot; solamente</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+38"/> <source>Encrypt wallet</source> <translation>Cifrar el monedero</translation> </message> <message> <location line="+7"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Esta operación requiere su contraseña para desbloquear el monedero.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Desbloquear monedero</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Esta operación requiere su contraseña para descifrar el monedero.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Descifrar el monedero</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Cambiar contraseña</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Introduzca la contraseña anterior del monedero y la nueva. </translation> </message> <message> <location line="+45"/> <source>Confirm wallet encryption</source> <translation>Confirmar cifrado del monedero</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR COINS&lt;/b&gt;!</source> <translation>Advertencia: Si encripta su cartera y pierde su frase de contraseña, puede &lt;b&gt;PERDER TODAS SUS MONEDAS&lt;/ b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>¿Seguro que desea cifrar su monedero?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>IMPORTANTE: Cualquier copia de seguridad que haya realizado previamente de su archivo de monedero debe reemplazarse con el nuevo archivo de monedero cifrado. Por razones de seguridad, las copias de seguridad previas del archivo de monedero no cifradas serán inservibles en cuanto comience a usar el nuevo monedero cifrado.</translation> </message> <message> <location line="+103"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Aviso: ¡La tecla de bloqueo de mayúsculas está activada!</translation> </message> <message> <location line="-133"/> <location line="+60"/> <source>Wallet encrypted</source> <translation>Monedero cifrado</translation> </message> <message> <location line="-140"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;ten or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <source>CoinAwesome will close now to finish the encryption process. 
Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source> <translation>CoinAwesome cerrará para terminar el proceso de encriptación. Recuerde que la encriptación de su monedero no puede proteger completamente que sus monedas sean robadas por malware infectando su computadora.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+44"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Ha fallado el cifrado del monedero</translation> </message> <message> <location line="-56"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Ha fallado el cifrado del monedero debido a un error interno. El monedero no ha sido cifrado.</translation> </message> <message> <location line="+7"/> <location line="+50"/> <source>The supplied passphrases do not match.</source> <translation>Las contraseñas no coinciden.</translation> </message> <message> <location line="-38"/> <source>Wallet unlock failed</source> <translation>Ha fallado el desbloqueo del monedero</translation> </message> <message> <location line="+1"/> <location line="+12"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>La contraseña introducida para descifrar el monedero es incorrecta.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Ha fallado el descifrado del monedero</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Se ha cambiado correctamente la contraseña del monedero.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+297"/> <source>Sign &amp;message...</source> <translation>Firmar &amp;mensaje...</translation> </message> <message> <location line="-64"/> <source>Show general overview of wallet</source> <translation>Mostrar vista general del monedero</translation> </message> <message> <location line="+17"/> <source>&amp;Transactions</source> <translation>&amp;Transacciones</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Examinar el historial de transacciones</translation> </message> <message> <location line="+5"/> <source>&amp;Address Book</source> <translation>&amp;Libreta de Direcciones</translation> </message> <message> <location line="+1"/> <source>Edit the list of stored addresses and labels</source> <translation>Editar la lista de direcciones y etiquetas almacenadas</translation> </message> <message> <location line="-18"/> <source>Show the list of addresses for receiving payments</source> <translation>Mostrar la lista de direcciones para recibir pagos</translation> </message> <message> <location line="+34"/> <source>E&amp;xit</source> <translation>&amp;Salir</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Salir de la aplicación</translation> </message> <message> <location line="+4"/> <source>Show information about CoinAwesome</source> <translation>Mostrar información sobre CoinAwesome</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Acerca de &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Mostrar 
información acerca de Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Opciones...</translation> </message> <message> <location line="+4"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Cifrar monedero…</translation> </message> <message> <location line="+2"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Guardar copia de seguridad del monedero...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Cambiar la contraseña…</translation> </message> <message> <location line="+9"/> <source>&amp;Export...</source> <translation>&amp;Exportar...</translation> </message> <message> <location line="-55"/> <source>Send coins to a CoinAwesome address</source> <translation>Enviar monedas a una dirección de CoinAwesome</translation> </message> <message> <location line="+39"/> <source>Modify configuration options for CoinAwesome</source> <translation>Modificar las opciones de configuración para CoinAwesome</translation> </message> <message> <location line="+17"/> <source>Export the data in the current tab to a file</source> <translation>Exportar los datos en la ficha actual a un archivo</translation> </message> <message> <location line="-13"/> <source>Encrypt or decrypt wallet</source> <translation>Cifrar o descifrar el monedero</translation> </message> <message> <location line="+2"/> <source>Backup wallet to another location</source> <translation>Copia de seguridad del monedero en otra ubicación</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Cambiar la contraseña utilizada para el cifrado del monedero</translation> </message> <message> <location line="+10"/> <source>&amp;Debug window</source> <translation>Ventana de &amp;depuración</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Abrir la consola de depuración y diagnóstico</translation> </message> <message> <location line="-5"/> <source>&amp;Verify message...</source> <translation>&amp;Verificar mensaje...</translation> </message> <message> <location line="-214"/> <location line="+551"/> <source>CoinAwesome</source> <translation>CoinAwesome</translation> </message> <message> <location line="-551"/> <source>Wallet</source> <translation>Monedero</translation> </message> <message> <location line="+193"/> <source>&amp;About CoinAwesome</source> <translation>Acerca de CoinAwesome</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Mostrar / Ocultar</translation> </message> <message> <location line="+8"/> <source>Unlock wallet</source> <translation>Desbloquear el monedero</translation> </message> <message> <location line="+1"/> <source>&amp;Lock Wallet</source> <translation>&amp;Bloquear monedero</translation> </message> <message> <location line="+1"/> <source>Lock wallet</source> <translation>Bloquear monedero</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Archivo</translation> </message> <message> <location line="+8"/> <source>&amp;Settings</source> <translation>&amp;Configuración</translation> </message> <message> <location line="+8"/> <source>&amp;Help</source> <translation>A&amp;yuda</translation> </message> <message> <location line="+17"/> <source>Tabs toolbar</source> <translation>Barra de pestañas</translation> </message> <message> 
<location line="+46"/> <location line="+9"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+0"/> <location line="+58"/> <source>CoinAwesome client</source> <translation>Cliente CoinAwesome</translation> </message> <message numerus="yes"> <location line="+70"/> <source>%n active connection(s) to CoinAwesome network</source> <translation><numerusform>%n conexión activa a la red CoinAwesome</numerusform><numerusform>%n conexiones activas a la red CoinAwesome</numerusform></translation> </message> <message> <location line="+488"/> <source>Staking.&lt;br&gt;Your weight is %1&lt;br&gt;Network weight is %2&lt;br&gt;Expected time to earn reward is %3</source> <translation>Staking.&lt;br&gt;Su contribución es %1&lt;br&gt;Contribución de la red es %2&lt;br&gt;Tiempo esperado para ganar la recompensa es %3</translation> </message> <message> <location line="+6"/> <source>Not staking because wallet is locked</source> <translation>No esta &quot;Staking&quot; porque monedera está bloqueada</translation> </message> <message> <location line="+2"/> <source>Not staking because wallet is offline</source> <translation>No esta &quot;Staking&quot; porque monedera está desconectada</translation> </message> <message> <location line="+2"/> <source>Not staking because wallet is syncing</source> <translation>No esta &quot;Staking&quot; porque monedera está sincronizando</translation> </message> <message> <location line="+2"/> <source>Not staking because you don&apos;t have mature coins</source> <translation>No esta &quot;Staking&quot; ya que no tiene monedas maduras</translation> </message> <message> <location line="-808"/> <source>&amp;Dashboard</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>&amp;Receive</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>&amp;Unlock Wallet...</source> <translation>&amp;Desbloquear Monedero...</translation> </message> <message> <location line="+273"/> <source>Up to date</source> <translation>Actualizado</translation> </message> <message> <location line="+43"/> <source>Catching up...</source> <translation>Actualizando...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Confirme tarifa de transacción</translation> </message> <message> <location line="+27"/> <source>Sent transaction</source> <translation>Transacción enviada</translation> </message> <message> <location line="+1"/> <source>Incoming transaction</source> <translation>Transacción entrante</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Fecha: %1 Cantidad: %2 Tipo: %3 Dirección: %4 </translation> </message> <message> <location line="+100"/> <location line="+15"/> <source>URI handling</source> <translation>Manejar URI</translation> </message> <message> <location line="-15"/> <location line="+15"/> <source>URI can not be parsed! This can be caused by an invalid CoinAwesome address or malformed URI parameters.</source> <translation>URI no se puede analizar! 
Esto puede ser causado por una dirección de CoinAwesome no válida o parámetros de URI malformados.</translation> </message> <message> <location line="+9"/> <source>Wallet is &lt;b&gt;not encrypted&lt;/b&gt;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>El monedero está &lt;b&gt;cifrado&lt;/b&gt; y actualmente &lt;b&gt;desbloqueado&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>El monedero está &lt;b&gt;cifrado&lt;/b&gt; y actualmente &lt;b&gt;bloqueado&lt;/b&gt;</translation> </message> <message> <location line="+24"/> <source>Backup Wallet</source> <translation>Copia de Seguridad de Monedero</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Data de Monedero (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Copia de Seguridad a fracasado</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Hubo un error al tratar de salvar los datos de su monedero a la nueva ubicación.</translation> </message> <message numerus="yes"> <location line="+91"/> <source>%n second(s)</source> <translation><numerusform>%n segundo</numerusform><numerusform>%n segundos</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n minute(s)</source> <translation><numerusform>%n minuto</numerusform><numerusform>%n minutos</numerusform></translation> </message> <message numerus="yes"> <location line="-429"/> <location line="+433"/> <source>%n hour(s)</source> <translation><numerusform>%n hora</numerusform><numerusform>%n horas</numerusform></translation> </message> <message> <location line="-456"/> <source>Processed %1 blocks of transaction history.</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+27"/> <location line="+433"/> <source>%n day(s)</source> <translation><numerusform>%n día</numerusform><numerusform>%n días</numerusform></translation> </message> <message numerus="yes"> <location line="-429"/> <location line="+6"/> <source>%n week(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+0"/> <source>%1 and %2</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+0"/> <source>%n year(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+5"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+69"/> 
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="+324"/> <source>Not staking</source> <translation>No estás &quot;Staking&quot;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+104"/> <source>A fatal error occurred. CoinAwesome can no longer continue safely and will quit.</source> <translation>Se ha producido un error fatal. CoinAwesome ya no puede continuar de forma segura y cerrará.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+110"/> <source>Network Alert</source> <translation>Alerta de red</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <location filename="../forms/coincontroldialog.ui" line="+14"/> <source>Coin Control</source> <translation>Control de Moneda</translation> </message> <message> <location line="+31"/> <source>Quantity:</source> <translation>Cantidad:</translation> </message> <message> <location line="+32"/> <source>Bytes:</source> <translation>Bytes:</translation> </message> <message> <location line="+48"/> <source>Amount:</source> <translation>Cantidad:</translation> </message> <message> <location line="+32"/> <source>Priority:</source> <translation>Prioridad:</translation> </message> <message> <location line="+48"/> <source>Fee:</source> <translation>Tasa:</translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation>Envío pequeño:</translation> </message> <message> <location filename="../coincontroldialog.cpp" line="+552"/> <source>no</source> <translation>no</translation> </message> <message> <location filename="../forms/coincontroldialog.ui" line="+51"/> <source>After Fee:</source> <translation>Después de tasas:</translation> </message> <message> <location line="+35"/> <source>Change:</source> <translation>Cambio:</translation> </message> <message> <location line="+69"/> <source>(un)select all</source> <translation>(des)marcar todos</translation> </message> <message> <location line="+13"/> <source>Tree mode</source> <translation>Modo árbol</translation> </message> <message> <location line="+16"/> <source>List mode</source> <translation>Modo lista</translation> </message> <message> <location line="+45"/> <source>Amount</source> <translation>Cuantía</translation> </message> <message> <location line="+5"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+5"/> <source>Address</source> <translation>Dirección</translation> </message> <message> <location line="+5"/> <source>Date</source> <translation>Fecha</translation> </message> <message> <location line="+5"/> <source>Confirmations</source> <translation>Confirmaciones</translation> </message> <message> <location line="+3"/> <source>Confirmed</source> <translation>Confirmado</translation> </message> <message> <location line="+5"/> <source>Priority</source> <translation>Prioridad</translation> </message> <message> <location filename="../coincontroldialog.cpp" line="-515"/> <source>Copy address</source> <translation>Copiar dirección</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copiar etiqueta</translation> </message> <message> <location line="+1"/> <location line="+26"/> <source>Copy amount</source> 
<translation>Copiar cuantía</translation> </message> <message> <location line="-25"/> <source>Copy transaction ID</source> <translation>Copiar identificador de transacción</translation> </message> <message> <location line="+24"/> <source>Copy quantity</source> <translation>Copiar cantidad</translation> </message> <message> <location line="+2"/> <source>Copy fee</source> <translation>Copiar tasa</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>Copiar después de tasas</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>Copiar bytes</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>Copiar prioridad</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation>Copiar envío pequeño</translation> </message> <message> <location line="+1"/> <source>Copy change</source> <translation>Copiar cambio</translation> </message> <message> <location line="+317"/> <source>highest</source> <translation>lo más alto</translation> </message> <message> <location line="+1"/> <source>high</source> <translation>alto</translation> </message> <message> <location line="+1"/> <source>medium-high</source> <translation>medio-alto</translation> </message> <message> <location line="+1"/> <source>medium</source> <translation>medio</translation> </message> <message> <location line="+4"/> <source>low-medium</source> <translation>bajo-medio</translation> </message> <message> <location line="+1"/> <source>low</source> <translation>bajo</translation> </message> <message> <location line="+1"/> <source>lowest</source> <translation>lo más bajo</translation> </message> <message> <location line="+155"/> <source>DUST</source> <translation>DUST</translation> </message> <message> <location line="+0"/> <source>yes</source> <translation>sí</translation> </message> <message> <location line="+10"/> <source>This label turns red, if the transaction size is bigger than 10000 bytes. This means a fee of at least %1 per kb is required. Can vary +/- 1 Byte per input.</source> <translation>Esta etiqueta se pone en rojo si el tamaño de la transacción es mayor que 10000 bytes. Esto significa que se requiere una cuota de al menos %1 por kb. Puede variar +/- 1 Byte por entrada.</translation> </message> <message> <location line="+1"/> <source>Transactions with higher priority get more likely into a block. This label turns red, if the priority is smaller than &quot;medium&quot;. This means a fee of at least %1 per kb is required.</source> <translation>Las transacciones con mayor prioridad tienen más probabilidades de entrar en un bloque. Esta etiqueta se pone en rojo si la prioridad es menor que &quot;medio&quot;. Esto significa que se requiere una cuota de al menos %1 por kb.</translation> </message> <message> <location line="+1"/> <source>This label turns red, if any recipient receives an amount smaller than %1. This means a fee of at least %2 is required. Amounts below 0.546 times the minimum relay fee are shown as DUST.</source> <translation>Esta etiqueta se pone en rojo si cualquier destinatario recibe una cantidad menor que %1. Esto significa que se requiere una cuota de al menos %2. Las cantidades inferiores a 0.546 veces la cuota mínima de retransmisión se muestran como DUST.</translation> </message> <message> <location line="+1"/> <source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source> <translation>Esta etiqueta se pone en rojo si el cambio es menor que %1. Esto significa que se requiere una cuota de al menos %2.</translation> </message> <message> <location line="+36"/> <location line="+66"/> <source>(no label)</source> <translation>(sin etiqueta)</translation> </message> <message> <location line="-9"/> <source>change from %1 (%2)</source> <translation>cambio de %1 (%2)</translation> </message> <message> <location line="+1"/> <source>(change)</source> <translation>(cambio)</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Editar Dirección</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Etiqueta</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>La etiqueta asociada con esta entrada de la libreta de direcciones</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Dirección</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>La dirección asociada con esta entrada de la libreta de direcciones. Esta sólo puede ser modificada para direcciones de envío.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Nueva dirección de recepción</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Nueva dirección de envío</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Editar dirección de recepción</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Editar dirección de envío</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>La dirección introducida &quot;%1&quot; ya está presente en la libreta de direcciones.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid CoinAwesome address.</source> <translation>La dirección introducida &quot;%1&quot; no es una dirección válida de CoinAwesome.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>No se pudo desbloquear el monedero.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Ha fallado la generación de la nueva clave.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+426"/> <location line="+12"/> <source>CoinAwesome-Qt</source> <translation>CoinAwesome-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>versión</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Uso:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>opciones de línea de comandos</translation> </message> <message> <location line="+4"/> <source>UI options</source>
<translation>opciones de la interfaz de usuario</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Defina el idioma, por ejemplo &quot;de_DE&quot; (predeterminado: región del sistema)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Iniciar minimizado</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Mostrar pantalla de bienvenida al iniciar (predeterminado: 1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Opciones</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Principal</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source> <translation>Tarifa de transacción opcional por kB que ayuda a asegurarse de que sus transacciones se procesan rápidamente. La mayoría de las transacciones son 1 kB. Se recomienda una cuota de 0.01.</translation> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Pagar comisión de &amp;transacciones</translation> </message> <message> <location line="+31"/> <source>Reserved amount does not participate in staking and is therefore spendable at any time.</source> <translation>La cantidad reservada no participa en el &quot;Staking&quot; y, por lo tanto, se puede gastar en cualquier momento.</translation> </message> <message> <location line="+15"/> <source>Reserve</source> <translation>Reserva</translation> </message> <message> <location line="+31"/> <source>Automatically start CoinAwesome after logging in to the system.</source> <translation>Iniciar CoinAwesome automáticamente después de entrar en el sistema.</translation> </message> <message> <location line="+3"/> <source>&amp;Start CoinAwesome on system login</source> <translation>&amp;Iniciar CoinAwesome al inicio del sistema</translation> </message> <message> <location line="+21"/> <source>&amp;Network</source> <translation>&amp;Red</translation> </message> <message> <location line="+6"/> <source>Automatically open the CoinAwesome client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Abrir automáticamente el puerto de cliente CoinAwesome en el router. Esto sólo funciona cuando el router es compatible con UPnP y está habilitado.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Mapear el puerto usando &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the CoinAwesome network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Conéctese a la red de CoinAwesome a través de un SOCKS proxy (p.ej. cuando se conecta a través de Tor).</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Conéctese a través de un SOCKS proxy:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>Dirección &amp;IP del proxy:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g.
127.0.0.1)</source> <translation>Dirección IP del proxy (p.ej. 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Puerto:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Puerto del servidor proxy (ej. 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>&amp;Versión SOCKS:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>Versión SOCKS del proxy (ej. 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Ventana</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Mostrar solo un icono en la bandeja tras minimizar la ventana.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimizar a la bandeja en vez de a la barra de tareas</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimizar en lugar de salir de la aplicación al cerrar la ventana. Cuando esta opción está activa, la aplicación solo se puede cerrar seleccionando Salir desde el menú.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>M&amp;inimizar al cerrar</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Interfaz</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>I&amp;dioma de la interfaz de usuario:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting CoinAwesome.</source> <translation>El idioma de la interfaz de usuario se puede configurar aquí.
Esta configuración se aplicará después de reiniciar CoinAwesome.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>Mostrar las cantidades en la &amp;unidad:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Elegir la subdivisión predeterminada para mostrar cantidades en la interfaz y cuando se envían monedas.</translation> </message> <message> <location line="+9"/> <source>Whether to show coin control features or not.</source> <translation>Mostrar o no la funcionalidad de Coin Control.</translation> </message> <message> <location line="+3"/> <source>Display coin &amp;control features (experts only!)</source> <translation>Mostrar funcionalidad de &amp;control de monedas (¡solo expertos!)</translation> </message> <message> <location line="+7"/> <source>Whether to select the coin outputs randomly or with minimal coin age.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Minimize weight consumption (experimental)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use black visual theme (requires restart)</source> <translation type="unfinished"/> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;Aceptar</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Cancelar</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Aplicar</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>predeterminado</translation> </message> <message> <location line="+149"/> <location line="+9"/> <source>Warning</source> <translation>Advertencia</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting CoinAwesome.</source> <translation>Esta configuración se aplicará después de reiniciar CoinAwesome.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>La dirección proxy indicada es inválida.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Formulario</translation> </message> <message> <location line="+46"/> <location line="+247"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the CoinAwesome network after a connection is established, but this process has not completed yet.</source> <translation>La información mostrada puede estar desactualizada.
Su monedero se sincroniza automáticamente con la red CoinAwesome después de que se establece una conexión, pero este proceso no se ha completado todavía.</translation> </message> <message> <location line="-173"/> <source>Stake:</source> <translation>Stake:</translation> </message> <message> <location line="+32"/> <source>Unconfirmed:</source> <translation>Sin confirmar:</translation> </message> <message> <location line="-113"/> <source>Wallet</source> <translation>Monedero</translation> </message> <message> <location line="+49"/> <source>Spendable:</source> <translation>Disponible:</translation> </message> <message> <location line="+16"/> <source>Your current spendable balance</source> <translation>Su actual balance disponible</translation> </message> <message> <location line="+80"/> <source>Immature:</source> <translation>No disponible:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Saldo recién minado que aún no está disponible.</translation> </message> <message> <location line="+23"/> <source>Total:</source> <translation>Total:</translation> </message> <message> <location line="+16"/> <source>Your current total balance</source> <translation>Su balance actual total</translation> </message> <message> <location line="+50"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Movimientos recientes&lt;/b&gt;</translation> </message> <message> <location line="-118"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Total de transacciones que aún no se han confirmado, y aún no cuentan para el balance actual</translation> </message> <message> <location line="-32"/> <source>Total of coins that was staked, and do not yet count toward the current balance</source> <translation>Total de las monedas que fueron &quot;Staked&quot;, y aún no cuentan para el balance actual</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>desincronizado</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start coinawesome: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>Diálogo de código QR</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Solicitar Pago</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Cantidad:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Etiqueta:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Mensaje:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Guardar como...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Error al codificar la URI en el código QR.</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>La cantidad introducida es inválida, compruébela por favor.</translation> </message> <message>
<location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>La URI es demasiado larga, pruebe a acortar el texto para la etiqueta / mensaje.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Guardar código QR</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>Imágenes PNG (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Nombre del cliente</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <source>N/A</source> <translation>N/D</translation> </message> <message> <location line="-194"/> <source>Client version</source> <translation>Versión del cliente</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Información</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Utilizando la versión OpenSSL</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Hora de inicio</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Red</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Número de conexiones</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>En testnet</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Cadena de bloques</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Número actual de bloques</translation> </message> <message> <location line="+197"/> <source>&amp;Network Traffic</source> <translation type="unfinished"/> </message> <message> <location line="+52"/> <source>&amp;Clear</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Totals</source> <translation type="unfinished"/> </message> <message> <location line="+64"/> <source>In:</source> <translation type="unfinished"/> </message> <message> <location line="+80"/> <source>Out:</source> <translation type="unfinished"/> </message> <message> <location line="-383"/> <source>Last block time</source> <translation>Hora del último bloque</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Abrir</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>Opciones de línea de comandos</translation> </message> <message> <location line="+7"/> <source>Show the CoinAwesome-Qt help message to get a list with possible CoinAwesome command-line options.</source> <translation>Mostrar el mensaje de ayuda de CoinAwesome-Qt para obtener una lista con las posibles opciones de línea de comandos para CoinAwesome.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Mostrar</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Consola</translation> </message> <message> <location line="-237"/> <source>Build date</source> <translation>Fecha de
compilación</translation> </message> <message> <location line="-104"/> <source>CoinAwesome - Debug window</source> <translation>CoinAwesome - Ventana de depuración</translation> </message> <message> <location line="+25"/> <source>CoinAwesome Core</source> <translation>Núcleo CoinAwesome</translation> </message> <message> <location line="+256"/> <source>Debug log file</source> <translation>Archivo de registro de depuración</translation> </message> <message> <location line="+7"/> <source>Open the CoinAwesome debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Abre el archivo de registro de depuración de CoinAwesome del directorio de datos actual. Esto puede tardar algunos segundos para archivos grandes.</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Borrar consola</translation> </message> <message> <location filename="../rpcconsole.cpp" line="+325"/> <source>Welcome to the CoinAwesome RPC console.</source> <translation>Bienvenido a la consola RPC de CoinAwesome.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Use las flechas arriba y abajo para navegar por el historial y &lt;b&gt;Control+L&lt;/b&gt; para vaciar la pantalla.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Escriba &lt;b&gt;help&lt;/b&gt; para ver un resumen de los comandos disponibles.</translation> </message> <message> <location line="+127"/> <source>%1 B</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 KB</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 MB</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 GB</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>%1 m</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>%1 h</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 h %2 m</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+181"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Enviar monedas</translation> </message> <message> <location line="+76"/> <source>Coin Control Features</source> <translation>Características de Coin Control</translation> </message> <message> <location line="+20"/> <source>Inputs...</source> <translation>Entradas...</translation> </message> <message> <location line="+7"/> <source>automatically selected</source> <translation>seleccionado automáticamente</translation> </message> <message> <location line="+19"/> <source>Insufficient funds!</source> <translation>¡Fondos insuficientes!</translation> </message> <message> <location line="+77"/> <source>Quantity:</source> <translation>Cantidad:</translation> </message> <message> <location line="+22"/> <location line="+35"/> <source>0</source> <translation>0</translation> </message> <message> <location line="-19"/> <source>Bytes:</source>
<translation>Bytes:</translation> </message> <message> <location line="+51"/> <source>Amount:</source> <translation>Cuantía:</translation> </message> <message> <location line="+22"/> <location line="+86"/> <location line="+86"/> <location line="+32"/> <source>0.00 AWE</source> <translation>0.00 AWE</translation> </message> <message> <location line="-191"/> <source>Priority:</source> <translation>Prioridad:</translation> </message> <message> <location line="+19"/> <source>medium</source> <translation>medio</translation> </message> <message> <location line="+32"/> <source>Fee:</source> <translation>Tasa:</translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation>Envío pequeño:</translation> </message> <message> <location line="+19"/> <source>no</source> <translation>no</translation> </message> <message> <location line="+32"/> <source>After Fee:</source> <translation>Después de tasas:</translation> </message> <message> <location line="+35"/> <source>Change</source> <translation>Cambio</translation> </message> <message> <location line="+50"/> <source>custom change address</source> <translation>dirección de cambio personalizada</translation> </message> <message> <location line="+106"/> <source>Send to multiple recipients at once</source> <translation>Enviar a múltiples destinatarios de una vez</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>Añadir &amp;destinatario</translation> </message> <message> <location line="+16"/> <source>Remove all transaction fields</source> <translation>Eliminar todos los campos de la transacción</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Vaciar &amp;todo</translation> </message> <message> <location line="+24"/> <source>Balance:</source> <translation>Saldo:</translation> </message> <message> <location line="+16"/> <source>123.456 AWE</source> <translation>123.456 AWE</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Confirmar el envío</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>&amp;Enviar</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-173"/> <source>Enter a CoinAwesome address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation>Introduce una dirección CoinAwesome (p.ej.
B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation> </message> <message> <location line="+15"/> <source>Copy quantity</source> <translation>Copiar cantidad</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copiar cuantía</translation> </message> <message> <location line="+1"/> <source>Copy fee</source> <translation>Copiar tasa</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>Copiar después de tasas</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>Copiar bytes</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>Copiar prioridad</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation>Copiar envío pequeño</translation> </message> <message> <location line="+1"/> <source>Copy change</source> <translation>Copiar cambio</translation> </message> <message> <location line="+86"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; a %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Confirmar el envío de monedas</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>¿Estás seguro de que quieres enviar %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> y </translation> </message> <message> <location line="+29"/> <source>The recipient address is not valid, please recheck.</source> <translation>La dirección de recepción no es válida, compruébela de nuevo.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>La cantidad por pagar tiene que ser mayor que 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>La cantidad sobrepasa su saldo.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>El total sobrepasa su saldo cuando se incluye la tasa de envío de %1.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Se ha encontrado una dirección duplicada. Solo se puede enviar a cada dirección una vez por operación de envío.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Error: La transacción ha sido rechazada.
Esto puede ocurrir si algunas de sus monedas en el monedero ya se gastaron, por ejemplo, si se usa una copia del wallet.dat y se gastaron las monedas de la copia pero no se han marcado como gastadas aquí.</translation> </message> <message> <location line="+247"/> <source>WARNING: Invalid CoinAwesome address</source> <translation>ADVERTENCIA: Dirección CoinAwesome inválida</translation> </message> <message> <location line="+13"/> <source>(no label)</source> <translation>(sin etiqueta)</translation> </message> <message> <location line="+4"/> <source>WARNING: unknown change address</source> <translation>ADVERTENCIA: dirección de cambio desconocida</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Formulario</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>Ca&amp;ntidad:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>&amp;Pagar a:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Etiquete esta dirección para añadirla a la libreta</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Etiqueta:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Elige una dirección de la libreta de direcciones</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Pegar dirección desde portapapeles</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Eliminar este destinatario</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a CoinAwesome address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation>Introduce una dirección CoinAwesome (p.ej. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Firmas - Firmar / verificar un mensaje</translation> </message> <message> <location line="+13"/> <location line="+124"/> <source>&amp;Sign Message</source> <translation>&amp;Firmar mensaje</translation> </message> <message> <location line="-118"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Puede firmar mensajes con sus direcciones para demostrar que las posee. Tenga cuidado de no firmar cualquier cosa vaga, ya que los ataques de phishing pueden tratar de engañarle para suplantar su identidad.
Firme solo declaraciones totalmente detalladas con las que usted esté de acuerdo.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation>La dirección con la que firmar el mensaje (p.ej. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation> </message> <message> <location line="+10"/> <location line="+203"/> <source>Choose an address from the address book</source> <translation>Elige una dirección de la libreta de direcciones</translation> </message> <message> <location line="-193"/> <location line="+203"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-193"/> <source>Paste address from clipboard</source> <translation>Pegar dirección desde portapapeles</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Introduzca el mensaje que desea firmar aquí</translation> </message> <message> <location line="+24"/> <source>Copy the current signature to the system clipboard</source> <translation>Copiar la firma actual al portapapeles del sistema</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this CoinAwesome address</source> <translation>Firma el mensaje para demostrar que posees esta dirección CoinAwesome.</translation> </message> <message> <location line="+17"/> <source>Reset all sign message fields</source> <translation>Vaciar todos los campos de la firma de mensaje</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Vaciar &amp;todo</translation> </message> <message> <location line="-87"/> <location line="+70"/> <source>&amp;Verify Message</source> <translation>&amp;Verificar mensaje</translation> </message> <message> <location line="-64"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Introduzca la dirección para la firma, el mensaje (asegurándose de copiar tal cual los saltos de línea, espacios, tabulaciones, etc.) y la firma a continuación para verificar el mensaje. Tenga cuidado de no asumir más información de lo que dice el propio mensaje firmado para evitar fraudes basados en ataques de tipo man-in-the-middle.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation>La dirección con la que se firmó el mensaje (p.ej. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified CoinAwesome address</source> <translation>Verifique el mensaje para asegurarse de que se firmó con la dirección CoinAwesome especificada.</translation> </message> <message> <location line="+17"/> <source>Reset all verify message fields</source> <translation>Vaciar todos los campos de la verificación de mensaje</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a CoinAwesome address (e.g.
B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation>Introduce una dirección CoinAwesome (p.ej. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Haga clic en &quot;Firmar mensaje&quot; para generar la firma</translation> </message> <message> <location line="+3"/> <source>Enter CoinAwesome signature</source> <translation>Introduce la firma CoinAwesome</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>La dirección introducida es inválida.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Verifique la dirección e inténtelo de nuevo.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>La dirección introducida no corresponde a una clave.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>Se ha cancelado el desbloqueo del monedero.</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>No se dispone de la clave privada para la dirección introducida.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Ha fallado la firma del mensaje.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Mensaje firmado.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>No se pudo decodificar la firma.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Compruebe la firma e inténtelo de nuevo.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>La firma no coincide con el resumen del mensaje.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>La verificación del mensaje ha fallado.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Mensaje verificado.</translation> </message> </context> <context> <name>TrafficGraphWidget</name> <message> <location filename="../trafficgraphwidget.cpp" line="+75"/> <source>KB/s</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+25"/> <source>Open until %1</source> <translation>Abierto hasta %1</translation> </message> <message> <location line="+6"/> <source>conflicted</source> <translation>en conflicto</translation> </message> <message> <location line="+2"/> <source>%1/offline</source> <translation>%1/fuera de línea</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/no confirmado</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 confirmaciones</translation> </message> <message> <location line="+17"/> <source>Status</source> <translation>Estado</translation> </message>
<message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, difundida a través de %n nodo</numerusform><numerusform>, difundida a través de %n nodos</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Fecha</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Fuente</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Generado</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>De</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>Para</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>dirección propia</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>etiqueta</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Crédito</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>disponible en %n bloque más</numerusform><numerusform>disponible en %n bloques más</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>no aceptada</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Débito</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Comisión de transacción</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Cantidad neta</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Mensaje</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Comentario</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>Identificador de transacción</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Las monedas generadas deben madurar 510 bloques antes de que puedan gastarse. Cuando generaste este bloque, este fue transmitido a la red para ser añadido a la cadena de bloques. Si falla al introducirse en la cadena, su estado cambiará a &quot;no aceptado&quot; y no se podrá gastar.
Esto ocasionalmente puede ocurrir si otro nodo genera un bloque a unos segundos del tuyo.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Información de depuración</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transacción</translation> </message> <message> <location line="+5"/> <source>Inputs</source> <translation>Entradas</translation> </message> <message> <location line="+21"/> <source>Amount</source> <translation>Cantidad</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>verdadero</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>falso</translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, todavía no ha sido difundida satisfactoriamente</translation> </message> <message numerus="yes"> <location line="-36"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+71"/> <source>unknown</source> <translation>desconocido</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Detalles de transacción</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Esta ventana muestra información detallada sobre la transacción</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+231"/> <source>Date</source> <translation>Fecha</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Tipo</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Dirección</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Cantidad</translation> </message> <message> <location line="+52"/> <source>Open until %1</source> <translation>Abierto hasta %1</translation> </message> <message> <location line="+12"/> <source>Confirmed (%1 confirmations)</source> <translation>Confirmado (%1 confirmaciones)</translation> </message> <message numerus="yes"> <location line="-15"/> <source>Open for %n more block(s)</source> <translation><numerusform>Abierto durante %n bloque más</numerusform><numerusform>Abierto durante %n bloques más</numerusform></translation> </message> <message> <location line="+6"/> <source>Offline</source> <translation>Sin conexión</translation> </message> <message> <location line="+3"/> <source>Unconfirmed</source> <translation>Sin confirmar</translation> </message> <message> <location line="+3"/> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation>Confirmando (%1 de %2 confirmaciones recomendadas)</translation> </message> <message> <location line="+6"/> <source>Conflicted</source> <translation>En conflicto</translation> </message> <message> <location line="+3"/> <source>Immature (%1 confirmations, will be available after %2)</source> <translation>No vencidos (%1 confirmaciones,
estarán disponibles al cabo de %2)</translation> </message> <message> <location line="+3"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>¡Este bloque no ha sido recibido por ningún otro nodo y probablemente no será aceptado!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Generado pero no aceptado</translation> </message> <message> <location line="+42"/> <source>Received with</source> <translation>Recibido con</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Recibido de</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Enviado a</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Pago propio</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Minado</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/d)</translation> </message> <message> <location line="+194"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Estado de transacción. Pasa el ratón sobre este campo para ver el número de confirmaciones.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Fecha y hora en que se recibió la transacción.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Tipo de transacción.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Dirección de destino de la transacción.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Cantidad retirada o añadida al saldo.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+54"/> <location line="+17"/> <source>All</source> <translation>Todo</translation> </message> <message> <location line="-16"/> <source>Today</source> <translation>Hoy</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Esta semana</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Este mes</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Mes pasado</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Este año</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Rango...</translation> </message> <message> <location line="+12"/> <source>Received with</source> <translation>Recibido con</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Enviado a</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>A usted mismo</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Minado</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Otra</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Introduzca una dirección o etiqueta que buscar</translation> </message> <message> <location
line="+7"/> <source>Min amount</source> <translation>Cantidad mínima</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Copiar dirección</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copiar etiqueta</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copiar cuantía</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Copiar identificador de transacción</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Editar etiqueta</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Mostrar detalles de la transacción</translation> </message> <message> <location line="+138"/> <source>Export Transaction Data</source> <translation>Exportar datos de transacción</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Archivo de valores separados por comas (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Confirmado</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Fecha</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Tipo</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Dirección</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Cantidad</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Error al exportar</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>No se pudo escribir en el archivo %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Rango:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>hasta</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+208"/> <source>Sending...</source> <translation>Enviando...</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+173"/> <source>CoinAwesome version</source> <translation>Versión de CoinAwesome</translation> </message> <message> <location line="+1"/> <source>Usage:</source> <translation>Uso:</translation> </message> <message> <location line="+1"/> <source>Send command to -server or coinawesomed</source> <translation>Envía un comando a -server o coinawesomed</translation> </message> <message> <location line="+1"/> <source>List commands</source> <translation>Muestra comandos </translation> </message> <message> <location line="+1"/> <source>Get help for a command</source> <translation>Recibir ayuda para un comando </translation> </message> <message> <location line="-147"/> <source>Options:</source> <translation>Opciones: </translation> </message> <message> <location line="+2"/> <source>Specify configuration file (default: coinawesome.conf)</source> <translation>Especifica un archivo de configuración (por defecto:
coinawesome.conf)</translation> </message> <message> <location line="+1"/> <source>Specify pid file (default: coinawesomed.pid)</source> <translation>Especifica un archivo pid (por defecto: coinawesomed.pid)</translation> </message> <message> <location line="+2"/> <source>Specify wallet file (within data directory)</source> <translation>Especificar archivo de monedero (dentro del directorio de datos)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Especificar directorio para los datos</translation> </message> <message> <location line="-25"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=coinawesomerpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;CoinAwesome Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Establecer el tamaño de caché de la base de datos en megabytes (predeterminado: 25)</translation> </message> <message> <location line="+1"/> <source>Set database disk log size in megabytes (default: 100)</source> <translation>Ajusta el tamaño de la base de datos del registro en megabytes (por defecto: 100)</translation> </message> <message> <location line="+6"/> <source>Listen for connections on &lt;port&gt; (default: 51225 or testnet: 25714)</source> <translation>Escuchando conexiones en el puerto &lt;port&gt; (por defecto: 51225 o testnet: 25714)</translation> </message> <message> <location line="+1"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Mantener como máximo &lt;n&gt; conexiones a pares (predeterminado: 125)</translation> </message> <message> <location line="+3"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Conectar a un nodo para obtener direcciones de pares y desconectar</translation> </message> <message> <location line="+1"/> <source>Specify your own public address</source> <translation>Especifique su propia dirección pública</translation> </message> <message> <location line="+4"/> <source>Bind to given address. Use [host]:port notation for IPv6</source> <translation>Enlazar a la dirección dada. 
Utilice la notación [host]:puerto para IPv6</translation> </message> <message> <location line="+1"/> <source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Always query for peer addresses via DNS lookup (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Umbral para la desconexión de pares con mal comportamiento (predeterminado: 100)</translation> </message> <message> <location line="+1"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Número de segundos en que se evita la reconexión de pares con mal comportamiento (predeterminado: 86400)</translation> </message> <message> <location line="-37"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Ha ocurrido un error al configurar el puerto RPC %u para escucha en IPv4: %s</translation> </message> <message> <location line="+65"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 15715 or testnet: 25715)</source> <translation>Escuchar conexiones JSON-RPC en &lt;port&gt; (predeterminado: 15715 o testnet: 25715)</translation> </message> <message> <location line="-17"/> <source>Accept command line and JSON-RPC commands</source> <translation>Aceptar comandos consola y JSON-RPC </translation> </message> <message> <location line="+1"/> <source>Run in the background as a daemon and accept commands</source> <translation>Ejecutar en segundo plano como daemon y aceptar comandos </translation> </message> <message> <location line="+1"/> <source>Use the test network</source> <translation>Usar la red de pruebas </translation> </message> <message> <location line="-24"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Aceptar conexiones desde el exterior (predeterminado: 1 si no -proxy o -connect)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Ha ocurrido un error al configurar el puerto RPC %u para escuchar mediante IPv6. Recurriendo a IPv4: %s</translation> </message> <message> <location line="+96"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Establecer el tamaño máximo de las transacciones alta-prioridad/baja-comisión en bytes (por defecto: 27000)</translation> </message> <message> <location line="+12"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Aviso: ¡-paytxfee tiene un valor muy alto! Esta es la comisión que pagará si envía una transacción.</translation> </message> <message> <location line="-103"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong CoinAwesome will not work properly.</source> <translation>Advertencia: Verifique que la fecha y hora del equipo sean correctas! Si su reloj es erróneo CoinAwesome no funcionará correctamente.</translation> </message> <message> <location line="+132"/> <source>Warning: error reading wallet.dat! 
All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Aviso: ¡Error al leer wallet.dat! Todas las claves se han leído correctamente, pero podrían faltar o ser incorrectos los datos de transacciones o las entradas de la libreta de direcciones.</translation> </message> <message> <location line="-18"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Aviso: ¡Recuperados datos de wallet.dat corrupto! El wallet.dat original se ha guardado como wallet.{timestamp}.bak en %s; si hubiera errores en su saldo o transacciones, deberá restaurar una copia de seguridad.</translation> </message> <message> <location line="-31"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Intento de recuperar claves privadas de un wallet.dat corrupto</translation> </message> <message> <location line="+5"/> <source>Block creation options:</source> <translation>Opciones de creación de bloques:</translation> </message> <message> <location line="-69"/> <source>Connect only to the specified node(s)</source> <translation>Conectar sólo a los nodos (o nodo) especificados</translation> </message> <message> <location line="+4"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Descubrir dirección IP propia (predeterminado: 1 al escuchar sin -externalip)</translation> </message> <message> <location line="+101"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Ha fallado la escucha en todos los puertos. Use -listen=0 si desea esto.</translation> </message> <message> <location line="-91"/> <source>Sync checkpoints policy (default: strict)</source> <translation>Política de puntos de control de sincronización (por defecto: estricta)</translation> </message> <message> <location line="+89"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Dirección -tor inválida: &apos;%s&apos;</translation> </message> <message> <location line="+4"/> <source>Invalid amount for -reservebalance=&lt;amount&gt;</source> <translation>Cantidad no válida para -reservebalance=&lt;amount&gt;</translation> </message> <message> <location line="-88"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Búfer de recepción máximo por conexión, &lt;n&gt;*1000 bytes (predeterminado: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Búfer de envío máximo por conexión, &lt;n&gt;*1000 bytes (predeterminado: 1000)</translation> </message> <message> <location line="-17"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Conectarse solo a nodos de la red &lt;net&gt; (IPv4, IPv6 o Tor)</translation> </message> <message> <location line="+31"/> <source>Prepend debug output with timestamp</source> <translation>Prefijar salida de depuración con marca de tiempo</translation> </message> <message> <location line="+41"/> <source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation>Opciones SSL: (ver la Bitcoin Wiki para instrucciones de configuración SSL)</translation> </message> <message> <location line="-81"/> <source>Select the version of socks proxy 
to use (4-5, default: 5)</source> <translation>Selecciona la versión de socks proxy a usar (4-5, por defecto: 5)</translation> </message> <message> <location line="+42"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Enviar información de trazas/depuración a la consola en lugar de al archivo debug.log</translation> </message> <message> <location line="+5"/> <source>Send trace/debug info to debugger</source> <translation>Enviar información de trazas/depuración al depurador</translation> </message> <message> <location line="+30"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Establecer el tamaño máximo de bloque en bytes (por defecto: 250000)</translation> </message> <message> <location line="-1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Establecer tamaño mínimo de bloque en bytes (predeterminado: 0)</translation> </message> <message> <location line="-35"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Reducir el archivo debug.log al iniciar el cliente (predeterminado: 1 sin -debug)</translation> </message> <message> <location line="-43"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Especificar el tiempo máximo de conexión en milisegundos (predeterminado: 5000)</translation> </message> <message> <location line="+116"/> <source>Unable to sign checkpoint, wrong checkpointkey? </source> <translation>No es posible firmar el punto de control, ¿clave de punto de control incorrecta? </translation> </message> <message> <location line="-86"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Usar UPnP para asignar el puerto de escucha (predeterminado: 0)</translation> </message> <message> <location line="-1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Usar UPnP para asignar el puerto de escucha (predeterminado: 1 al escuchar)</translation> </message> <message> <location line="-26"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Usar proxy para alcanzar los servicios ocultos de Tor (por defecto: el mismo que -proxy)</translation> </message> <message> <location line="+47"/> <source>Username for JSON-RPC connections</source> <translation>Nombre de usuario para las conexiones JSON-RPC </translation> </message> <message> <location line="+51"/> <source>Verifying database integrity...</source> <translation>Verificando la integridad de la base de datos...</translation> </message> <message> <location line="+44"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: The transaction was rejected! 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>WARNING: syncronized checkpoint violation detected, but skipped!</source> <translation>ADVERTENCIA: violación de un punto de control sincronizado detectada, se saltara!</translation> </message> <message> <location line="-1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Aviso: Esta versión es obsoleta, actualización necesaria!</translation> </message> <message> <location line="-54"/> <source>wallet.dat corrupt, salvage failed</source> <translation>wallet.dat corrupto. Ha fallado la recuperación.</translation> </message> <message> <location line="-56"/> <source>Password for JSON-RPC connections</source> <translation>Contraseña para las conexiones JSON-RPC </translation> </message> <message> <location line="-32"/> <source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source> <translation>Sincronizar el tiempo con otros nodos. Desactivar si el tiempo en su sistema es preciso, por ejemplo si usa sincronización con NTP (por defecto: 1)</translation> </message> <message> <location line="+13"/> <source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source> <translation>Al crear transacciones, ignorar las entradas con valor inferior a esta (por defecto: 0.01)</translation> </message> <message> <location line="+6"/> <source>Output debugging information (default: 0, supplying &lt;category&gt; is optional)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>If &lt;category&gt; is not supplied, output all debugging information.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>&lt;category&gt; can be:</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly. 
This is intended for regression testing tools and app development.</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Permitir conexiones JSON-RPC desde la dirección IP especificada </translation> </message> <message> <location line="+1"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Enviar comando al nodo situado en &lt;ip&gt; (predeterminado: 127.0.0.1) </translation> </message> <message> <location line="+1"/> <source>Wait for RPC server to start</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Ejecutar un comando cuando cambia el mejor bloque (%s en cmd se sustituye por el hash de bloque)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Ejecutar comando cuando una transacción del monedero cambia (%s en cmd se remplazará por TxID)</translation> </message> <message> <location line="+3"/> <source>Require a confirmations for change (default: 0)</source> <translation>Requerir confirmaciones para cambio (por defecto: 0)</translation> </message> <message> <location line="+2"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation>Ejecutar comando cuando una alerta relevante sea recibida (%s en la linea de comandos es reemplazado por un mensaje)</translation> </message> <message> <location line="+3"/> <source>Upgrade wallet to latest format</source> <translation>Actualizar el monedero al último formato</translation> </message> <message> <location line="+1"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Ajustar el número de claves en reserva &lt;n&gt; (predeterminado: 100) </translation> </message> <message> <location line="+1"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Volver a examinar la cadena de bloques en busca de transacciones del monedero perdidas</translation> </message> <message> <location line="+3"/> <source>How thorough the block verification is (0-6, default: 1)</source> <translation>Cómo de minuciosa es la verificación del bloque (0-6, por defecto: 1)</translation> </message> <message> <location line="+1"/> <source>Imports blocks from external blk000?.dat file</source> <translation>Importar bloques desde el archivo externo blk000?.dat</translation> </message> <message> <location line="+9"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Usar OpenSSL (https) para las conexiones JSON-RPC </translation> </message> <message> <location line="+1"/> <source>Server certificate file (default: server.cert)</source> <translation>Certificado del servidor (predeterminado: server.cert) </translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Clave privada del servidor (predeterminado: server.pem) </translation> </message> <message> <location line="+10"/> <source>Initialization sanity check failed. 
CoinAwesome is shutting down.</source> <translation type="unfinished"/> </message> <message> <location line="+50"/> <source>Error: Wallet unlocked for staking only, unable to create transaction.</source> <translation>Error: Monedero desbloqueado sólo para hacer &quot;stake&quot;, no es posible crear una transacción.</translation> </message> <message> <location line="+17"/> <source>Error: Disk space is low!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source> <translation>ADVERTENCIA: Punto de control no válido encontrado! Las transacciones que se muestran pueden no ser correctas! Puede que tenga que actualizar o notificar a los desarrolladores.</translation> </message> <message> <location line="-174"/> <source>This help message</source> <translation>Este mensaje de ayuda </translation> </message> <message> <location line="+104"/> <source>Wallet %s resides outside data directory %s.</source> <translation>El monedero %s reside fuera del directorio de datos %s.</translation> </message> <message> <location line="+37"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>No es posible conectar con %s en este sistema (bind ha dado el error %d, %s)</translation> </message> <message> <location line="-133"/> <source>Connect through socks proxy</source> <translation>Conecte a través del socks proxy</translation> </message> <message> <location line="+3"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Permitir búsquedas DNS para -addnode, -seednode y -connect</translation> </message> <message> <location line="+126"/> <source>Loading addresses...</source> <translation>Cargando direcciones...</translation> </message> <message> <location line="-12"/> <source>Error loading blkindex.dat</source> <translation>Error al cargar blkindex.dat</translation> </message> <message> <location line="+2"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Error al cargar wallet.dat: el monedero está dañado</translation> </message> <message> <location line="+4"/> <source>Error loading wallet.dat: Wallet requires newer version of CoinAwesome</source> <translation>Error cargando wallet.dat: El monedero requiere una nueva versión de CoinAwesome</translation> </message> <message> <location line="+1"/> <source>Wallet needed to be rewritten: restart CoinAwesome to complete</source> <translation>El monedero necesita ser reescrito: reinicie CoinAwesome para completar</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat</source> <translation>Error al cargar wallet.dat</translation> </message> <message> <location line="-16"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Dirección -proxy inválida: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>La red especificada en -onlynet &apos;%s&apos; es desconocida</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>Solicitada versión de proxy -socks desconocida: %i</translation> 
</message> <message> <location line="+4"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>No se puede resolver la dirección de -bind: &apos;%s&apos;</translation> </message> <message> <location line="+2"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>No se puede resolver la dirección de -externalip: &apos;%s&apos;</translation> </message> <message> <location line="-23"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Cantidad inválida para -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+60"/> <source>Sending...</source> <translation>Enviando...</translation> </message> <message> <location line="+5"/> <source>Invalid amount</source> <translation>Cuantía no válida</translation> </message> <message> <location line="+1"/> <source>Insufficient funds</source> <translation>Fondos insuficientes</translation> </message> <message> <location line="-40"/> <source>Loading block index...</source> <translation>Cargando el índice de bloques...</translation> </message> <message> <location line="-110"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Añadir un nodo al que conectarse y tratar de mantener la conexión abierta</translation> </message> <message> <location line="+125"/> <source>Unable to bind to %s on this computer. CoinAwesome is probably already running.</source> <translation>No se puede enlazar a %s en este equipo. CoinAwesome probablemente ya esté en funcionamiento.</translation> </message> <message> <location line="-101"/> <source>Fee per KB to add to transactions you send</source> <translation>Comisión por KB a añadir a las transacciones que envía</translation> </message> <message> <location line="+34"/> <source>Minimize weight consumption (experimental) (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>How many blocks to check at startup (default: 500, 0 = all)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Keep at most &lt;n&gt; unconnectable blocks in memory (default: %u)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Acceptable ciphers (default: TLSv1.2+HIGH:TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!3DES:@STRENGTH)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Deprecated argument -debugnet ignored, use -debug=net</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Invalid amount for -mininput=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Cantidad no válida para -mininput=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. 
CoinAwesome is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Loading wallet...</source> <translation>Cargando monedero...</translation> </message> <message> <location line="+8"/> <source>Cannot downgrade wallet</source> <translation>No se puede rebajar el monedero</translation> </message> <message> <location line="+1"/> <source>Cannot write default address</source> <translation>No se puede escribir la dirección predeterminada</translation> </message> <message> <location line="+1"/> <source>Rescanning...</source> <translation>Reexplorando...</translation> </message> <message> <location line="+2"/> <source>Done loading</source> <translation>Se terminó de cargar</translation> </message> <message> <location line="-161"/> <source>To use the %s option</source> <translation>Para utilizar la opción %s</translation> </message> <message> <location line="+188"/> <source>Error</source> <translation>Error</translation> </message> <message> <location line="-18"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Tiene que establecer rpcpassword=&lt;contraseña&gt; en el fichero de configuración: %s Si el archivo no existe, créelo con permiso de lectura solamente del propietario.</translation> </message> </context> </TS><|fim▁end|>
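Two defects recur in Qt Linguist catalogs like the one above: entries still marked type="unfinished", and translations whose %-placeholders do not match the source string (such as a stray %s). A minimal consistency check using only the Python standard library; the file name below is a placeholder, not a file from this project:

import xml.etree.ElementTree as ET

def report_suspect_translations(ts_path):
    """Scan a Qt Linguist .ts catalog and flag entries that need review."""
    tree = ET.parse(ts_path)
    for message in tree.getroot().iter('message'):
        source = message.findtext('source') or ''
        translation = message.find('translation')
        if translation is None:
            continue
        if translation.get('type') == 'unfinished':
            print('unfinished:', source[:60])
        # Rough heuristic: a differing number of '%' signs usually means a
        # placeholder was dropped or duplicated in the translation.
        elif (translation.text or '').count('%') != source.count('%'):
            print('placeholder mismatch:', source[:60])

report_suspect_translations('coinawesome_es.ts')  # path is a placeholder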
<|file_name|>nginx.py<|end_file_name|><|fim▁begin|>from flask import render_template class NginxConfigRenderer(): def __init__(self, manifold): self.manifold = manifold self.app = manifold.app def render(self, minions): with self.app.app_context(): return render_template('nginx/nginx.conf', manifold=self.manifold, minions=minions)<|fim▁hole|> content = self.render(minions) conf_path = self.manifold.config.NGINX_CONF_PATH with open(conf_path, 'w') as f: f.write(content)<|fim▁end|>
def write(self, minions):
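A minimal usage sketch for the NginxConfigRenderer row above, with the completion spliced back in. The manifold object, its name and config fields, the throwaway template, and the output path are illustrative assumptions; only the constructor, render(), and write() come from the code itself:

import os
import tempfile
from types import SimpleNamespace
from flask import Flask

# Create a throwaway 'nginx/nginx.conf' template so render_template() can load it.
template_root = tempfile.mkdtemp()
os.makedirs(os.path.join(template_root, 'nginx'))
with open(os.path.join(template_root, 'nginx', 'nginx.conf'), 'w') as f:
    f.write('# {{ minions|length }} minion(s) for {{ manifold.name }}\n')

app = Flask(__name__, template_folder=template_root)

# Stand-in for the real manifold; field names here are assumptions.
manifold = SimpleNamespace(
    app=app,
    name='demo',
    config=SimpleNamespace(NGINX_CONF_PATH=os.path.join(template_root, 'nginx.out')),
)

renderer = NginxConfigRenderer(manifold)
renderer.write(minions=['10.0.0.1', '10.0.0.2'])  # renders the template and writes the file
print(open(manifold.config.NGINX_CONF_PATH).read())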
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from pymongo import MongoClient from dalmongo import configuration<|fim▁hole|> # get the instance of MongoDB client client = MongoClient(configuration.MONGODB_HOST, configuration.MONGODB_PORT) # get the main application database db = getattr(client, configuration.MONGODB_NAME)<|fim▁end|>
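The module above builds one shared MongoClient and database handle at import time, so consumers import db instead of reconnecting. A usage sketch; the users collection and its fields are examples, not part of dalmongo:

from dalmongo import db

def upsert_user(email, name):
    # update_one(..., upsert=True) inserts the document when no match exists.
    db.users.update_one({'email': email}, {'$set': {'name': name}}, upsert=True)

def find_user(email):
    return db.users.find_one({'email': email})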
<|file_name|>d3.js<|end_file_name|><|fim▁begin|>'use strict'; angular.module('aurea') .directive('d3Bars', function ($window, $timeout, d3Service) { return { restrict: 'EA', scope: { data: '=', onClick: '&' }, link: function (scope, ele, attrs) { d3Service.d3().then(function (d3) { var margin = parseInt(attrs.margin) || 20, barHeight = parseInt(attrs.barHeight) || 20, barPadding = parseInt(attrs.barPadding) || 5; var svg = d3.select(ele[0]) .append('svg') .style('width', '100%'); // Browser onresize event window.onresize = function () { scope.$apply(); }; // Watch for resize event<|fim▁hole|> }); scope.$watch('data', function (newData) { scope.render(newData); }, true); scope.render = function (data) { // remove all previous items before render svg.selectAll('*').remove(); // If we don't pass any data, return out of the element if (!data) return; // setup variables var width = d3.select(ele[0]).node().offsetWidth - margin, // calculate the height height = scope.data.length * (barHeight + barPadding), // Use the category20() scale function for multicolor support color = d3.scale.category20(), // our xScale xScale = d3.scale.linear() .domain([0, 31]) .range([0, width]); // set the height based on the calculations above svg.attr('height', height); //create the rectangles for the bar chart svg.selectAll('rect') .data(data).enter() .append('rect') .attr('height', barHeight) .attr('width', 140) .attr('x', 110) .attr('y', function (d, i) { return i * (barHeight + barPadding); }) .attr('fill', function (d) { return color(d.value); }) .attr('width', function (d) { return xScale(d.value); }); var baseSelection = svg.selectAll('text'); baseSelection .data(data) .enter() .append('text') .attr('font-family', 'monospace') .attr('fill', '#000') .attr('y', function (d, i) { return i * (barHeight + barPadding) + 15; }) .attr('x', 15) .text(function (d) { return d.name; }); baseSelection .data(data) .enter() .append('text') .attr('font-family', 'monospace') .attr('fill', '#fff') .attr('y', function (d, i) { return i * (barHeight + barPadding) + 15; }) .attr('x', 114) .text(function (d) { return d.value > 0 ? d.value : ''; }); }; }); } }; });<|fim▁end|>
scope.$watch(function () { return angular.element($window)[0].innerWidth; }, function () { scope.render(scope.data);
<|file_name|>component_test.py<|end_file_name|><|fim▁begin|># Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= # Lint as: python3 """Tests for nitroml.automl.metalearning.metalearner.component.""" from absl.testing import absltest from nitroml.automl.metalearning import artifacts from nitroml.automl.metalearning.metalearner.component import MetaLearner from tfx.types import channel_utils from tfx.types import standard_artifacts class ComponentTest(absltest.TestCase):<|fim▁hole|> num_train = 5 self.meta_train_data = {} for ix in range(num_train): self.meta_train_data[f'hparams_train_{ix}'] = channel_utils.as_channel( [standard_artifacts.HyperParameters()]) self.meta_train_data[ f'meta_train_features_{ix}'] = channel_utils.as_channel( [artifacts.MetaFeatures()]) self.custom_config = {'some': 'thing', 'some other': 1, 'thing': 2} def testConstructWithMajorityVoting(self): metalearner = MetaLearner( algorithm='majority_voting', custom_config=self.custom_config, **self.meta_train_data) self.assertEqual(artifacts.KCandidateHyperParameters.TYPE_NAME, metalearner.outputs['output_hyperparameters'].type_name) self.assertEqual(standard_artifacts.Model.TYPE_NAME, metalearner.outputs['metamodel'].type_name) def testConstructWithNearestNeighbor(self): metalearner = MetaLearner( algorithm='nearest_neighbor', custom_config=self.custom_config, **self.meta_train_data) self.assertEqual(artifacts.KCandidateHyperParameters.TYPE_NAME, metalearner.outputs['output_hyperparameters'].type_name) self.assertEqual(standard_artifacts.Model.TYPE_NAME, metalearner.outputs['metamodel'].type_name) if __name__ == '__main__': absltest.main()<|fim▁end|>
def setUp(self): super(ComponentTest, self).setUp()
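Each record in this dump pairs a prompt, whose body sits between <|fim▁begin|> and <|fim▁end|> with a single <|fim▁hole|> marker, with a completion that fills the hole, so recovering the original source file is a simple splice. A sketch of that reassembly; the marker strings are taken verbatim from the records:

FIM_BEGIN = '<|fim▁begin|>'
FIM_HOLE = '<|fim▁hole|>'
FIM_END = '<|fim▁end|>'

def reassemble(prompt, completion):
    """Splice a completion back into its FIM prompt to recover the file body."""
    body = prompt.split(FIM_BEGIN, 1)[1].rsplit(FIM_END, 1)[0]
    prefix, suffix = body.split(FIM_HOLE, 1)
    return prefix + completion + suffix

# e.g. reassemble(row['prompt'], row['completion']) yields the full test file above.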
<|file_name|>displayable_path.rs<|end_file_name|><|fim▁begin|>use std::fmt; use std::path::PathBuf; // Newtype to make a standard path buffer presentable (via the Display<|fim▁hole|>// trait), which is required for any type used in search/select mode. pub struct DisplayablePath(pub PathBuf); impl fmt::Display for DisplayablePath { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let &DisplayablePath(ref path) = self; write!(f, "{}", path.to_string_lossy()) } }<|fim▁end|>
<|file_name|>calculate-element-wise.cc<|end_file_name|><|fim▁begin|>/* * Medical Image Registration ToolKit (MIRTK) * * Copyright 2015-2017 Imperial College London * Copyright 2015-2017 Andreas Schuh * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "mirtk/Common.h" #include "mirtk/Options.h" #include "mirtk/ImageConfig.h" #include "mirtk/IOConfig.h" #include "mirtk/DataOp.h" #include "mirtk/DataStatistics.h" #include "mirtk/DataFunctions.h" #if MIRTK_Image_WITH_VTK #include "vtkDataSet.h" #include "vtkSmartPointer.h" #include "vtkPointData.h" #include "vtkCellData.h" #include "vtkDataArray.h" #endif using namespace mirtk; using namespace mirtk::data; using namespace mirtk::data::op; using namespace mirtk::data::statistic; // ============================================================================= // Help // ============================================================================= // ----------------------------------------------------------------------------- void PrintHelp(const char *name) { cout << "\n"; cout << "Usage: " << name << " <input> [options]\n"; cout << "\n"; cout << "Description:\n"; cout << " This tool can be used for basic calculations from a sequence of data values read\n"; cout << " either from an image or a VTK pointset. It can be used, for example, to add two\n"; cout << " data sequences and to divide the result by a constant. The current sequence can\n"; cout << " be written to an output file again using :option:`-out`. Additionally, statistics\n"; cout << " of the current data sequence can be computed such as the mean or variance.\n"; cout << " The order of the data transformations and calculation of statistics is determined\n"; cout << " by the order of the command-line arguments.\n"; cout << "\n"; cout << " The data mask is used to include/exclude values from subsequent operations.\n"; cout << " Initially, all NaN values in the input data sequence are excluded.\n"; cout << " Further values can be excluded using one or more of the masking operations.\n"; cout << " Using the mask, operations can be performed on only a subset of the data,\n"; cout << " and the mask then reset using :option:`-reset-mask`.\n"; cout << "\n"; cout << " By default, data statistics are printed to STDOUT in a human readable format.\n"; cout << " This output can be appended to a text file using :option:`-append` instead.\n"; cout << " For a more machine readable output, e.g., as comma separated values (CSV),\n"; cout << " specify a delimiting string using :option:`-delimiter`. In this case, a header\n"; cout << " line is also printed when :option:`-header` is given with optional user\n"; cout << " specified column names for the individual output values.\n"; cout << "\n"; cout << "Input options:\n"; cout << " -pd, -point-data, -scalars <name> Name of input point data array. (default: active SCALARS array)\n"; cout << " -cd, -cell-data <name> Name of input cell data array. 
Overrides :option:`-pd`.\n"; cout << "\n"; cout << "Data masking options:\n"; cout << " -even\n"; cout << " Exclude values which are not an even number when cast to an integer.\n"; cout << " -odd\n"; cout << " Exclude values which are not an odd number when cast to an integer.\n"; cout << " -label <value|lower..upper>...\n"; cout << " Include data points with a value equal to either one of the given values.\n"; cout << " Closed intervals of values can be specified as \"lower..upper\".\n"; cout << " For example, \"-label 1 3 5..6 10 20..50\". This option is a shorthand for\n"; cout << " :option:`-mask-all` :option:`-threshold-inside` <lower> <upper> :option:`-invert-mask`\n"; cout << " where one :option:`-threshold-inside` operation is performed for each argument.\n"; cout << " -mask <value>... | <file> [<scalars>] [<value>]\n"; cout << " Exclude values equal a given threshold or with specified input mask <value>.\n"; cout << " The default mask value of values to be excluded is zero. When the input file\n"; cout << " is a point set file (e.g., .vtk, .vtp), the optional <scalars> argument can be\n"; cout << " used to specify the name of the point/cell data array to use as mask.\n"; cout << " Note that this operation does not modify the data values, but only marks them\n"; cout << " to be ignored from now on. Use :option:`-pad` following this operation to\n"; cout << " replace these values by a constant background value.\n"; cout << " -mask-all\n"; cout << " Exclude all values.\n"; cout << " -reset-mask\n"; cout << " Reset mask to include all values again.\n"; cout << " -invert-mask\n"; cout << " Invert mask to include all values that where excluded before and\n"; cout << " exclude all values that were included before.\n"; cout << " -set, -inside <value>\n"; cout << " Set new value for all currently included data values.\n"; cout << " -pad, -outside <value>\n"; cout << " Set new value for all currently excluded data values.\n"; cout << "\n"; cout << "Data thresholding options:\n"; cout << " -threshold <lower> [<upper>]\n"; cout << " This masking operation is equivalent to :option:`-threshold-outside`.\n"; cout << " When no upper threshold is specified, it defaults to +inf. Therefore,\n"; cout << " \"-threshold 0\" will exclude all negative values.\n"; cout << " -percentile-threshold, -pct-threshold <lower>\n"; cout << " This masking operation is equivalent to :option:`-threshold-outside-percentiles`.\n"; cout << " with an upper threshold of +inf. 
Therefore, \"-threshold 0\" excludes all negative values.\n"; cout << " -threshold-percentiles, -threshold-pcts <lower> <upper>\n"; cout << " This masking operation is equivalent to :option:`-threshold-outside-percentiles`.\n"; cout << " -threshold-inside, -mask-inside <lower> <upper>\n"; cout << " Exclude values which are inside a given closed interval.\n"; cout << " When the lower threshold is greater than the upper threshold,\n"; cout << " values less than or equal to the upper threshold and values greater\n"; cout << " than or equal to the lower threshold are excluded.\n"; cout << " -threshold-inside-percentiles, -threshold-inside-pcts, -mask-inside-percentiles, -mask-inside-pct <lower> <upper>\n"; cout << " Exclude values which are inside a given closed interval of percentiles.\n"; cout << " When the lower percentile is greater than the upper percentile,\n"; cout << " values less than or equal to the upper percentile and values greater\n"; cout << " than or equal to the lower percentile are excluded.\n"; cout << " -threshold-outside, -mask-outside <lower> <upper>\n"; cout << " Exclude values which are outside a given open interval.\n"; cout << " When the lower threshold is greater than the upper threshold,\n"; cout << " values inside the closed interval <upper>..<lower> are excluded.\n"; cout << " -threshold-outside-percentiles, -threshold-outside-pcts, -mask-outside-percentiles, -mask-outside-pcts <lower> <upper>\n"; cout << " Exclude values which are outside a given open interval of percentiles.\n"; cout << " When the lower percentile is greater than the upper percentile,\n"; cout << " values inside the closed interval <upper>..<lower> are excluded.\n"; cout << " -threshold-lt, -lower-threshold, -mask-lt <value>\n"; cout << " Exclude values less than a given threshold.\n"; cout << " -threshold-lt-percentile, -threshold-lt-pct, -lower-percentile-threshold, -lower-pct-threshold, -mask-lt-percentile, -mask-lt-pct <value>\n"; cout << " Exclude values less than a given precentile.\n"; cout << " -threshold-le, -mask-le, -mask-below <value>\n"; cout << " Exclude values less than or equal to a given threshold.\n"; cout << " -threshold-le-percentile, -threshold-le-pct, -mask-le-percentile, -mask-le-pct, -mask-below-percentile, -mask-below-pct <value>\n"; cout << " Exclude values less than or equal to a given percentile.\n"; cout << " -threshold-ge, -mask-ge, -mask-above <value>\n"; cout << " Exclude values greater than or equal to a given threshold.\n"; cout << " -threshold-ge-percentile, -threshold-ge-pct, -mask-ge-percentile, -mask-ge-pct, -mask-above-percentile, -mask-above-pct <value>\n"; cout << " Exclude values greater than or equal to a given percentile.\n"; cout << " -threshold-gt, -upper-threshold, -mask-gt <value>\n"; cout << " Exclude values greater than a given threshold.\n"; cout << " -threshold-gt-percentile, -threshold-gt-pct, -upper-percentile-threshold, -upper-pct-threshold, -mask-gt-percentile, -mask-gt-pct <value>\n"; cout << " Exclude values greater than a given percentile.\n"; cout << "\n"; cout << "Data rescaling options:\n"; cout << " -binarize <lower> [<upper>]\n"; cout << " Set values inside the closed interval <lower>..<upper> to one,\n"; cout << " and all other values to zero. The default upper threshold is +inf.\n"; cout << " When the lower threshold is greater than the upper threshold,\n"; cout << " values inside the closed interval <upper>..<lower> are set to zero\n"; cout << " and all other values to one instead. 
This operation is short for:\n"; cout << " :option:`-threshold-inside` <lower> <upper> :option:`-set` 1 :option:`-pad` 0\n"; cout << " -clamp <lower> <upper>\n"; cout << " Clamp values which are less than a lower or greater than an upper threshold.\n"; cout << " -clamp-percentiles, -clamp-pcts <lower> <upper>\n"; cout << " Clamp values which are less than a lower percentile or greater than an upper percentile.\n"; cout << " -clamp-below, -clamp-lt <value>\n"; cout << " Clamp values less than a given threshold.\n"; cout << " -clamp-below-percentile, -clamp-below-pct, -clamp-lt-percentile, -clamp-lt-pct <value>\n"; cout << " Clamp values less than a given percentile.\n"; cout << " -clamp-above, -clamp-gt <value>\n"; cout << " Clamp values greater than a given threshold.\n"; cout << " -clamp-above-percentile, -clamp-above-pct, -clamp-gt-percentile, -clamp-gt-pct <value>\n"; cout << " Clamp values greater than a given percentile.\n"; cout << " -rescale <min> <max>\n"; cout << " Linearly rescale values to the interval [min, max].\n"; cout << " -map <from> <to>...\n"; cout << " Replaces values equal to <from> by the specified <to> value. Multiple pairs of <from>\n"; cout << " and <to> value replacements can be specified in order to perform the substitutions in\n"; cout << " one step. For example, to swap the two values 1 and 2, use ``-map 1 2 2 1``.\n"; cout << "\n"; cout << "Arithmetic operation options:\n"; cout << " -add, -plus <value> | <file> [<scalars>]\n"; cout << " Add constant value or data sequence read from specified file.\n"; cout << " Another name for this option is the '+' sign, see Examples.\n"; cout << " -sub, -subtract, -minus <value> | <file> [<scalars>]\n"; cout << " Subtract constant value or data sequence read from specified file.\n"; cout << " Another name for this option is the '-' sign, see Examples.\n"; cout << " -mul, -multiply-by, -times <value> | <file> [<scalars>]\n"; cout << " Multiply by constant value or data sequence read from specified file.\n"; cout << " Another name for this option is the '*' sign, see Examples.\n"; cout << " -div, -divide-by, -over <value> | sum | <file> [<scalars>]\n"; cout << " Divide by constant value or data sequence read from specified file.\n"; cout << " When the argument is \"sum\", the divisor is the sum of the values.\n"; cout << " When dividing by zero values in the input file, the result is NaN.\n"; cout << " Use :option:`-mask` with argument NaN and :option:`-pad` to replace\n"; cout << " these undefined values by a constant such as zero.\n"; cout << " Another name for this option is the '/' sign, see Examples.\n"; cout << " -div-with-zero <value> | sum | <file> [<scalars>]\n"; cout << " Same as :option:`-div`, but set result to zero in case of division by zero.\n"; cout << " -abs\n"; cout << " Replace values by their respective absolute value.\n"; cout << " -pow, -power <exponent>\n"; cout << " Raise values to the power of the given exponent.\n"; cout << " -sq, -square\n"; cout << " Raise values to the power of 2 (i.e., -pow 2).\n"; cout << " -sqrt\n"; cout << " Calculate square root of each value (i.e., -pow .5).\n"; cout << " -exp\n"; cout << " Calculate exponential of data sequence.\n"; cout << " -log [<threshold>] [<base>]\n"; cout << " Compute logarithm after applying an optional threshold.\n"; cout << " (default threshold: min double, default base: e)\n"; cout << " -lb, -log2 [<threshold>]\n"; cout << " Compute binary logarithm, alias for :option:`-log` with base 2.\n"; cout << " -ln, -loge [<threshold>]\n"; cout << " 
Compute natural logarithm, alias for :option:`-log` with base e.\n"; cout << " -lg, -log10 [<threshold>]\n"; cout << " Compute logarithm to base 10, alias for :option:`-log` with base 10.\n"; cout << " -mod, -fmod <denominator>\n"; cout << " Compute modulo division of each value with specified denominator.\n"; cout << " -floor\n"; cout << " Round floating point values to largest integer value that is not greater.\n"; cout << " -ceil\n"; cout << " Round floating point values to smallest integer value that is greater.\n"; cout << " -round\n"; cout << " Round floating point values to the nearest integer value, away from zero for halfway cases.\n"; cout << "\n"; cout << "Data output options:\n"; cout << " -out, -o, -output <file> [<type>] [<name>]\n"; cout << " Write current data sequence to file in the format of the input file.\n"; cout << " Output data type can be: uchar, short, ushort, int, uint, float, double.\n"; cout << " The optional <name> argument can be used to save the modified data\n"; cout << " of an input point set data array with a different name along with the\n"; cout << " input data. Otherwise, the input data values are replaced by the modified\n"; cout << " values and stored with point data array name is unchanged.\n"; cout << " Another name for this option is the '=' sign, but the optional arguments are\n"; cout << " are not supported by this alternative notation. See Examples for usage.\n"; cout << "\n"; cout << "Data statistics options:\n"; cout << " -append <file>\n"; cout << " Append output to a file. (default: STDOUT)\n"; cout << " -delimiter, -delim, -d, -sep\n"; cout << " Delimiting character(s). (default: '')\n"; cout << " -header [<name>...]\n"; cout << " Request output of header line if delimiter was specified as well.\n"; cout << " If the output is appended to a text file, the header is only printed\n"; cout << " if it does not exist. If no or fewer custom column names are given,\n"; cout << " the default names for each statistic are printed. (default: none)\n"; cout << " -prefix <str>...\n"; cout << " One or more prefix strings to print. If no delimiter is specified,\n"; cout << " the concatenated strings are printed before each line of the output.\n"; cout << " Otherwise, each prefix string is printed as entry for the first columns\n"; cout << " in the delimited output row, separated by the specified delimiter. (default: none)\n"; cout << " -precision, -digits <int>\n"; cout << " Number of significant digits. (default: 5)\n"; cout << " -median\n"; cout << " Print median value, i.e., 50th percentile. (default: off)\n"; cout << " -mean, -avg, -average\n"; cout << " Print mean value. (default: on)\n"; cout << " -variance, -var\n"; cout << " Print variance of values. (default: off)\n"; cout << " -sigma, -std, -stddev, -stdev, -sd\n"; cout << " Print standard deviation of values. (default: on)\n"; cout << " -normal-distribution\n"; cout << " Print mean and standard deviation of values.\n"; cout << " Other option names: -mean+sigma, -mean+sd, -avg+std,... (default: off)\n"; cout << " -mad, -mean-absolute-difference, -mean-absolute-deviation\n"; cout << " Print mean absolute difference/deviation around the mean. (default: off)\n"; cout << " -mad-median, -median-absolute-difference, -median-absolute-deviation\n"; cout << " Print mean absolute difference/deviation around the median. (default: off)\n"; cout << " -minimum, -min\n"; cout << " Print minimum value. (default: off)\n"; cout << " -maximum, -max\n"; cout << " Print maximum value. 
(default: off)\n"; cout << " -extrema, -minmax\n"; cout << " Print minimum and maximum value. (default: on)\n"; cout << " -range\n"; cout << " Print range of values (i.e., max - min). (default: off)\n"; cout << " -percentile, -pct, -p <n>...\n"; cout << " Print n-th percentile. (default: none)\n"; cout << " -lower-percentile-mean, -lpctavg <n>\n"; cout << " Print mean intensity of values less than or equal to the n-th percentile. (default: off)\n"; cout << " -upper-percentile-mean, -upctavg <n>\n"; cout << " Print mean intensity of values greater than or equal to the n-th percentile. (default: off)\n"; cout << " -sum\n"; cout << " Print sum of values. Can be used to count values within a certain range using a thresholding\n"; cout << " followed by :option:`-set` 1 before summing these values. (default: off)\n"; cout << " -count\n"; cout << " Print number of values inside the mask, i.e., values not currently excluded. (default: off)\n"; PrintCommonOptions(cout); cout << "\n"; cout << "Examples:\n"; cout << "\n"; cout << " " << name << " mni305.nii.gz\n"; cout << " Mean = 26.9753\n"; cout << " Standard deviation = 50.3525\n"; cout << " Extrema = [0, 254]\n"; cout << " Range = 254\n"; cout << "\n"; cout << " " << name << " mni305.nii.gz -pct 77\n"; cout << " 77th percentile = 25\n"; cout << "\n"; cout << " " << name << " mni305.nii.gz -padding 25 -range -percentile 25 50 75 -prefix MNI305 '[>25]'\n"; cout << " MNI305 [>25] range = 254\n"; cout << " MNI305 [>25] 25th percentile = 69\n"; cout << " MNI305 [>25] 50th percentile = 113\n"; cout << " MNI305 [>25] 75th percentile = 150\n"; cout << "\n"; cout << " " << name << " mni305.nii.gz -d , -prefix MNI305\n"; cout << " MNI305,26.9753,50.3525,0,254,254 [no newline at end of line]\n"; cout << "\n"; cout << " " << name << " mni305.nii.gz -d , -prefix MNI305 -header\n"; cout << " ,Mean,Sigma,Min,Max,Range\n"; cout << " MNI305,26.9753,50.3525,0,254,254\n"; cout << "\n"; cout << " " << name << " mni305.nii.gz -d , -prefix MNI305 -header ID Mean SD\n"; cout << " ID,Mean,SD,Min,Max,Range\n"; cout << " MNI305,26.9753,50.3525,0,254,254\n"; cout << "\n"; cout << " " << name << " a.nii.gz + b.nii.gz = c.nii.gz\n"; cout << "\n"; cout << " " << name << " a.vtk + b.nii.gz - 10 / c.nii = d.vtk\n"; cout << " Adds data values at identical sequential memory indices in a and b,\n"; cout << " subtracts the constant 10, and then divides by the values in image c.\n"; cout << "\n"; cout << " Note: Operations are always executed from left to right,\n"; cout << " i.e., no mathematical operator precedence is considered!\n"; cout << "\n"; } // ============================================================================= // Main // ============================================================================= // ----------------------------------------------------------------------------- // Some special options do not start with a '-' as otherwise required #undef HAS_ARGUMENT #define HAS_ARGUMENT \ _IsArgument(ARGIDX, argc, argv) && \ strcmp(argv[ARGIDX+1], "+") != 0 && \ strcmp(argv[ARGIDX+1], "/") != 0 && \ strcmp(argv[ARGIDX+1], "=") != 0 // ----------------------------------------------------------------------------- int main(int argc, char **argv) { InitializeIOLibrary(); // Initial data values REQUIRES_POSARGS(1); const char *input_name = POSARG(1); UniquePtr<double[]> data; int datatype = MIRTK_VOXEL_DOUBLE; ImageAttributes attr; #if MIRTK_Image_WITH_VTK const char *scalars_name = nullptr; bool cell_data = false; for (ARGUMENTS_AFTER(1)) { if (OPTION("-point-data") 
|| OPTION("-pointdata") || OPTION("-pd") || OPTION("-scalars")) { scalars_name = ARGUMENT; cell_data = false; } else if (OPTION("-cell-data") || OPTION("-celldata") || OPTION("-cd")) {<|fim▁hole|> } vtkSmartPointer<vtkDataSet> dataset; vtkSmartPointer<vtkDataSetAttributes> arrays; int n = Read(input_name, data, &datatype, &attr, &dataset, scalars_name, cell_data); if (dataset) { if (cell_data) { arrays = dataset->GetCellData(); } else { arrays = dataset->GetPointData(); } } #else // MIRTK_Image_WITH_VTK int n = Read(input_name, data, &datatype, &attr); #endif // MIRTK_Image_WITH_VTK // Optional arguments const double inf = numeric_limits<double>::infinity(); const double nan = numeric_limits<double>::quiet_NaN(); double a, b; int p; const char *append_name = NULL; const char *delimiter = NULL; bool print_header = false; int digits = 5; Array<string> header; Array<string> prefix; Array<UniquePtr<Op> > ops; for (ARGUMENTS_AFTER(1)) { if (OPTION("-append")) { append_name = ARGUMENT; } else if (OPTION("-point-data") || OPTION("-pointdata") || OPTION("-pd") || OPTION("-scalars")) { #if MIRTK_Image_WITH_VTK // Parsed before Read above scalars_name = ARGUMENT; cell_data = false; #else FatalError("Cannot process -point-data of VTK file because MIRTK Image library was built without VTK!"); #endif // MIRTK_Image_WITH_VTK } else if (OPTION("-cell-data") || OPTION("-celldata") || OPTION("-cd")) { #if MIRTK_Image_WITH_VTK // Parsed before Read above scalars_name = ARGUMENT; cell_data = true; #else FatalError("Cannot process -cell-data of VTK file because MIRTK Image library was built without VTK!"); #endif // MIRTK_Image_WITH_VTK } else if (OPTION("-prefix")) { do { prefix.push_back(ARGUMENT); } while (HAS_ARGUMENT); } else if (OPTION("-header")) { print_header = true; while (HAS_ARGUMENT) header.push_back(ARGUMENT); // Masking } else if (OPTION("-label")) { ops.push_back(UniquePtr<Op>(new ResetMask(true))); do { const char *arg = ARGUMENT; const Array<string> parts = Split(arg, ".."); if (parts.size() == 1) { if (!FromString(parts[0], a)) a = nan; b = a; } else if (parts.size() == 2) { if (!FromString(parts[0], a) || !FromString(parts[1], b)) { a = b = nan; } } else { a = b = nan; } if (IsNaN(a) || IsNaN(b)) { FatalError("Invalid -label argument: " << arg); } ops.push_back(UniquePtr<Op>(new MaskInsideInterval(a, b))); } while (HAS_ARGUMENT); ops.push_back(UniquePtr<Op>(new InvertMask())); } else if (OPTION("-mask-all")) { ops.push_back(UniquePtr<Op>(new ResetMask(false))); } else if (OPTION("-reset-mask")) { ops.push_back(UniquePtr<Op>(new ResetMask(true))); } else if (OPTION("-invert-mask")) { ops.push_back(UniquePtr<Op>(new InvertMask())); } else if (OPTION("-mask")) { double c; do { const char *arg = ARGUMENT; if (FromString(arg, c)) { ops.push_back(UniquePtr<Op>(new Mask(c))); } else { const char *fname = arg; const char *aname = nullptr; if (HAS_ARGUMENT) { arg = ARGUMENT; if (HAS_ARGUMENT) { aname = arg; PARSE_ARGUMENT(c); } else if (!FromString(arg, c)) { aname = arg, c = 0.; } } else { c = 0.; #if MIRTK_Image_WITH_VTK if (dataset && arrays->HasArray(fname)) { aname = fname; fname = input_name; } #endif } UniquePtr<Mask> op(new Mask(fname, c)); if (aname) { #if MIRTK_Image_WITH_VTK op->ArrayName(aname); op->IsCellData(cell_data); #else FatalError("Cannot read point set files when build without VTK or wrong usage!"); #endif } ops.push_back(UniquePtr<Op>(op.release())); break; } } while (HAS_ARGUMENT); } else if (OPTION("-threshold-outside") || OPTION("-mask-outside")) { PARSE_ARGUMENT(a); 
PARSE_ARGUMENT(b); ops.push_back(UniquePtr<Op>(new MaskOutsideOpenInterval(a, b))); } else if (OPTION("-threshold-outside-percentiles") || OPTION("-threshold-outside-pcts") || OPTION("-mask-outside-percentiles") || OPTION("-mask-outside-pcts")) { PARSE_ARGUMENT(p); Statistic *a = new Percentile(p); a->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(a)); PARSE_ARGUMENT(p); Statistic *b = new Percentile(p); b->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(b)); Op *op = new MaskOutsideOpenInterval(&a->Value(), &b->Value()); ops.push_back(UniquePtr<Op>(op)); } else if (OPTION("-threshold")) { PARSE_ARGUMENT(a); if (HAS_ARGUMENT) PARSE_ARGUMENT(b); else b = inf; ops.push_back(UniquePtr<Op>(new MaskOutsideInterval(a, b))); } else if (OPTION("-percentile-threshold") || OPTION("-pct-threshold")) { PARSE_ARGUMENT(p); Statistic *a = new Percentile(p); a->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(a)); Op *op = new MaskOutsideInterval(&a->Value(), inf); ops.push_back(UniquePtr<Op>(op)); } else if (OPTION("-threshold-percentiles") || OPTION("-threshold-pcts")) { PARSE_ARGUMENT(p); Statistic *a = new Percentile(p); a->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(a)); PARSE_ARGUMENT(p); Statistic *b = new Percentile(p); b->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(b)); Op *op = new MaskOutsideInterval(&a->Value(), &b->Value()); ops.push_back(UniquePtr<Op>(op)); } else if (OPTION("-threshold-inside") || OPTION("-mask-inside")) { PARSE_ARGUMENT(a); PARSE_ARGUMENT(b); ops.push_back(UniquePtr<Op>(new MaskInsideInterval(a, b))); } else if (OPTION("-threshold-inside-percentiles") || OPTION("-threshold-inside-pcts") || OPTION("-mask-inside-percentiles") || OPTION("-mask-inside-pcts")) { PARSE_ARGUMENT(p); Statistic *a = new Percentile(p); a->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(a)); PARSE_ARGUMENT(p); Statistic *b = new Percentile(p); b->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(b)); Op *op = new MaskInsideInterval(&a->Value(), &b->Value()); ops.push_back(UniquePtr<Op>(op)); } else if (OPTION("-threshold-lt") || OPTION("-lower-threshold") || OPTION("-mask-lt")) { PARSE_ARGUMENT(a); ops.push_back(UniquePtr<Op>(new MaskOutsideInterval(a, inf))); } else if (OPTION("-threshold-lt-percentile") || OPTION("-threshold-lt-pct") || OPTION("-lower-percentile-threshold") || OPTION("-lower-pct-threshold") || OPTION("-mask-lt-percentile") || OPTION("-mask-lt-pct")) { PARSE_ARGUMENT(p); Statistic *a = new Percentile(p); a->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(a)); ops.push_back(UniquePtr<Op>(new MaskOutsideInterval(&a->Value(), inf))); } else if (OPTION("-threshold-le") || OPTION("-mask-below") || OPTION("-mask-le")) { PARSE_ARGUMENT(a); ops.push_back(UniquePtr<Op>(new MaskOutsideOpenInterval(a, inf))); } else if (OPTION("-threshold-le-percentile") || OPTION("-threshold-le-pct") || OPTION("-mask-below-percentile") || OPTION("-mask-below-pct") || OPTION("-mask-le-percentile") || OPTION("-mask-le-pct")) { PARSE_ARGUMENT(p); Statistic *a = new Percentile(p); a->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(a)); ops.push_back(UniquePtr<Op>(new MaskOutsideOpenInterval(&a->Value(), inf))); } else if (OPTION("-threshold-ge") || OPTION("-mask-above") || OPTION("-mask-ge")) { PARSE_ARGUMENT(b); ops.push_back(UniquePtr<Op>(new MaskOutsideOpenInterval(-inf, b))); } else if (OPTION("-threshold-ge-percentile") || OPTION("-threshold-ge-pct") || OPTION("-mask-above-percentile") || OPTION("-mask-above-pct") || OPTION("-mask-ge-percentile") || OPTION("-mask-ge-pct")) { PARSE_ARGUMENT(p); 
Statistic *b = new Percentile(p); b->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(b)); ops.push_back(UniquePtr<Op>(new MaskOutsideOpenInterval(-inf, &b->Value()))); } else if (OPTION("-threshold-gt") || OPTION("-upper-threshold") || OPTION("-mask-gt")) { PARSE_ARGUMENT(b); ops.push_back(UniquePtr<Op>(new MaskOutsideInterval(-inf, b))); } else if (OPTION("-threshold-gt-percentile") || OPTION("-threshold-gt-pct") || OPTION("-upper-percentile-threshold") || OPTION("-upper-pct-threshold") || OPTION("-mask-gt-percentile") || OPTION("-mask-gt-pct")) { PARSE_ARGUMENT(p); Statistic *b = new Percentile(p); b->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(b)); ops.push_back(UniquePtr<Op>(new MaskOutsideInterval(-inf, &b->Value()))); } else if (OPTION("-even")) { ops.push_back(UniquePtr<Op>(new MaskOddValues())); } else if (OPTION("-odd")) { ops.push_back(UniquePtr<Op>(new MaskEvenValues())); // Clamping } else if (OPTION("-clamp")) { PARSE_ARGUMENT(a); PARSE_ARGUMENT(b); ops.push_back(UniquePtr<Op>(new Clamp(a, b))); } else if (OPTION("-clamp-percentiles") || OPTION("-clamp-pcts")) { PARSE_ARGUMENT(p); Statistic *a = new Percentile(p); a->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(a)); PARSE_ARGUMENT(p); Statistic *b = new Percentile(p); b->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(b)); ops.push_back(UniquePtr<Op>(new Clamp(&a->Value(), &b->Value()))); } else if (OPTION("-clamp-lt") || OPTION("-clamp-below")) { PARSE_ARGUMENT(a); ops.push_back(UniquePtr<Op>(new LowerThreshold(a))); } else if (OPTION("-clamp-lt-percentile") || OPTION("-clamp-lt-pct") || OPTION("-clamp-below-percentile") || OPTION("-clamp-below-pct")) { PARSE_ARGUMENT(p); Statistic *a = new Percentile(p); a->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(a)); ops.push_back(UniquePtr<Op>(new LowerThreshold(&a->Value()))); } else if (OPTION("-clamp-gt") || OPTION("-clamp-above")) { PARSE_ARGUMENT(b); ops.push_back(UniquePtr<Op>(new UpperThreshold(b))); } else if (OPTION("-clamp-gt-percentile") || OPTION("-clamp-gt-pct") || OPTION("-clamp-above-percentile") || OPTION("-clamp-above-pct")) { PARSE_ARGUMENT(p); Statistic *b = new Percentile(p); b->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(b)); ops.push_back(UniquePtr<Op>(new UpperThreshold(&b->Value()))); } else if (OPTION("-rescale")) { double min, max; if (!FromString(ARGUMENT, min)) { cerr << "Invalid -rescale minimum, must be a number!" << endl; exit(1); } if (!FromString(ARGUMENT, max)) { cerr << "Invalid -rescale maximum, must be a number!" << endl; exit(1); } ops.push_back(UniquePtr<Op>(new Rescale(min, max))); } else if (OPTION("-set") || OPTION("-inside")) { double inside_value; if (!FromString(ARGUMENT, inside_value)) { cerr << "Invalid -inside value, must be a number!" << endl; exit(1); } ops.push_back(UniquePtr<Op>(new SetInsideValue(inside_value))); } else if (OPTION("-pad") || OPTION("-outside")) { double outside_value; if (!FromString(ARGUMENT, outside_value)) { cerr << "Invalid -outside value, must be a number!" 
<< endl; exit(1); } ops.push_back(UniquePtr<Op>(new SetOutsideValue(outside_value))); // Data transformations } else if (OPTION("-binarize")) { PARSE_ARGUMENT(a); if (HAS_ARGUMENT) PARSE_ARGUMENT(b); else b = inf; ops.push_back(UniquePtr<Op>(new Binarize(a, b))); } else if (OPTION("-map")) { UniquePtr<Map> map(new Map()); do { const char * const arg1 = ARGUMENT; const char * const arg2 = ARGUMENT; if (!FromString(arg1, a) || !FromString(arg2, b)) { FatalError("Arguments of -map option must be pairs of two numbers (i.e., number of arguments must be even)!"); } map->Insert(a, b); } while (HAS_ARGUMENT); ops.push_back(UniquePtr<Op>(map.release())); } else if (OPTION("-add") || OPTION("-plus") || OPTION("+")) { const char *arg = ARGUMENT; double c; if (FromString(arg, c)) { ops.push_back(UniquePtr<Op>(new Add(c))); } else { const char *fname = arg; const char *aname = nullptr; if (HAS_ARGUMENT) { aname = ARGUMENT; } else { #if MIRTK_Image_WITH_VTK if (dataset && arrays->HasArray(fname)) { aname = fname; fname = input_name; } #endif } UniquePtr<Add> op(new Add(fname)); if (aname) { #if MIRTK_Image_WITH_VTK op->ArrayName(aname); op->IsCellData(cell_data); #else FatalError("Cannot read scalars from point set file when build without VTK or wrong usage!"); #endif } ops.push_back(UniquePtr<Op>(op.release())); } } else if (OPTION("-sub") || OPTION("-subtract") || OPTION("-minus") || OPTION("-")) { const char *arg = ARGUMENT; double c; if (FromString(arg, c)) { ops.push_back(UniquePtr<Op>(new Sub(c))); } else { const char *fname = arg; const char *aname = nullptr; if (HAS_ARGUMENT) { aname = ARGUMENT; } else { #if MIRTK_Image_WITH_VTK if (dataset && arrays->HasArray(fname)) { aname = fname; fname = input_name; } #endif } UniquePtr<Sub> op(new Sub(fname)); if (aname) { #if MIRTK_Image_WITH_VTK op->ArrayName(aname); op->IsCellData(cell_data); #else FatalError("Cannot read point set files when build without VTK or wrong usage!"); #endif } ops.push_back(UniquePtr<Op>(op.release())); } } else if (OPTION("-mul") || OPTION("-multiply-by") || OPTION("-times") || OPTION("*")) { const char *arg = ARGUMENT; double c; if (FromString(arg, c)) { ops.push_back(UniquePtr<Op>(new Mul(c))); } else { const char *fname = arg; const char *aname = nullptr; if (HAS_ARGUMENT) { aname = ARGUMENT; } else { #if MIRTK_Image_WITH_VTK if (dataset && arrays->HasArray(fname)) { aname = fname; fname = input_name; } #endif } UniquePtr<Mul> op(new Mul(fname)); if (aname) { #if MIRTK_Image_WITH_VTK op->ArrayName(aname); op->IsCellData(cell_data); #else FatalError("Cannot read point set files when build without VTK or wrong usage!"); #endif } ops.push_back(UniquePtr<Op>(op.release())); } } else if (OPTION("-div") || OPTION("-divide-by") || OPTION("-over") || OPTION("/")) { const char *arg = ARGUMENT; double c; if (ToLower(arg) == "sum") { Statistic *a = new Sum(); a->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(a)); ops.push_back(UniquePtr<Op>(new Div(&a->Value()))); } else if (FromString(arg, c)) { if (fequal(c, .0)) { cerr << "Invalid -div argument, value must not be zero!" 
<< endl; exit(1); } ops.push_back(UniquePtr<Op>(new Div(c))); } else { const char *fname = arg; const char *aname = nullptr; if (HAS_ARGUMENT) { aname = ARGUMENT; } else { #if MIRTK_Image_WITH_VTK if (dataset && arrays->HasArray(fname)) { aname = fname; fname = input_name; } #endif } UniquePtr<Div> op(new Div(fname)); if (aname) { #if MIRTK_Image_WITH_VTK op->ArrayName(aname); op->IsCellData(cell_data); #else FatalError("Cannot read point set files when build without VTK or wrong usage!"); #endif } ops.push_back(UniquePtr<Op>(op.release())); } } else if (OPTION("-div-with-zero")) { const char *arg = ARGUMENT; double c; if (ToLower(arg) == "sum") { Statistic *a = new Sum(); a->Hidden(verbose < 1); ops.push_back(UniquePtr<Op>(a)); ops.push_back(UniquePtr<Op>(new DivWithZero(&a->Value()))); } else if (FromString(arg, c)) { ops.push_back(UniquePtr<Op>(new DivWithZero(c))); } else { const char *fname = arg; const char *aname = nullptr; if (HAS_ARGUMENT) { aname = ARGUMENT; } else { #if MIRTK_Image_WITH_VTK if (dataset && arrays->HasArray(fname)) { aname = fname; fname = input_name; } #endif } UniquePtr<DivWithZero> op(new DivWithZero(fname)); if (aname) { #if MIRTK_Image_WITH_VTK op->ArrayName(aname); op->IsCellData(cell_data); #else FatalError("Cannot read point set files when build without VTK or wrong usage!"); #endif } ops.push_back(UniquePtr<Op>(op.release())); } } else if (OPTION("-abs")) { ops.push_back(UniquePtr<Op>(new Abs())); } else if (OPTION("-pow") || OPTION("-power")) { const char *arg = ARGUMENT; double exponent; if (!FromString(arg, exponent)) { cerr << "Invalid -power value, must be a number!" << endl; exit(1); } ops.push_back(UniquePtr<Op>(new Pow(exponent))); } else if (OPTION("-sqrt")) { ops.push_back(UniquePtr<Op>(new Pow(.5))); } else if (OPTION("-square") || OPTION("-sq")) { ops.push_back(UniquePtr<Op>(new Pow(2.0))); } else if (OPTION("-exp")) { ops.push_back(UniquePtr<Op>(new Exp())); } else if (OPTION("-log") || OPTION("-log2") || OPTION("-loge") || OPTION("-log10") || OPTION("-lb") || OPTION("-ln") || OPTION("-lg")) { a = numeric_limits<double>::min(); if (HAS_ARGUMENT) { PARSE_ARGUMENT(a); if (a <= .0) { cerr << "Invalid -log threshold argument, must be a positive number" << endl; exit(1); } } Op *op = nullptr; if (strcmp(OPTNAME, "-log") == 0) { if (HAS_ARGUMENT) { double base; if (!FromString(ARGUMENT, base)) { char c; if (!FromString(ARGUMENT, c) || c != 'e') { cerr << "Invalid -log base argument, must be a positive number or character e" << endl; exit(1); } op = new Ln(a); } else { op = new Log(base, a); } } else { op = new Ln(a); } } else if (strcmp(OPTNAME, "-log2") == 0 || strcmp(OPTNAME, "-lb") == 0) { op = new Lb(a); } else if (strcmp(OPTNAME, "-log10") == 0 || strcmp(OPTNAME, "-lg") == 0) { op = new Lg(a); } else if (strcmp(OPTNAME, "-loge") == 0 || strcmp(OPTNAME, "-ln") == 0) { op = new Ln(a); } ops.push_back(UniquePtr<Op>(op)); } else if (OPTION("-mod") || OPTION("-fmod")) { const char *arg = ARGUMENT; double denominator; if (!FromString(arg, denominator) || abs(denominator) < 1e-12) { cerr << "Invalid -mod value, must be a non-zero number!" 
<< endl; exit(1); } ops.push_back(UniquePtr<Op>(new Mod(denominator))); } else if (OPTION("-floor")) { ops.push_back(UniquePtr<Op>(new Floor())); } else if (OPTION("-ceil")) { ops.push_back(UniquePtr<Op>(new Ceil())); } else if (OPTION("-round")) { ops.push_back(UniquePtr<Op>(new Round())); } else if (OPTION("=")) { const char *fname = ARGUMENT; #if MIRTK_Image_WITH_VTK ops.push_back(UniquePtr<Op>(new Write(fname, datatype, attr, dataset, scalars_name, scalars_name))); #else ops.push_back(UniquePtr<Op>(new Write(fname, datatype, attr))); #endif } else if (OPTION("-o") || OPTION("-out") || OPTION("-output")) { const char *fname = ARGUMENT; int dtype = datatype; #if MIRTK_Image_WITH_VTK const char *output_scalars_name = scalars_name; #endif if (HAS_ARGUMENT) { const char *arg = ARGUMENT; dtype = ToDataType(arg); if (dtype == MIRTK_VOXEL_UNKNOWN) { cerr << "Invalid -out data type " << arg << endl; exit(1); } if (HAS_ARGUMENT) { #if MIRTK_Image_WITH_VTK output_scalars_name = ARGUMENT; #else Warning("Output scalars array name argument of -output option ignored"); #endif } } #if MIRTK_Image_WITH_VTK ops.push_back(UniquePtr<Op>(new Write(fname, dtype, attr, dataset, scalars_name, output_scalars_name, cell_data))); #else ops.push_back(UniquePtr<Op>(new Write(fname, dtype, attr))); #endif // Data statistics } else if (OPTION("-median")) { ops.push_back(UniquePtr<Op>(new Median())); } else if (OPTION("-mean") || OPTION("-average") || OPTION("-avg")) { ops.push_back(UniquePtr<Op>(new Mean())); } else if (OPTION("-sigma") || OPTION("-stddev") || OPTION("-stdev") || OPTION("-std") || OPTION("-sd")) { ops.push_back(UniquePtr<Op>(new StDev())); } else if (OPTION("-normal-distribution") || OPTION("-mean+sigma") || OPTION("-mean+stddev") || OPTION("-mean+stdev") || OPTION("-mean+std") || OPTION("-mean+sd") || OPTION("-avg+sigma") || OPTION("-avg+stddev") || OPTION("-avg+stdev") || OPTION("-avg+std") || OPTION("-avg+sd")) { ops.push_back(UniquePtr<Op>(new NormalDistribution())); } else if (OPTION("-variance") || OPTION("-var")) { ops.push_back(UniquePtr<Op>(new Var())); } else if (OPTION("-mean-absolute-difference") || OPTION("-mean-absolute-deviation") || OPTION("-mad") || OPTION("-mad-mean")) { ops.push_back(UniquePtr<Op>(new MeanAbsoluteDifference())); } else if (OPTION("-median-absolute-difference") || OPTION("-median-absolute-deviation") || OPTION("-mad-median")) { ops.push_back(UniquePtr<Op>(new MedianAbsoluteDifference())); } else if (OPTION("-minimum") || OPTION("-min")) { ops.push_back(UniquePtr<Op>(new Min())); } else if (OPTION("-maximum") || OPTION("-max")) { ops.push_back(UniquePtr<Op>(new Max())); } else if (OPTION("-extrema") || OPTION("-minmax")) { ops.push_back(UniquePtr<Op>(new Extrema())); } else if (OPTION("-range")) { ops.push_back(UniquePtr<Op>(new Range())); } else if (OPTION("-percentile") || OPTION("-pct") || OPTION("-p")) { do { int p; if (FromString(ARGUMENT, p) && 0 <= p && p <= 100) { ops.push_back(UniquePtr<Op>(new Percentile(p))); } else { cerr << "Invalid -percentile value, must be integer in the range [0, 100]!" << endl; exit(1); } } while (HAS_ARGUMENT); } else if (OPTION("-lower-percentile-mean") || OPTION("-lpctavg")) { do { int p; if (FromString(ARGUMENT, p) && 0 <= p && p <= 100) { ops.push_back(UniquePtr<Op>(new LowerPercentileMean(p))); } else { cerr << "Invalid -lower-percentile-mean value, must be integer in the range [0, 100]!" 
<< endl; exit(1); } } while (HAS_ARGUMENT); } else if (OPTION("-upper-percentile-mean") || OPTION("-upctavg")) { do { int p; if (FromString(ARGUMENT, p) && 0 <= p && p <= 100) { ops.push_back(UniquePtr<Op>(new UpperPercentileMean(p))); } else { cerr << "Invalid -upper-percentile-mean value, must be integer in the range [0, 100]!" << endl; exit(1); } } while (HAS_ARGUMENT); } else if (OPTION("-sum")) { ops.push_back(UniquePtr<Op>(new Sum())); } else if (OPTION("-count")) { ops.push_back(UniquePtr<Op>(new Count())); } else if (OPTION("-delimiter") || OPTION("-delim") || OPTION("-d") || OPTION("-sep")) { delimiter = ARGUMENT; } else if (OPTION("-precision") || OPTION("-digits")) { if (!FromString(ARGUMENT, digits) || digits < 0) { cerr << "Invalid -precision argument, value must be non-negative integer!" << endl; exit(1); } } else { HANDLE_COMMON_OR_UNKNOWN_OPTION(); } } // If delimiter explicitly set to empty string, use none if (delimiter && delimiter[0] == '\0') delimiter = NULL; // Default statistics to compute if (ops.empty()) { ops.push_back(UniquePtr<Statistic>(new Mean())); ops.push_back(UniquePtr<Statistic>(new StDev())); ops.push_back(UniquePtr<Statistic>(new Extrema())); ops.push_back(UniquePtr<Statistic>(new Range())); } // Initial data mask UniquePtr<bool[]> mask(new bool[n]); for (int i = 0; i < n; ++i) { if (IsNaN(data[i])) { mask[i] = false; } else { mask[i] = true; } } // Process input data, either transform it or compute statistics from it for (size_t i = 0; i < ops.size(); ++i) { ops[i]->Process(n, data.get(), mask.get()); } mask.reset(); // Open output file to append to or use STDOUT if none specified ofstream ofs; if (append_name) { if (print_header) { ifstream ifs(append_name); if (ifs.is_open()) { print_header = false; ifs.close(); } } ofs.open(append_name, ios_base::app); if (!ofs.is_open()) { FatalError("Cannot append to file " << append_name); } } ostream &out = (ofs.is_open() ? ofs : cout); // Print column names if requested if (delimiter && print_header) { size_t c = 0; for (size_t i = 0; i < prefix.size(); ++i, ++c) { if (c > 0) out << delimiter; if (c < header.size()) out << header[c]; } for (size_t i = 0; i < ops.size(); ++i) { Statistic *stat = dynamic_cast<Statistic *>(ops[i].get()); if (stat != nullptr && !stat->Hidden()) { for (size_t j = 0; j < stat->Names().size(); ++j, ++c) { if (c > 0) out << delimiter; if (c < header.size()) out << header[c]; else out << stat->Names()[j]; } } } out << endl; } // Print image statistics if (delimiter) { for (size_t i = 0; i < prefix.size(); ++i) { if (i > 0) out << delimiter; out << prefix[i]; } bool first = prefix.empty(); for (size_t i = 0; i < ops.size(); ++i) { Statistic *stat = dynamic_cast<Statistic *>(ops[i].get()); if (stat != nullptr && !stat->Hidden() && !stat->Names().empty()) { if (!first) out << delimiter; else first = false; stat->PrintValues(out, digits, delimiter); } } // No newline at end of row if printing results to STDOUT which in this // case is usually assigned to a string in a calling script if (print_header || ofs.is_open()) out << endl; } else { string prefix_string; for (size_t i = 0; i < prefix.size(); ++i) { if (i > 0) prefix_string += ' '; prefix_string += prefix[i]; } for (size_t i = 0; i < ops.size(); ++i) { Statistic *stat = dynamic_cast<Statistic *>(ops[i].get()); if (stat != nullptr && !stat->Hidden()) { stat->Print(out, digits, prefix_string.c_str()); } } } ofs.close(); return 0; }<|fim▁end|>
scalars_name = ARGUMENT; cell_data = true; }
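The percentile options above all follow one pattern: a hidden Percentile statistic is pushed onto the op list first, and the mask op that follows holds a pointer to its Value(), so the cutoff is computed from the data before the mask runs. A minimal Python sketch of that two-pass idea (NumPy assumed; the function and parameter names here are illustrative, not the tool's API):

import numpy as np

def mask_outside_percentiles(data, lo_pct, hi_pct, pad=float("nan")):
    # Pass 1: the deferred "Percentile" statistics, evaluated on the data.
    lo = np.nanpercentile(data, lo_pct)
    hi = np.nanpercentile(data, hi_pct)
    # Pass 2: the mask op consumes the cutoffs computed in pass 1.
    out = np.asarray(data, dtype=float).copy()
    out[(out < lo) | (out > hi)] = pad
    return out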
<|file_name|>wizard_report.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- ############################################################################## # # OpenERP module # Copyright (C) 2010 Micronaet srl (<http://www.micronaet.it>) and the # Italian OpenERP Community (<http://www.openerp-italia.com>) # # ######################################################################## # OpenERP, Open Source Management Solution # Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved # $Id$ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import logging from osv import osv, fields from tools.translate import _ from datetime import datetime, timedelta _logger = logging.getLogger(__name__) month_list= [('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','Maj'), ('06','June'), ('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December'), ] # WIZARD INTERVENT REPORT ###################################################### class contract_report_intervent_wizard(osv.osv_memory): ''' Middle window to choose intervent report parameter ''' _name = 'contract.report.intervent.wizard' _description = 'Intervent report wizard' # Button events: def print_invoice(self, cr, uid, ids, context=None): ''' Redirect to intervent print passing parameters ''' wiz_proxy = self.browse(cr, uid, ids)[0] datas = {} if wiz_proxy.all: datas['department_id'] = False datas['department_name'] = 'All' else: datas['department_id'] = wiz_proxy.department_id.id datas['department_name'] = wiz_proxy.department_id.name if wiz_proxy.absence_account_id: datas['absence_account_id'] = wiz_proxy.absence_account_id.id datas['absence_account_name'] = wiz_proxy.absence_account_id.name datas['month'] = wiz_proxy.month datas['year'] = wiz_proxy.year # not_work report: datas['user_id'] = wiz_proxy.user_id.id datas['user_name'] = wiz_proxy.user_id.name datas['from_date'] = wiz_proxy.from_date datas['to_date'] = wiz_proxy.to_date datas['detailed'] = wiz_proxy.detailed if wiz_proxy.mode == 'intervent': report_name = 'intervent_report' elif wiz_proxy.mode == 'absence': report_name = 'absence_report' else: report_name = 'not_work_report' return { 'type': 'ir.actions.report.xml', 'report_name': report_name, 'datas': datas, } _columns = { 'all': fields.boolean('All department', required=False), 'department_id': fields.many2one('hr.department', 'Department', required=False), 'year': fields.integer('Year'), 'month': fields.selection(month_list, 'Month', select=True), # For not_work: 'user_id': fields.many2one('res.users', 'Employee / User'), 'from_date': fields.date('From date >='), 'to_date': fields.date('To date <='), 'detailed': fields.boolean('Detailed'), 'mode': fields.selection([ ('intervent','Intervent'), ('absence','Absence'), ('not_work','Not work status'), # statistic on absence ], 'Mode', 
select=True, readonly=False, required=True), 'absence_account_id': fields.many2one('account.analytic.account', 'Absence type', required=False, help="If absence report is only for one type of account"), } _defaults = { 'all': lambda *a: True, 'month': lambda *a: datetime.now().strftime('%m'), 'year': lambda *a: datetime.now().strftime('%Y'), 'mode': lambda *a: 'intervent', } contract_report_intervent_wizard() # WIZARD CONTRACT DEPT. REPORT ################################################ class contract_department_report_wizard(osv.osv_memory): ''' Middle window to choose intervent report parameter ''' _name = 'contract.department.report.wizard' _description = 'Contract dept. report wizard' # Button events: def print_invoice(self, cr, uid, ids, context=None): ''' Redirect to contract dept. print passing parameters ''' wiz_proxy = self.browse(cr, uid, ids)[0] datas = {} if wiz_proxy.mode == 'detailed': # Detailed report #################### # block: datas['hour'] = wiz_proxy.hour datas['cost'] = wiz_proxy.cost datas['invoice'] = wiz_proxy.invoice datas['balance'] = wiz_proxy.balance datas['supplier'] = wiz_proxy.supplier # date: datas['start_date'] = wiz_proxy.start_date datas['end_date'] = wiz_proxy.end_date datas['active_contract'] = wiz_proxy.active_contract datas['date_summary'] = (wiz_proxy.end_date or wiz_proxy.start_date) and wiz_proxy.date_summary # True if there's one date and set to true # report name report='contracts_report' elif wiz_proxy.mode == 'list': # Simple list report ################## datas['active'] = wiz_proxy.active report = 'dept_contract_list_report'<|fim▁hole|> datas['start_date'] = wiz_proxy.start_date datas['end_date'] = wiz_proxy.end_date report='dept_contract_summary' # TODO create report if wiz_proxy.all_contract: datas['contract_id'] = False if wiz_proxy.all: datas['department_id'] = False else: datas['department_id'] = wiz_proxy.department_id.id datas['department_name'] = wiz_proxy.department_id.name else: # contract selected: datas['contract_id'] = wiz_proxy.contract_id.id datas['contract_name'] = wiz_proxy.contract_id.name datas['department_id'] = wiz_proxy.contract_id.department_id.id if wiz_proxy.contract_id.department_id else False datas['department_name'] = wiz_proxy.department_id.name return { 'type': 'ir.actions.report.xml', 'report_name': report, #'dept_contract_list_report', 'datas': datas, } _columns = { 'all_contract': fields.boolean('All contract',), 'active_contract': fields.boolean('Active contract',), 'contract_id': fields.many2one('account.analytic.account', 'Contract', required=False, help="All 'working' contract in contract list (absence fake contract not visible)"), 'all':fields.boolean('All department',), 'active':fields.boolean('Only active', help='In open state'), 'department_id':fields.many2one('hr.department', 'Department', required=False), 'mode':fields.selection([ ('list','Short list'), ('detailed','Detailed'), ('summary','Summary'), ],'Mode', select=True, required=True), # Blocks: 'hour':fields.boolean('With hours'), 'cost':fields.boolean('With cost'), 'invoice':fields.boolean('With invoice'), 'balance':fields.boolean('With balance'), 'supplier':fields.boolean('With supplier invoice'), 'date_summary':fields.boolean('With date summary', required=False), 'start_date': fields.date('Start date', help="Start date of period, for evaluate costs, intervent, invoice"), 'end_date': fields.date('End Date', help="End date of period, for evaluate cost, intervent, invoice"), } _defaults = { 'mode': lambda *a: 'list', 'all': lambda *a: True, 
'active': lambda *a: False, 'all_contract': lambda *a: True, 'hour': lambda *a: True, 'cost': lambda *a: True, 'invoice': lambda *a: True, 'balance': lambda *a: True, 'supplier': lambda *a: True, 'date_summary': lambda *a: True, } contract_department_report_wizard() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
else: # Summary report ################################################ #datas['department_id'] = wiz_proxy.department_id.id if wiz_proxy.department_id else False
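Both wizards above end the same way: the button handler collects the form fields into a datas map and returns an ir.actions.report.xml action, which the client uses to request the named report from the server. A hedged sketch of that contract (the dict keys come from the file; the helper itself is hypothetical):

def report_action(report_name, datas):
    # The action dict returned by print_invoice(): the client sends it back
    # to the server, which renders `report_name` with `datas` as parameters.
    return {
        'type': 'ir.actions.report.xml',
        'report_name': report_name,
        'datas': datas,
    }

action = report_action('intervent_report',
                       {'department_id': False, 'month': '05', 'year': 2016})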
<|file_name|>moment-plugin.ts<|end_file_name|><|fim▁begin|>import * as $ from 'jquery'; import * as moment from 'moment'; /* tslint:disable:no-function-expression */ ($.fn.DataTable as any).render.moment = function (from: string, to: string, locale: string): (d: any, type: string, row: any) => string { /* tslint:enable */ // Argument shifting if (arguments.length === 1) { locale = 'en'; to = from; from = 'YYYY-MM-DD'; } else if (arguments.length === 2) { locale = 'en'; } return (d, type, row): string => { var m = moment(d, from, locale, true);<|fim▁hole|> // Order and type get a number value from Moment, everything else // sees the rendered value return m.format(type === 'sort' || type === 'type' ? 'x' : to); }; };<|fim▁end|>
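The plugin's one trick is returning moment's 'x' token (epoch milliseconds) whenever DataTables asks for the 'sort' or 'type' value, so rows order chronologically while still displaying the formatted date. A Python analogue of that dual-output renderer (names and formats are illustrative, not part of the plugin):

from datetime import datetime

def make_date_renderer(src_fmt="%Y-%m-%d", out_fmt="%d %b %Y"):
    def render(value, call_type):
        d = datetime.strptime(value, src_fmt)
        if call_type in ("sort", "type"):
            return d.timestamp() * 1000  # sortable epoch millis, like moment's 'x'
        return d.strftime(out_fmt)       # human-readable rendering otherwise
    return render

render = make_date_renderer()
print(render("2016-05-21", "display"))  # -> 21 May 2016
print(render("2016-05-21", "sort"))     # -> epoch milliseconds for ordering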
<|file_name|>getAllMatchers.test.js<|end_file_name|><|fim▁begin|>const getAllMatchers = require("./getAllMatchers"); describe("unit: getAllMatchers", () => { let handler; let matcherStore; beforeEach(() => { matcherStore = [{}, {}, {}]; handler = getAllMatchers(matcherStore); }); test("it should return all matchers", () => { expect(handler()).toHaveProperty("body", matcherStore); }); test("it should return a status of 200", () => {<|fim▁hole|><|fim▁end|>
expect(handler()).toHaveProperty("status", 200); }); });
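The module under test is not shown, but the two assertions pin down its shape: a factory that closes over the store and whose handler returns that store with a 200. A Python sketch of an implementation that would satisfy both tests (assumed, since getAllMatchers.js itself is absent):

def get_all_matchers(matcher_store):
    def handler():
        return {"status": 200, "body": matcher_store}
    return handler

store = [{}, {}, {}]
assert get_all_matchers(store)() == {"status": 200, "body": store}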
<|file_name|>ftpDownloader-test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Created on Sat May 21 16:43:47 2016 @author: Pratik """ from ftplib import FTP import os # login and download file from ftp site and retrieve file (use default params) def ftpDownloader(filename, host="ftp.pyclass.com", user="[email protected]", passwd="student123"): ftp = FTP(host) # get the host url of ftp site ftp.login(user, passwd) # login with username and password ftp.cwd('Data') # change directory to Data<|fim▁hole|><|fim▁end|>
os.chdir("/Users/Pratik/Documents/Pratik/Work/practice/py-data-analysis") # change to the local working directory print(ftp.nlst()) # print the list of all files in the remote dir with open(filename, 'wb') as file: # open the local file for binary writing ftp.retrbinary('RETR %s' % filename, file.write) # download the remote file and write its contents locally
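The same download, sketched with Python 3 context managers so the FTP connection and the local file are closed even when the transfer fails (pass the same host and credentials as above; the helper name is illustrative):

from ftplib import FTP

def ftp_download(filename, host, user, passwd, remote_dir="Data"):
    with FTP(host) as ftp:            # connection is closed on exit
        ftp.login(user, passwd)
        ftp.cwd(remote_dir)
        with open(filename, "wb") as fh:
            ftp.retrbinary("RETR %s" % filename, fh.write)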
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![crate_name="otp"] #![crate_type="lib"] use std::time::{SystemTime, SystemTimeError}; use std::convert::TryInto; use data_encoding::{BASE32_NOPAD, DecodeError}; use err_derive::Error; use ring::hmac; #[derive(Debug, Error)] pub enum Error { #[error(display="invalid time provided")] InvalidTimeError(#[error(source)] SystemTimeError), #[error(display="invalid digest provided: {:?}", _0)] InvalidDigest(Vec<u8>), #[error(display="invalid secret provided")] InvalidSecret(#[error(source)] DecodeError) } /// Decodes a secret (given as an RFC4648 base32-encoded ASCII string) /// into a byte string fn decode_secret(secret: &str) -> Result<Vec<u8>, DecodeError> { BASE32_NOPAD.decode(secret.as_bytes()) } /// Calculates the HMAC digest for the given secret and counter. fn calc_digest(decoded_secret: &[u8], counter: u64) -> hmac::Tag { let key = hmac::Key::new(hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY, decoded_secret); hmac::sign(&key, &counter.to_be_bytes()) } /// Encodes the HMAC digest into a 6-digit integer. fn encode_digest(digest: &[u8]) -> Result<u32, Error> { let offset = match digest.last() { Some(x) => *x & 0xf, None => return Err(Error::InvalidDigest(Vec::from(digest))) } as usize; let code_bytes: [u8; 4] = match digest[offset..offset+4].try_into() { Ok(x) => x, Err(_) => return Err(Error::InvalidDigest(Vec::from(digest))) }; let code = u32::from_be_bytes(code_bytes); Ok((code & 0x7fffffff) % 1_000_000) } /// Performs the [HMAC-based One-time Password Algorithm](http://en.wikipedia.org/wiki/HMAC-based_One-time_Password_Algorithm) /// (HOTP) given an RFC4648 base32 encoded secret, and an integer counter. pub fn make_hotp(secret: &str, counter: u64) -> Result<u32, Error> { let decoded = decode_secret(secret)?; encode_digest(calc_digest(decoded.as_slice(), counter).as_ref()) } /// Helper function for `make_totp` to make it testable. Note that times /// before Unix epoch are not supported. fn make_totp_helper(secret: &str, time_step: u64, skew: i64, time: u64) -> Result<u32, Error> { let counter = ((time as i64 + skew) as u64) / time_step; make_hotp(secret, counter) } /// Performs the [Time-based One-time Password Algorithm](http://en.wikipedia.org/wiki/Time-based_One-time_Password_Algorithm) /// (TOTP) given an RFC4648 base32 encoded secret, the time step in seconds, /// and a skew in seconds. pub fn make_totp(secret: &str, time_step: u64, skew: i64) -> Result<u32, Error> { let now = SystemTime::now(); let time_since_epoch = now.duration_since(SystemTime::UNIX_EPOCH)?; match make_totp_helper(secret, time_step, skew, time_since_epoch.as_secs() ) { Ok(d) => Ok(d), Err(err) => return Err(err) } } #[cfg(test)] mod tests { use super::{make_hotp, make_totp_helper}; <|fim▁hole|> assert_eq!(make_hotp("BASE32SECRET3232", 0).unwrap(), 260182); assert_eq!(make_hotp("BASE32SECRET3232", 1).unwrap(), 55283); assert_eq!(make_hotp("BASE32SECRET3232", 1401).unwrap(), 316439); } #[test] fn totp() { assert_eq!(make_totp_helper("BASE32SECRET3232", 30, 0, 0).unwrap(), 260182); assert_eq!(make_totp_helper("BASE32SECRET3232", 3600, 0, 7).unwrap(), 260182); assert_eq!(make_totp_helper("BASE32SECRET3232", 30, 0, 35).unwrap(), 55283); assert_eq!(make_totp_helper("BASE32SECRET3232", 1, -2, 1403).unwrap(), 316439); } }<|fim▁end|>
#[test] fn hotp() {
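For reference, the same HOTP computation in Python (standard library only): base32 secret, HMAC-SHA1 over the big-endian counter, RFC 4226 dynamic truncation, six digits. If this mirror of the Rust code is faithful, it should reproduce the crate's own test vector:

import base64, hashlib, hmac, struct

def make_hotp(secret_b32, counter):
    key = base64.b32decode(secret_b32)   # RFC 4648 base32; this secret needs no padding
    digest = hmac.new(key, struct.pack(">Q", counter), hashlib.sha1).digest()
    offset = digest[-1] & 0xF            # dynamic truncation offset
    code = struct.unpack(">I", digest[offset:offset + 4])[0] & 0x7FFFFFFF
    return code % 1_000_000

assert make_hotp("BASE32SECRET3232", 0) == 260182  # the crate's hotp() test vector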
<|file_name|>pindel.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import logging import argparse, os, shutil, subprocess, sys, tempfile, time, shlex, re import datetime from multiprocessing import Pool import vcf def execute(cmd, output=None): import subprocess, sys, shlex # function to execute a cmd and report if an error occur print(cmd) try: process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) stdout,stderr = process.communicate() except Exception, e: # une erreur de ma commande : stderr sys.stderr.write("problem doing : %s\n%s\n" %(cmd, e)) return if output: output = open(output, 'w') output.write(stdout) output.close() if stderr != '': # une erreur interne au programme : stdout (sinon, souvent des warning arrete les programmes) sys.stdout.write("warning or error while doing : %s\n-----\n%s-----\n\n" %(cmd, stderr)) def indexBam(workdir, inputFastaFile, inputBamFile, bam_number, inputBamFileIndex=None): inputFastaLink = os.path.join(os.path.abspath(workdir), "reference.fa" ) if not os.path.exists(inputFastaLink): os.symlink(inputFastaFile, inputFastaLink) cmd = "samtools faidx %s" %(inputFastaLink) execute(cmd) inputBamLink = os.path.join(os.path.abspath(workdir), "sample_%d.bam" % (bam_number) ) os.symlink(inputBamFile, inputBamLink) if inputBamFileIndex is None:<|fim▁hole|> execute(cmd) else: os.symlink(inputBamFileIndex, inputBamLink + ".bai") return inputFastaLink, inputBamLink def config(inputBamFiles, meanInsertSizes, tags, tempDir): print("Creating Config File.") configFile = tempDir+"/pindel_configFile" fil = open(configFile, 'w') for inputBamFile, meanInsertSize, tag in zip(inputBamFiles, meanInsertSizes, tags): fil.write("%s\t%s\t%s\n" %(inputBamFile, meanInsertSize, tag)) fil.close() return configFile def pindel(reference, configFile, args, tempDir, chrome=None): if chrome is None: pindel_file_base = tempDir + "/pindel" else: pindel_file_base = tempDir + "/pindel_" + chrome cmd = "pindel -f %s -i %s -o %s " %(reference, configFile, pindel_file_base ) if args.input_SV_Calls_for_assembly: cmd += ' --input_SV_Calls_for_assembly %s ' %(args.input_SV_Calls_for_assembly) if args.breakdancer: cmd += ' --breakdancer %s ' %(args.breakdancer) if args.exclude is not None: cmd += ' --exclude %s' % (args.exclude) if args.include is not None: cmd += ' --include %s' % (args.include) opt_list = [ ["number_of_threads", "%d"], ["max_range_index", "%d"], ["window_size", "%d"], ["sequencing_error_rate", "%f"], ["sensitivity", "%f"], ["maximum_allowed_mismatch_rate", "%f"], ["NM", "%d"], ["additional_mismatch", "%d"], ["min_perfect_match_around_BP", "%d"], ["min_inversion_size", "%d"], ["min_num_matched_bases", "%d"], ["balance_cutoff", "%d"], ["anchor_quality", "%d"], ["minimum_support_for_event", "%d"] ] for o, f in opt_list: if getattr(args, o) is not None: cmd += (" --%s %s" % (o, f)) % (getattr(args,o)) if chrome is not None: cmd += " -c '%s' " % (chrome) flag_list = [ "report_long_insertions", "report_duplications", "report_inversions", "report_breakpoints", "report_close_mapped_reads", "report_only_close_mapped_reads", "report_interchromosomal_events", "IndelCorrection", "NormalSamples", "DD_REPORT_DUPLICATION_READS" ] for f in flag_list: if getattr(args, f): cmd += (" --%s" % (f)) if args.detect_DD: cmd += ' -q ' cmd += ' --MAX_DD_BREAKPOINT_DISTANCE '+str(args.MAX_DD_BREAKPOINT_DISTANCE) cmd += ' --MAX_DISTANCE_CLUSTER_READS '+str(args.MAX_DISTANCE_CLUSTER_READS) cmd += ' --MIN_DD_CLUSTER_SIZE '+str(args.MIN_DD_CLUSTER_SIZE) cmd += ' 
--MIN_DD_BREAKPOINT_SUPPORT '+str(args.MIN_DD_BREAKPOINT_SUPPORT) cmd += ' --MIN_DD_MAP_DISTANCE '+str(args.MIN_DD_MAP_DISTANCE) return (cmd, pindel_file_base ) def move(avant, apres): if os.path.exists(avant): execute("mv %s %s" %(avant, apres)) def pindel2vcf(inputFastaFile, refName, pindel_file, vcf_file): date = str(time.strftime('%d/%m/%y',time.localtime())) cmd = "pindel2vcf -p %s -r %s -R %s -d %s -v %s -he 0.05 -ho 0.95 -G" % (pindel_file, inputFastaFile, refName, date, vcf_file) # Added hard-coded parameters. JHL return cmd def which(cmd): cmd = ["which",cmd] p = subprocess.Popen(cmd, stdout=subprocess.PIPE) res = p.stdout.readline().rstrip() if len(res) == 0: return None return res def get_bam_seq(inputBamFile, min_size): ### Changed min_size to 40mil. JHL samtools = which("samtools") cmd = [samtools, "idxstats", inputBamFile] process = subprocess.Popen(args=cmd, stdout=subprocess.PIPE) stdout, stderr = process.communicate() seqs = [] for line in stdout.split("\n"): tmp = line.split("\t") if len(tmp) == 4 and int(tmp[1]) >= min_size: seqs.append(tmp[0]) return seqs def getMeanInsertSize(bamFile): logging.info("Getting insert size of %s" % (bamFile)) cmd = "samtools view -f66 %s | head -n 1000000" % (bamFile) process = subprocess.Popen(args=cmd, shell=True, stdout=subprocess.PIPE) b_sum = 0L b_count = 0L while True: line = process.stdout.readline() if not line: break tmp = line.split("\t") if abs(long(tmp[8])) < 10000: b_sum += abs(long(tmp[8])) b_count +=1 process.wait() if b_count == 0: mean = 200 else: mean = b_sum / b_count print "Using insert size: %d" % (mean) return mean def __main__(): logging.basicConfig(level=logging.INFO) time.sleep(1) #small hack, sometimes it seems like docker file systems aren't avalible instantly parser = argparse.ArgumentParser(description='') parser.add_argument('-r', dest='inputFastaFile', required=True, help='the reference file') parser.add_argument('-R', dest='inputFastaName', default="genome", help='the reference name') parser.add_argument('-b', dest='inputBamFiles', default=[], action="append", help='the bam file') parser.add_argument('-bi', dest='inputBamFileIndexes', default=[], action="append", help='the bam file') parser.add_argument('-s', dest='insert_sizes', type=int, default=[], action="append", required=False, help='the insert size') parser.add_argument('-t', dest='sampleTags', default=[], action="append", help='the sample tag') parser.add_argument('-o1', dest='outputRaw', help='the output raw', default=None) parser.add_argument('-o2', dest='outputVcfFile', help='the output vcf', default=None) parser.add_argument('-o3', dest='outputSomaticVcfFile', help='the output somatic filtered vcf', default=None) parser.add_argument('--number_of_threads', dest='number_of_threads', type=int, default=2) parser.add_argument('--number_of_procs', dest='procs', type=int, default=1) parser.add_argument('--breakdancer', dest='breakdancer') parser.add_argument('-x', '--max_range_index', dest='max_range_index', type=int, default=None) parser.add_argument('--window_size', dest='window_size', type=int, default=None) parser.add_argument('--sequencing_error_rate', dest='sequencing_error_rate', type=float, default=None) parser.add_argument('--sensitivity', dest='sensitivity', default=None, type=float) parser.add_argument('--report_long_insertions', dest='report_long_insertions', action='store_true', default=False) parser.add_argument('--report_duplications', dest='report_duplications', action='store_true', default=False) 
parser.add_argument('--report_inversions', dest='report_inversions', action='store_true', default=False) parser.add_argument('--report_breakpoints', dest='report_breakpoints', action='store_true', default=False) parser.add_argument('-u', '--maximum_allowed_mismatch_rate', dest='maximum_allowed_mismatch_rate', type=float, default=None) parser.add_argument('--report_close_mapped_reads', dest='report_close_mapped_reads', action='store_true', default=False) parser.add_argument('--report_only_close_mapped_reads', dest='report_only_close_mapped_reads', action='store_true', default=False) parser.add_argument('--report_interchromosomal_events', dest='report_interchromosomal_events', action='store_true', default=False) parser.add_argument('--IndelCorrection', dest='IndelCorrection', action='store_true', default=False) parser.add_argument('--NormalSamples', dest='NormalSamples', action='store_true', default=False) parser.add_argument('-a', '--additional_mismatch', dest='additional_mismatch', type=int, default=None) parser.add_argument('-m', '--min_perfect_match_around_BP', dest='min_perfect_match_around_BP', type=int, default=None) parser.add_argument('-v', '--min_inversion_size', dest='min_inversion_size', type=int, default=None) parser.add_argument('-d', '--min_num_matched_bases', dest='min_num_matched_bases', type=int, default=None) parser.add_argument('-B', '--balance_cutoff', dest='balance_cutoff', type=int, default=None) parser.add_argument('-A', '--anchor_quality', dest='anchor_quality', type=int, default=None) parser.add_argument('-M', '--minimum_support_for_event', dest='minimum_support_for_event', type=int, default=None) parser.add_argument('-n', '--NM', dest='NM', type=int, default=None) parser.add_argument('--detect_DD', dest='detect_DD', action='store_true', default=False) parser.add_argument('--MAX_DD_BREAKPOINT_DISTANCE', dest='MAX_DD_BREAKPOINT_DISTANCE', type=int, default='350') parser.add_argument('--MAX_DISTANCE_CLUSTER_READS', dest='MAX_DISTANCE_CLUSTER_READS', type=int, default='100') parser.add_argument('--MIN_DD_CLUSTER_SIZE', dest='MIN_DD_CLUSTER_SIZE', type=int, default='3') parser.add_argument('--MIN_DD_BREAKPOINT_SUPPORT', dest='MIN_DD_BREAKPOINT_SUPPORT', type=int, default='3') parser.add_argument('--MIN_DD_MAP_DISTANCE', dest='MIN_DD_MAP_DISTANCE', type=int, default='8000') parser.add_argument('--DD_REPORT_DUPLICATION_READS', dest='DD_REPORT_DUPLICATION_READS', action='store_true', default=False) parser.add_argument('--somatic_vaf', type=float, default=0.08) parser.add_argument('--somatic_cov', type=int, default=20) parser.add_argument('--somatic_hom', type=int, default=6) parser.add_argument("-J", "--exclude", dest="exclude", default=None) parser.add_argument("-j", "--include", dest="include", default=None) parser.add_argument('--min_chrom_size', dest='min_chrom_size', type=int, default='1') parser.add_argument('-z', '--input_SV_Calls_for_assembly', dest='input_SV_Calls_for_assembly', action='store_true', default=False) parser.add_argument('--workdir', default="./") parser.add_argument('--no_clean', action="store_true", default=False) args = parser.parse_args() inputBamFiles = list( os.path.abspath(a) for a in args.inputBamFiles ) if len(inputBamFiles) == 0: logging.error("Need input files") sys.exit(1) inputBamFileIndexes = list( os.path.abspath(a) for a in args.inputBamFileIndexes ) if len(inputBamFileIndexes) == 0: inputBamFileIndexes = [None] * len(inputBamFiles) if len(inputBamFileIndexes) != len(inputBamFiles): logging.error("Index file count needs to undefined or 
match input file count") sys.exit(1) insertSizes = args.insert_sizes if len(insertSizes) == 0: insertSizes = [None] * len(inputBamFiles) if len(insertSizes) != len(inputBamFiles): logging.error("Insert Sizes needs to undefined or match input file count") sys.exit(1) sampleTags = args.sampleTags if len(sampleTags) != len(inputBamFiles): logging.error("Sample Tags need to match input file count") sys.exit(1) tempDir = tempfile.mkdtemp(dir=args.workdir, prefix="pindel_work_") print(tempDir) try: meanInsertSizes = [] seq_hash = {} newInputFiles = [] i = 0 #make sure the BAMs are indexed and get the mean insert sizes for inputBamFile, inputBamIndex, insertSize, sampleTag in zip(inputBamFiles, inputBamFileIndexes, insertSizes, sampleTags ): inputFastaFile, inputBamFile = indexBam(args.workdir, args.inputFastaFile, inputBamFile, i, inputBamIndex) i += 1 newInputFiles.append(inputBamFile) if insertSize==None: meanInsertSize = getMeanInsertSize(inputBamFile) else: meanInsertSize=insertSize meanInsertSizes.append( meanInsertSize ) for seq in get_bam_seq(inputBamFile, args.min_chrom_size): seq_hash[seq] = True seqs = seq_hash.keys() configFile = config(newInputFiles, meanInsertSizes, sampleTags, tempDir) #run pindel pindel_files = [] if args.procs == 1: cmd, pindelFileBase = pindel(inputFastaFile, configFile, args, tempDir) execute(cmd) for suffix in ["_D", "_SI", "_LI", "_INV", "_TD"]: if os.path.exists(pindelFileBase + suffix): pindel_files.append( pindelFileBase + suffix ) else: cmds = [] runs = [] for a in seqs: cmd, pindelFileBase = pindel(inputFastaFile, configFile, args, tempDir, a) cmds.append(cmd) runs.append(pindelFileBase) p = Pool(args.procs) values = p.map(execute, cmds, 1) for pindelFileBase in runs: for suffix in ["_D", "_SI", "_LI", "_INV", "_TD"]: if os.path.exists(pindelFileBase + suffix): pindel_files.append( pindelFileBase + suffix ) #run pindel2vcf with open(os.path.join(args.workdir, "pindel_all"), "w") as handle: for p in pindel_files: with open(p) as ihandle: for line in ihandle: handle.write(line) if args.outputRaw is not None: shutil.copy(os.path.join(args.workdir, "pindel_all"), args.outputRaw) if args.outputVcfFile is not None: cmd = pindel2vcf(inputFastaFile, args.inputFastaName, os.path.join(args.workdir, "pindel_all"), args.outputVcfFile) execute(cmd) if args.outputSomaticVcfFile is not None: with open(os.path.join(args.workdir, "pindel_somatic"), "w") as handle: for p in pindel_files: if p.endswith("_D"): with open(p) as ihandle: for line in ihandle: if re.search("ChrID", line): handle.write(line) for p in pindel_files: if p.endswith("_SI"): with open(p) as ihandle: for line in ihandle: if re.search("ChrID", line): handle.write(line) with open(os.path.join(args.workdir, "somatic.indel.filter.config"), "w") as handle: handle.write("indel.filter.input = %s\n" % os.path.join(args.workdir, "pindel_somatic")) handle.write("indel.filter.vaf = %s\n" % (args.somatic_vaf)) handle.write("indel.filter.cov = %s\n" % (args.somatic_cov)) handle.write("indel.filter.hom = %s\n" % (args.somatic_hom)) handle.write("indel.filter.pindel2vcf = %s\n" % (which("pindel2vcf"))) handle.write("indel.filter.reference = %s\n" % (inputFastaFile)) handle.write("indel.filter.referencename = %s\n" % (args.inputFastaName)) handle.write("indel.filter.referencedate = %s\n" % (datetime.datetime.now().strftime("%Y%m%d")) ) handle.write("indel.filter.output = %s\n" % (args.outputSomaticVcfFile)) # The hard-coded paths need to be removed. 
execute("%s ~/bin/somatic_indelfilter.pl %s" % (which("perl"), os.path.join(args.workdir, "somatic.indel.filter.config")) ) finally: if not args.no_clean and os.path.exists(tempDir): shutil.rmtree(tempDir) if __name__=="__main__": __main__()<|fim▁end|>
cmd = "samtools index %s" %(inputBamLink)
<|file_name|>dodata.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#################### This file provides basic json / urlencode operations
import sys,re
import json
from jsonpath_rw import jsonpath, parse # pip2/pip3 install jsonpath_rw
from lxml import etree
import platform
import hues # colored console logging, used by writenode()/writenode_ffile()
sysstr = platform.system() ### detect the OS type (Windows / Linux); the functions below take LINUX as the baseline and convert internally where needed

######################################### JSON
# a jsonpath can be obtained with the firefox JSON-handle add-on, or https://jsonpath.curiousconcept.com/
jsondoc=None

##################### return the matching json node
def jsonnode(jsonstr,jsonpath):
    if jsonpath[0:5]=="JSON.": ## accept JSON-handle style json paths
        jsonpath="$." + jsonpath[5:]
    if jsonpath[:1]=="@": ## direct leaf shorthand
        jsonpath="$.." + jsonpath[1:] + "[0]"
    #print(jsonpath)
    try:
        jsonpath_expr = parse(jsonpath)
    except:
        return None
    global jsondoc
    jsondoc = json.loads(jsonstr)
    node=None
    for match in jsonpath_expr.find(jsondoc):
        node=match
    return node

##################### JSON: read the given node
def readjson_old(jsonstr,jsonpath): ##### jsonpath_rw has trouble reading nodes with Chinese content, so this original method is deprecated
    node=jsonnode(jsonstr,jsonpath)
    try:
        value=node.value
    except:
        print("JSON node not found: " + jsonpath)
        value=""
    return value

def initjson(jsonstr,jsonpath): # normalize the json string and locate the node
    node=jsonnode(jsonstr,jsonpath)
    ### jsonpath_rw does not implement writing (### node.value=value),
    ############# so regular expressions are used instead
    #print(dir(jsondoc))
    if sys.version_info.major==2:
        jsonstr=str(jsondoc).decode('unicode_escape')
    else:
        jsonstr=str(jsondoc)
    jsonstr=jsonstr.replace("u'","'")
    return(jsonstr,node)

def leftjsonpos(jsonstr,jsonpath): # position of the left edge just before the value at jsonpath
    (jsonstr,node)=initjson(jsonstr,jsonpath)
    try:
        repath=str(node.full_path)
    except:
        print("JSON node not found: " + jsonpath)
        return(-1,"")
    #print(repath)
    ### via a regular expression
    if repath[len(repath)-4:]==".[0]": # the .[0] suffix of a leaf node
        repath=repath[:len(repath)-4]
    #print(jsonstr)
    repathlist=repath.split(".")
    #print(repathlist)
    # example of a left-side pattern: (.*l1('|\"):(.*).*('|\")l1_1('|\"): ('|\"|\[\"|\[\'))
    # {'l2': {'l2_3': {}, 'l2_2': True, 'l2_1': None}, 'l1': {'l1_1': ['中文测试', 'l1_1_2'], 'l1_2': {'l1_2_1': 121}}}
    repath= "('|\"):(.*).*('|\")".join(repathlist) ## single or double quotes
    repath=".*" + repath + "('|\"): ('|\"|\[\"|\[\')" ### possible openers: ' " [" ['
    #repath=".*" + repath + "('|\"): ('|\")"
    repath="(" + repath + ")" ## the left-side pattern
    #print(repath)
    matchs=re.match(repath,jsonstr,re.DOTALL) ### the last flag makes . match newlines
    if matchs!=None:
        leftstr=matchs.groups()[0] ## the left part
        #print(leftstr)
        return(len(leftstr),leftstr)
    else: ### not found
        print("left edge of JSON node not found: " + jsonpath)
        return(-1,"")

def rightjsonpos(jsonstr,jsonpath): # position of the right edge just after the value at jsonpath
    (left,leftstr)=leftjsonpos(jsonstr,jsonpath) ## position of the left part
    if left==-1:
        print("left edge of JSON node not found: " + jsonpath)
        return(-1,"")
    (jsonstr,node)=initjson(jsonstr,jsonpath) ## normalized full string (positions must be taken against the normalized string)
    ### single or double quote on the right side
    pos1=jsonstr.find("'", left+1)
    pos2=jsonstr.find("\"", left+1)
    if pos2==-1:
        rightstr=jsonstr[pos1:] ## the right part
        pos=pos1
    elif pos1==-1:
        rightstr=jsonstr[pos2:]
        pos=pos2
    elif pos1<pos2:
        rightstr=jsonstr[pos1:]
        pos=pos1
    else:
        rightstr=jsonstr[pos2:]
        pos=pos2
    return(pos,rightstr)

def readjson(jsonstr,jsonpath):
    (left,leftstr)=leftjsonpos(jsonstr,jsonpath) ## position of the left part
    if left==-1:
        return ""
    (right,rightstr)=rightjsonpos(jsonstr,jsonpath) ## position of the right part
    if right==-1:
        return ""
    (jsonstr,node)=initjson(jsonstr,jsonpath) ## normalized full string (positions must be taken against the normalized string)
    #print(left)
    #print(right)
    ret=jsonstr[left:right]
    return ret

#### a second, purely string-based way to read a leaf node; relatively robust, only supports the @ shorthand
def readjson_once(jsonstr,jsonpath):
    if jsonpath[:1]=="@":
        jsonpath=jsonpath[1:]
    #print(jsonpath)
    pos=jsonstr.find("\""+jsonpath+"\"")
    tempstr=jsonstr[pos+len(jsonpath):] # find the key and keep everything up to the end
    #print(tempstr)
    pos1=tempstr.find("}")
    pos2=tempstr.find(",")
    if pos1==-1:
        pos=pos2
    elif pos2==-1:
        pos=pos1
    else:
        pos=min(pos1,pos2) # earliest valid end position
    #print(pos)
    tempstr=tempstr[2:pos]
    #print(tempstr)
    tempstr=tempstr.replace(":","")
    #print(tempstr)
    if tempstr.find("\"")==-1: # numeric type
        text=str(int(tempstr))
    else: # string
        pos1=tempstr.find("\"")
        pos2=tempstr[pos1+1:].find("\"")
        #print(pos1,pos2)
        text=tempstr[pos1+1:pos1+1+pos2]
    return text

##################### JSON: write to the given node ----------------- multiple [n] nodes cannot be handled yet
def writejson(jsonstr, jsonpath, value):
    (left,leftstr)=leftjsonpos(jsonstr,jsonpath) ## the left part
    if left==-1:
        return jsonstr
    (right,rightstr)=rightjsonpos(jsonstr,jsonpath) ## the right part
    if right==-1:
        return jsonstr
    #print(left)
    #print(right)
    #print("left: " + leftstr)
    #print("right: " +rightstr)
    res=leftstr +value + rightstr ## join left part, value and right part
    ### turn json single quotes into double quotes
    res=res.replace("'","\"")
    return res

def writejson_ffile(files, jsonpath, value): ### read from a json file and modify the matching value
    data=open(files).read()
    jsonstr=writejson(data, jsonpath, value)
    return jsonstr

################################### URLCODE
def writeurlcode(data, path, value): ##### modify a value
    vardata=path+"=" + readurlcode(data, path)
    urlcodestr=data.replace(vardata, path+"=" + value)
    urlcodestr=urlcodestr.replace("\n","")
    urlcodestr=urlcodestr.replace("\r","")
    return urlcodestr

def readurlcode(data, path): ##### read a value
    value=""
    pos1=data.find(path+"=")
    #print(pos1)
    start=pos1+len(path)+1 # position just past "path="
    if pos1>=0 and start<len(data): ## found and not at the very end
        pos2=data.find("&",start)
        #print(pos2)
        if pos2<0:
            value=data[start:]
        else:
            value=data[start:pos2]
    return value

def writeurlcode_ffile(files, path, value): ####### read from a file, then modify a value<|fim▁hole|>
    data=open(files).read()
    data=data.replace("\n","")
    data=data.replace("\r","")
    urlcodestr=writeurlcode(data, path, value)
    return urlcodestr

#################################### HTML
def readhtml(data,xpath):
    etrees=etree.HTML(data) ##### quirks of lxml xpath handling
    xpath=xpath.replace("html/body/","//") ### html/body/ cannot be used here; that prefix is how firebug writes paths
    xpaths=xpath.replace("/tbody/","/") ### strip all tbody elements
    #print(xpaths)
    ele= etrees.xpath(xpaths)
    if len(ele)==0: ### cannot test this with None
        xpaths=xpath ### retry without stripping
        ele= etrees.xpath(xpaths)
    #####
    types="" ## only text extraction is supported for now
    #####
    try:
        if types=="":
            values=ele[0].text ### the element's text
        else:
            values=str(etrees.xpath(xpaths+ "/@" + types)[0]) ### the element's matching attribute
        #print(values)
    except:
        #print("** data capture exception **")
        values="" ## return empty if the element does not exist
    if values==None or len(ele)==0:
        print("HTML node lookup failed: " + xpath)
        values=""
    return values

################################### automatic type detection
def whichtypes(data): ## detect the type
    #print(data)
    xmlre=data.count('<')
    jsonre=data.count('{')
    urlcodere=data.count('=')
    data=data.strip()
    types=""
    if data[:6]=="<?xml ":
        types="xml"
    elif data.find("<html")>=0 and data.find("</html>")>0:
        types="html"
    elif xmlre>jsonre and xmlre>urlcodere:
        types="xml"
    elif jsonre>xmlre and jsonre>urlcodere:
        types="json"
    elif urlcodere>xmlre and urlcodere>jsonre:
        types="urlcode"
    #print(types)
    return types

def writenode(data, path,value): ## detect the type
    types=whichtypes(data)
    if types=="xml":
        data=writexml(data, path, value)
    if types=="json":
        data=writejson(data, path, value)
    if types=="urlcode":
        data=writeurlcode(data, path, value)
    if types=="html":
        hues.error("the html format only supports reading nodes")
    return data

def readnode(data,path): ## detect the type
    types=whichtypes(data)
    if types=="":
        print("failed to detect the data type")
        return ""
    value=""
    if types=="xml":
        value=readxml(data,path)
    if types=="json":
        value=readjson(data,path)
    if types=="urlcode":
        value=readurlcode(data, path)
    if types=="html":
        value=readhtml(data,path)
    return value

def writenode_ffile(files, path, value): ## detect the type
    data=open(files).read()
    types=whichtypes(data)
    if types=="":
        hues.error("failed to detect the file type")
        return ""
    if types=="xml":
        data=writexml_ffile(files, path, value)
    if types=="json":
        data=writejson_ffile(files, path, value)
    if types=="urlcode":
        data=writeurlcode_ffile(files, path, value)
    if types=="html":
        hues.error("the html format only supports reading nodes")
    return data<|fim▁end|>
* * @author ranger * @version $Id: $ */ public class ThresholdEvaluatorRelativeChange implements ThresholdEvaluator { private static final String TYPE = "relativeChange"; /** {@inheritDoc} */ @Override public ThresholdEvaluatorState getThresholdEvaluatorState(BaseThresholdDefConfigWrapper threshold) { return new ThresholdEvaluatorStateRelativeChange(threshold); } /** {@inheritDoc} */ @Override public boolean supportsType(String type) { return TYPE.equals(type); } public static class ThresholdEvaluatorStateRelativeChange extends AbstractThresholdEvaluatorState { private BaseThresholdDefConfigWrapper m_thresholdConfig; private double m_multiplier; private double m_lastSample = 0.0; private double m_previousTriggeringSample; public ThresholdEvaluatorStateRelativeChange(BaseThresholdDefConfigWrapper threshold) { Assert.notNull(threshold, "threshold argument cannot be null"); setThresholdConfig(threshold); } public void setThresholdConfig(BaseThresholdDefConfigWrapper thresholdConfig) { Assert.notNull(thresholdConfig.getType(), "threshold must have a 'type' value set"); Assert.notNull(thresholdConfig.getDatasourceExpression(), "threshold must have a 'ds-name' value set"); Assert.notNull(thresholdConfig.getDsType(), "threshold must have a 'ds-type' value set"); Assert.isTrue(thresholdConfig.hasValue(), "threshold must have a 'value' value set"); Assert.isTrue(thresholdConfig.hasRearm(), "threshold must have a 'rearm' value set"); Assert.isTrue(thresholdConfig.hasTrigger(), "threshold must have a 'trigger' value set"); Assert.isTrue(TYPE.equals(thresholdConfig.getType()), "threshold for ds-name '" + thresholdConfig.getDatasourceExpression() + "' has type of '" + thresholdConfig.getType() + "', but this evaluator only supports thresholds with a 'type' value of '" + TYPE + "'"); Assert.isTrue(!Double.isNaN(thresholdConfig.getValue()), "threshold must have a 'value' value that is a number"); Assert.isTrue(thresholdConfig.getValue() != Double.POSITIVE_INFINITY && thresholdConfig.getValue() != Double.NEGATIVE_INFINITY, "threshold must have a 'value' value that is not positive or negative infinity"); Assert.isTrue(thresholdConfig.getValue() != 1.0, "threshold must not be unity (1.0)"); m_thresholdConfig = thresholdConfig; setMultiplier(thresholdConfig.getValue()); } @Override public BaseThresholdDefConfigWrapper getThresholdConfig() { return m_thresholdConfig; } @Override public Status evaluate(double dsValue) { //Fix for Bug 2275 so we handle negative numbers //It will not handle values which cross the 0 boundary (from - to +, or v.v.) properly, but // after some discussion, we can't come up with a sensible scenario when that would actually happen. 
// If such a scenario eventuates, reconsider dsValue=Math.abs(dsValue); if (getLastSample() != 0.0) { double threshold = getMultiplier() * getLastSample(); if (getMultiplier() < 1.0) { if (dsValue <= threshold) { setPreviousTriggeringSample(getLastSample()); setLastSample(dsValue); return Status.TRIGGERED; } } else { if (dsValue >= threshold) { setPreviousTriggeringSample(getLastSample()); setLastSample(dsValue); return Status.TRIGGERED; } } setLastSample(dsValue); } setLastSample(dsValue); return Status.NO_CHANGE; } public Double getLastSample() { return m_lastSample; } public void setLastSample(double lastSample) { m_lastSample = lastSample; } @Override public Event getEventForState(Status status, Date date, double dsValue, CollectionResourceWrapper resource) { if (status == Status.TRIGGERED) { String uei=getThresholdConfig().getTriggeredUEI(); if(uei==null || "".equals(uei)) { uei=EventConstants.RELATIVE_CHANGE_THRESHOLD_EVENT_UEI; }<|fim▁hole|> } private Event createBasicEvent(String uei, Date date, double dsValue, CollectionResourceWrapper resource) { Map<String,String> params = new HashMap<String,String>(); params.put("previousValue", formatValue(getPreviousTriggeringSample())); params.put("multiplier", Double.toString(getThresholdConfig().getValue())); // params.put("trigger", Integer.toString(getThresholdConfig().getTrigger())); // params.put("rearm", Double.toString(getThresholdConfig().getRearm())); return createBasicEvent(uei, date, dsValue, resource, params); } public double getPreviousTriggeringSample() { return m_previousTriggeringSample; } public void setPreviousTriggeringSample(double previousTriggeringSample) { m_previousTriggeringSample = previousTriggeringSample; } public double getMultiplier() { return m_multiplier; } public void setMultiplier(double multiplier) { m_multiplier = multiplier; } @Override public ThresholdEvaluatorState getCleanClone() { return new ThresholdEvaluatorStateRelativeChange(m_thresholdConfig); } // FIXME This must be implemented correctly @Override public boolean isTriggered() { return false; } // FIXME This must be implemented correctly @Override public void clearState() { } } }<|fim▁end|>
return createBasicEvent(uei, date, dsValue, resource); } else { return null; }
<|file_name|>array-index-is-temporary.rs<|end_file_name|><|fim▁begin|>// Retagging (from Stacked Borrows) relies on the array index being a fresh // temporary, so that side-effects cannot change it. // Test that this is indeed the case. unsafe fn foo(z: *mut usize) -> u32 { *z = 2; 99 } fn main() { let mut x = [42, 43, 44]; let mut y = 1; let z: *mut usize = &mut y;<|fim▁hole|>// START rustc.main.EraseRegions.after.mir // bb0: { // ... // _6 = &mut _2; // _5 = &mut (*_6); // _4 = move _5 as *mut usize (Misc); // _3 = move _4; // ... // _8 = _3; // _7 = const foo(move _8) -> bb1; // } // // bb1: { // ... // _9 = _2; // _10 = Len(_1); // _11 = Lt(_9, _10); // assert(move _11, "index out of bounds: the len is move _10 but the index is _9") -> bb2; // } // // bb2: { // _1[_9] = move _7; // ... // return; // } // END rustc.main.EraseRegions.after.mir<|fim▁end|>
x[y] = unsafe { foo(z) }; } // END RUST SOURCE
<|file_name|>PostActionButton.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react'; import PropTypes from 'prop-types'; import Button from 'react-bootstrap/lib/Button'; import Glyphicon from 'react-bootstrap/lib/Glyphicon'; import { post } from '../../api'; class PostActionButton extends Component { static propTypes = { bsStyle: PropTypes.string.isRequired, children: PropTypes.node.isRequired, hideContentOnAction: PropTypes.bool.isRequired, action: PropTypes.string.isRequired, body: PropTypes.object.isRequired, onSuccess: PropTypes.func, onError: PropTypes.func, }; static defaultProps = { body: {}, hideContentOnAction: false, onSuccess: () => {}, onError: () => {}, }; constructor(props) { super(props); this.state = { isWorking: false }; } onClick = () => { this.setState({ isWorking: true }); const { body, action, onSuccess, onError } = this.props; post(action, body).then(() => { this.setState({ isWorking: false }); onSuccess(); }, (err) => { console.error(err); onError(err); }); }; render() { const { isWorking } = this.state; const { hideContentOnAction, bsStyle, children } = this.props; const renderChildren = !isWorking || (isWorking && !hideContentOnAction); return ( <Button onClick={this.onClick} bsStyle={bsStyle}><|fim▁hole|> </Button> ); } } export default PostActionButton;<|fim▁end|>
{isWorking && <Glyphicon glyph="refresh" className="glyphicon-spin" /> } {renderChildren && children}
<|file_name|>upload.controller.js<|end_file_name|><|fim▁begin|>var path = require('path'); var Q = require('q'); var fs = require('fs'); var mv = require('mv'); var Upload = require('./upload.model'); exports.upload = function (req, res) { var tmpPath = req.files[0].path; var newFileName = Math.random().toString(36).substring(7)+path.extname(tmpPath); var targetPath = path.resolve(process.env.UPLOAD_PATH, newFileName); var defer = Q.defer(); mv(tmpPath, targetPath, function (err) { if (err) { return defer.reject(err); } targetPath = targetPath.substring(targetPath.indexOf('upload')); Upload.createUpload(targetPath).then(function(upload) { defer.resolve(upload); }, function(err) { defer.reject(err); }); }); defer.promise.then(function (upload) { res.json({ status: true, data: upload._id }); }, function(err) { console.log(err); res.json({ status: false,<|fim▁hole|>};<|fim▁end|>
reason: err.toString() }); });
<|file_name|>stubs.py<|end_file_name|><|fim▁begin|>import logging import os import time from urllib2 import HTTPError, URLError from djangoappengine.boot import PROJECT_DIR from djangoappengine.utils import appid, have_appserver REMOTE_API_SCRIPTS = ( '$PYTHON_LIB/google/appengine/ext/remote_api/handler.py', 'google.appengine.ext.remote_api.handler.application', ) def auth_func(): import getpass return raw_input("Login via Google Account (see note above if login fails): "), getpass.getpass("Password: ") def rpc_server_factory(*args, ** kwargs): from google.appengine.tools import appengine_rpc kwargs['save_cookies'] = True return appengine_rpc.HttpRpcServer(*args, ** kwargs) class StubManager(object): def __init__(self): self.testbed = None self.active_stubs = None self.pre_test_stubs = None def setup_stubs(self, connection): if self.active_stubs is not None: return if not have_appserver: self.activate_stubs(connection) def activate_stubs(self, connection): try: from google.appengine.tools import dev_appserver_main self.setup_local_stubs(connection) except ImportError: self.activate_test_stubs(connection) def reset_stubs(self, connection, datastore_path=None): if self.active_stubs == 'test': self.deactivate_test_stubs() self.activate_test_stubs(connection, datastore_path) elif self.active_stubs == 'local': self.setup_local_stubs(connection) elif self.active_stubs == 'remote': self.setup_remote_stubs(connection) def activate_test_stubs(self, connection, datastore_path=None): if self.active_stubs == 'test': return if self.testbed is None: from google.appengine.ext.testbed import Testbed self.testbed = Testbed() self.testbed.activate() self.pre_test_stubs = self.active_stubs self.active_stubs = 'test' os.environ['APPLICATION_ID'] = 'dev~' + appid os.environ['HTTP_HOST'] = "%s.appspot.com" % appid appserver_opts = connection.settings_dict.get('DEV_APPSERVER_OPTIONS', {}) high_replication = appserver_opts.get('high_replication', False) require_indexes = appserver_opts.get('require_indexes', False) use_sqlite = appserver_opts.get('use_sqlite', False) datastore_opts = {'require_indexes': require_indexes, 'use_sqlite': use_sqlite} if high_replication: from google.appengine.datastore import datastore_stub_util datastore_opts['consistency_policy'] = datastore_stub_util.PseudoRandomHRConsistencyPolicy(probability=1) self.testbed.init_datastore_v3_stub(datastore_file=datastore_path, **datastore_opts) self.testbed.init_memcache_stub() self.testbed.init_taskqueue_stub(auto_task_running=True, root_path=PROJECT_DIR) self.testbed.init_urlfetch_stub() self.testbed.init_user_stub() self.testbed.init_xmpp_stub() self.testbed.init_channel_stub() self.testbed.init_app_identity_stub() self.testbed.init_blobstore_stub() self.testbed.init_files_stub() self.testbed.init_images_stub() def deactivate_test_stubs(self): if self.active_stubs == 'test': self.testbed.deactivate() self.active_stubs = self.pre_test_stubs def setup_local_stubs(self, connection): if self.active_stubs == 'local': return from .base import get_datastore_paths from google.appengine.tools import dev_appserver_main args = dev_appserver_main.DEFAULT_ARGS.copy() args.update(get_datastore_paths(connection.settings_dict)) args.update(connection.settings_dict.get('DEV_APPSERVER_OPTIONS', {})) log_level = logging.getLogger().getEffectiveLevel() logging.getLogger().setLevel(logging.WARNING) try: from google.appengine.tools import dev_appserver except ImportError: from google.appengine.tools import old_dev_appserver as dev_appserver 
dev_appserver.SetupStubs('dev~' + appid, **args) logging.getLogger().setLevel(log_level) self.active_stubs = 'local' def setup_remote_stubs(self, connection): if self.active_stubs == 'remote': return if not connection.remote_api_path: from djangoappengine.utils import appconfig from google.appengine.api import appinfo default_module = next(m for m in appconfig.modules if m.module_name == appinfo.DEFAULT_MODULE) for handler in default_module.handlers: if handler.script in REMOTE_API_SCRIPTS: connection.remote_api_path = handler.url.split('(', 1)[0] break server = '%s.%s' % (connection.remote_app_id, connection.domain) remote_url = 'https://%s%s' % (server, connection.remote_api_path) logging.info("Setting up remote_api for '%s' at %s." % (connection.remote_app_id, remote_url)) if not have_appserver: logging.info( "Connecting to remote_api handler.\n\n" "IMPORTANT: Check your login method settings in the " "App Engine Dashboard if you have problems logging in. " "Login is only supported for Google Accounts.") from google.appengine.ext.remote_api import remote_api_stub remote_api_stub.ConfigureRemoteApi(None, connection.remote_api_path, auth_func, servername=server, secure=connection.secure_remote_api, rpc_server_factory=rpc_server_factory) retry_delay = 1 while retry_delay <= 16: try: remote_api_stub.MaybeInvokeAuthentication() except HTTPError, e: if not have_appserver: logging.info("Retrying in %d seconds..." % retry_delay) time.sleep(retry_delay) retry_delay *= 2 else:<|fim▁hole|> else: try: remote_api_stub.MaybeInvokeAuthentication() except HTTPError, e: raise URLError("%s\n" "Couldn't reach remote_api handler at %s.\n" "Make sure you've deployed your project and " "installed a remote_api handler in app.yaml. " "Note that login is only supported for " "Google Accounts. Make sure you've configured " "the correct authentication method in the " "App Engine Dashboard." % (e, remote_url)) logging.info("Now using the remote datastore for '%s' at %s." % (connection.remote_app_id, remote_url)) self.active_stubs = 'remote' stub_manager = StubManager()<|fim▁end|>
break
<|file_name|>Dashboard.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'; import { experimentalStyled as styled } from '@material-ui/core/styles'; import CssBaseline from '@material-ui/core/CssBaseline'; import MuiDrawer from '@material-ui/core/Drawer'; import Box from '@material-ui/core/Box'; import MuiAppBar, { AppBarProps as MuiAppBarProps } from '@material-ui/core/AppBar'; import Toolbar from '@material-ui/core/Toolbar'; import List from '@material-ui/core/List'; import Typography from '@material-ui/core/Typography'; import Divider from '@material-ui/core/Divider'; import IconButton from '@material-ui/core/IconButton'; import Badge from '@material-ui/core/Badge'; import Container from '@material-ui/core/Container'; import Grid from '@material-ui/core/Grid'; import Paper from '@material-ui/core/Paper'; import Link from '@material-ui/core/Link'; import MenuIcon from '@material-ui/icons/Menu'; import ChevronLeftIcon from '@material-ui/icons/ChevronLeft'; import NotificationsIcon from '@material-ui/icons/Notifications'; import { mainListItems, secondaryListItems } from './listItems'; import Chart from './Chart'; import Deposits from './Deposits'; import Orders from './Orders'; function Copyright(props: any) { return ( <Typography variant="body2" color="text.secondary" align="center" {...props}> {'Copyright © '} <Link color="inherit" href="https://material-ui.com/"> Your Website </Link>{' '} {new Date().getFullYear()} {'.'} </Typography> ); } const drawerWidth: number = 240; interface AppBarProps extends MuiAppBarProps { open?: boolean; } const AppBar = styled(MuiAppBar, { shouldForwardProp: (prop) => prop !== 'open', })<AppBarProps>(({ theme, open }) => ({ zIndex: theme.zIndex.drawer + 1, transition: theme.transitions.create(['width', 'margin'], { easing: theme.transitions.easing.sharp, duration: theme.transitions.duration.leavingScreen, }), ...(open && { marginLeft: drawerWidth, width: `calc(100% - ${drawerWidth}px)`, transition: theme.transitions.create(['width', 'margin'], { easing: theme.transitions.easing.sharp, duration: theme.transitions.duration.enteringScreen, }), }), })); const Drawer = styled(MuiDrawer, { shouldForwardProp: (prop) => prop !== 'open' })( ({ theme, open }) => ({ '& .MuiDrawer-paper': { position: 'relative', whiteSpace: 'nowrap', width: drawerWidth, transition: theme.transitions.create('width', { easing: theme.transitions.easing.sharp, duration: theme.transitions.duration.enteringScreen, }), boxSizing: 'border-box', ...(!open && { overflowX: 'hidden', transition: theme.transitions.create('width', { easing: theme.transitions.easing.sharp, duration: theme.transitions.duration.leavingScreen, }), width: theme.spacing(7), [theme.breakpoints.up('sm')]: { width: theme.spacing(9), }, }), }, }), ); function DashboardContent() { const [open, setOpen] = React.useState(true); const toggleDrawer = () => { setOpen(!open); }; return ( <Box sx={{ display: 'flex' }}> <CssBaseline /> <AppBar position="absolute" open={open}> <Toolbar sx={{ pr: '24px', // keep right padding when drawer closed }} > <IconButton edge="start" color="inherit" aria-label="open drawer" onClick={toggleDrawer} sx={{ marginRight: '36px', ...(open && { display: 'none' }), }} > <MenuIcon /> </IconButton> <Typography component="h1" variant="h6" color="inherit" noWrap sx={{ flexGrow: 1 }} > Dashboard </Typography> <IconButton color="inherit"> <Badge badgeContent={4} color="secondary"> <NotificationsIcon /> </Badge> </IconButton> </Toolbar> </AppBar> <Drawer variant="permanent" open={open}> <Toolbar 
sx={{ display: 'flex', alignItems: 'center', justifyContent: 'flex-end', px: [1], }} > <IconButton onClick={toggleDrawer}> <ChevronLeftIcon /> </IconButton> </Toolbar> <Divider /> <List>{mainListItems}</List> <Divider /> <List>{secondaryListItems}</List> </Drawer> <Box component="main" sx={{ backgroundColor: (theme) => theme.palette.mode === 'light' ? theme.palette.grey[100] : theme.palette.grey[900], flexGrow: 1, height: '100vh', overflow: 'auto', }} > <Toolbar /> <Container maxWidth="lg" sx={{ mt: 4, mb: 4 }}> <Grid container spacing={3}> {/* Chart */} <Grid item xs={12} md={8} lg={9}> <Paper sx={{ p: 2, display: 'flex', flexDirection: 'column', height: 240, }} > <Chart /> </Paper> </Grid> {/* Recent Deposits */} <Grid item xs={12} md={4} lg={3}> <Paper sx={{ p: 2, display: 'flex', flexDirection: 'column', height: 240, }} > <Deposits /> </Paper> </Grid> {/* Recent Orders */} <Grid item xs={12}> <Paper sx={{ p: 2, display: 'flex', flexDirection: 'column' }}> <Orders /> </Paper> </Grid> </Grid> <Copyright sx={{ pt: 4 }} /> </Container><|fim▁hole|> ); } export default function Dashboard() { return <DashboardContent />; }<|fim▁end|>
</Box> </Box>
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>use tempdir; use libxch; mod util; #[test] fn test_success() { let dir = tempdir::TempDir::new("test").expect("Could not create temporary directory"); let file1 = dir.path().join("file1");<|fim▁hole|> util::create_file_with_content(&file2, b"content2").expect("Could not create file in tempdir"); assert!(libxch::xch_non_atomic(&file1, &file2).is_ok()); assert!(util::ensure_file_content(&file1, b"content2").expect("Could not read file")); assert!(util::ensure_file_content(&file2, b"content1").expect("Could not read file")); } #[test] fn test_failure() { let dir = tempdir::TempDir::new("test").expect("Could not create temporary directory"); let file1 = dir.path().join("file1"); util::create_file_with_content(&file1, b"content1").expect("Could not create file in tempdir"); assert!(libxch::xch_non_atomic(&file1, dir.path()).is_err()); assert!(util::ensure_file_content(&file1, b"content1").expect("Could not read file")); }<|fim▁end|>
let file2 = dir.path().join("file2"); util::create_file_with_content(&file1, b"content1").expect("Could not create file in tempdir");
<|file_name|>ApacheConnectionManagerFactoryTest.java<|end_file_name|><|fim▁begin|>/* * Copyright 2011-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.http.apache.client.impl; <|fim▁hole|>import com.amazonaws.http.settings.HttpClientSettings; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.junit.Test; import static org.junit.Assert.assertEquals; public class ApacheConnectionManagerFactoryTest { private final ApacheConnectionManagerFactory factory = new ApacheConnectionManagerFactory(); @Test public void validateAfterInactivityMillis_RespectedInConnectionManager() { final int validateAfterInactivity = 1234; final HttpClientSettings httpClientSettings = HttpClientSettings.adapt(new ClientConfiguration() .withValidateAfterInactivityMillis(validateAfterInactivity)); final PoolingHttpClientConnectionManager connectionManager = (PoolingHttpClientConnectionManager) factory.create(httpClientSettings); assertEquals(validateAfterInactivity, connectionManager.getValidateAfterInactivity()); } }<|fim▁end|>
import com.amazonaws.ClientConfiguration;
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from robottelo.decorators.func_shared.shared import ( # noqa shared, SharedFunctionError,<|fim▁hole|><|fim▁end|>
SharedFunctionException, )
<|file_name|>pl.js<|end_file_name|><|fim▁begin|>/*! * FileInput Polish Translations * * This file must be loaded after 'fileinput.js'. Patterns in braces '{}', or * any HTML markup tags in the messages must not be converted or translated. * * @see http://github.com/kartik-v/bootstrap-fileinput * * NOTE: this file must be saved in UTF-8 encoding. */ (function ($) { "use strict"; $.fn.fileinputLocales['pl'] = { fileSingle: 'plik', filePlural: 'pliki', browseLabel: 'Przeglądaj &hellip;', removeLabel: 'Usuń', removeTitle: 'Usuń zaznaczone pliki', cancelLabel: 'Przerwij', cancelTitle: 'Anuluj wysyłanie', pauseLabel: 'Wstrzymaj', pauseTitle: 'Wstrzymaj trwające przesyłanie', uploadLabel: 'Wgraj', uploadTitle: 'Wgraj zaznaczone pliki', msgNo: 'Nie', msgNoFilesSelected: 'Brak zaznaczonych plików', msgPaused: 'Wstrzymano', msgCancelled: 'Odwołany', msgPlaceholder: 'Wybierz {files} ...', msgZoomModalHeading: 'Szczegółowy podgląd', msgFileRequired: 'Musisz wybrać plik do wgrania.', msgSizeTooSmall: 'Plik "{name}" (<b>{size} KB</b>) jest zbyt mały i musi być większy niż <b>{minSize} KB</b>.', msgSizeTooLarge: 'Plik o nazwie "{name}" (<b>{size} KB</b>) przekroczył maksymalną dopuszczalną wielkość pliku wynoszącą <b>{maxSize} KB</b>.', msgFilesTooLess: 'Minimalna liczba plików do wgrania: <b>{n}</b>.', msgFilesTooMany: 'Liczba plików wybranych do wgrania w liczbie <b>({n})</b>, przekracza maksymalny dozwolony limit wynoszący <b>{m}</b>.', msgTotalFilesTooMany: 'Możesz wgrać maksymalnie <b>{m}</b> plików (wykryto <b>{n}</b>).', msgFileNotFound: 'Plik "{name}" nie istnieje!', msgFileSecured: 'Ustawienia zabezpieczeń uniemożliwiają odczyt pliku "{name}".', msgFileNotReadable: 'Plik "{name}" nie jest plikiem do odczytu.', msgFilePreviewAborted: 'Podgląd pliku "{name}" został przerwany.', msgFilePreviewError: 'Wystąpił błąd w czasie odczytu pliku "{name}".', msgInvalidFileName: 'Nieprawidłowe lub nieobsługiwane znaki w nazwie pliku "{name}".', msgInvalidFileType: 'Nieznany typ pliku "{name}". Tylko następujące rodzaje plików są dozwolone: "{types}".', msgInvalidFileExtension: 'Złe rozszerzenie dla pliku "{name}". Tylko następujące rozszerzenia plików są dozwolone: "{extensions}".', msgUploadAborted: 'Przesyłanie pliku zostało przerwane', msgUploadThreshold: 'Przetwarzanie &hellip;', msgUploadBegin: 'Rozpoczynanie &hellip;', msgUploadEnd: 'Gotowe!', msgUploadResume: 'Wznawianie przesyłania &hellip;', msgUploadEmpty: 'Brak poprawnych danych do przesłania.', msgUploadError: 'Błąd przesyłania', msgDeleteError: 'Błąd usuwania', <|fim▁hole|> msgSelected: '{n} Plików zaznaczonych', msgFoldersNotAllowed: 'Metodą przeciągnij i upuść, można przenosić tylko pliki. Pominięto {n} katalogów.', msgImageWidthSmall: 'Szerokość pliku obrazu "{name}" musi być co najmniej {size} px.', msgImageHeightSmall: 'Wysokość pliku obrazu "{name}" musi być co najmniej {size} px.', msgImageWidthLarge: 'Szerokość pliku obrazu "{name}" nie może przekraczać {size} px.', msgImageHeightLarge: 'Wysokość pliku obrazu "{name}" nie może przekraczać {size} px.', msgImageResizeError: 'Nie udało się uzyskać wymiaru obrazu, aby zmienić rozmiar.', msgImageResizeException: 'Błąd podczas zmiany rozmiaru obrazu.<pre>{errors}</pre>', msgAjaxError: 'Coś poszło nie tak podczas {operation}. Spróbuj ponownie!', msgAjaxProgressError: '{operation} nie powiodło się', msgDuplicateFile: 'Plik "{name}" o identycznym rozmiarze "{size} KB" został wgrany wcześniej. 
Pomijanie zduplikowanego pliku.', msgResumableUploadRetriesExceeded: 'Przekroczono limit <b>{max}</b> prób wgrania pliku <b>{file}</b>! Szczegóły błędu: <pre>{error}</pre>', msgPendingTime: 'Pozostało {time}', msgCalculatingTime: 'obliczanie pozostałego czasu', ajaxOperations: { deleteThumb: 'usuwanie pliku', uploadThumb: 'przesyłanie pliku', uploadBatch: 'masowe przesyłanie plików', uploadExtra: 'przesyłanie danych formularza' }, dropZoneTitle: 'Przeciągnij i upuść pliki tutaj &hellip;', dropZoneClickTitle: '<br>(lub kliknij tutaj i wybierz {files} z komputera)', fileActionSettings: { removeTitle: 'Usuń plik', uploadTitle: 'Przesyłanie pliku', uploadRetryTitle: 'Ponów', downloadTitle: 'Pobierz plik', zoomTitle: 'Pokaż szczegóły', dragTitle: 'Przenieś / Ponownie zaaranżuj', indicatorNewTitle: 'Jeszcze nie przesłany', indicatorSuccessTitle: 'Dodane', indicatorErrorTitle: 'Błąd', indicatorPausedTitle: 'Przesyłanie zatrzymane', indicatorLoadingTitle: 'Przesyłanie &hellip;' }, previewZoomButtonTitles: { prev: 'Pokaż poprzedni plik', next: 'Pokaż następny plik', toggleheader: 'Włącz / wyłącz nagłówek', fullscreen: 'Włącz / wyłącz pełny ekran', borderless: 'Włącz / wyłącz tryb bez ramek', close: 'Zamknij szczegółowy widok' } }; })(window.jQuery);<|fim▁end|>
msgProgressError: 'Błąd', msgValidationError: 'Błąd walidacji', msgLoading: 'Wczytywanie pliku {index} z {files} &hellip;', msgProgress: 'Wczytywanie pliku {index} z {files} - {name} - {percent}% zakończone.',
<|file_name|>configs-fn_return_indent-with_where_clause.rs<|end_file_name|><|fim▁begin|>// rustfmt-fn_return_indent: WithWhereClause // Function return type indent fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, consectetur: Consectetur, adipiscing: Adipiscing) -> Elit where Ipsum: Eq { // body<|fim▁hole|><|fim▁end|>
}
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod resource;<|fim▁hole|>pub mod space;<|fim▁end|>
<|file_name|>graphs.py<|end_file_name|><|fim▁begin|># Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Virtual adversarial text models.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import csv import os # Dependency imports import tensorflow as tf import adversarial_losses as adv_lib import inputs as inputs_lib import layers as layers_lib flags = tf.app.flags FLAGS = flags.FLAGS # Flags governing adversarial training are defined in adversarial_losses.py. # Classifier flags.DEFINE_integer('num_classes', 2, 'Number of classes for classification') # Data path flags.DEFINE_string('data_dir', '/tmp/IMDB', 'Directory path to preprocessed text dataset.') flags.DEFINE_string('vocab_freq_path', None, 'Path to pre-calculated vocab frequency data. If ' 'None, use FLAGS.data_dir/vocab_freq.txt.') flags.DEFINE_integer('batch_size', 64, 'Size of the batch.') flags.DEFINE_integer('num_timesteps', 100, 'Number of timesteps for BPTT') # Model architecture flags.DEFINE_bool('bidir_lstm', False, 'Whether to build a bidirectional LSTM.') flags.DEFINE_bool('single_label', True, 'Whether the sequence has a single ' 'label, for optimization.') flags.DEFINE_integer('rnn_num_layers', 1, 'Number of LSTM layers.') flags.DEFINE_integer('rnn_cell_size', 512, 'Number of hidden units in the LSTM.') flags.DEFINE_integer('cl_num_layers', 1, 'Number of hidden layers of classification model.') flags.DEFINE_integer('cl_hidden_size', 30, 'Number of hidden units in classification layer.') flags.DEFINE_integer('num_candidate_samples', -1, 'Num samples used in the sampled output layer.') flags.DEFINE_bool('use_seq2seq_autoencoder', False, 'If True, seq2seq auto-encoder is used to pretrain. ' 'If False, standard language model is used.') # Vocabulary and embeddings flags.DEFINE_integer('embedding_dims', 256, 'Dimensions of embedded vector.') flags.DEFINE_integer('vocab_size', 86934, 'The size of the vocabulary. This value ' 'should be exactly the same as the number of the ' 'vocabulary used in the dataset. Because the last ' 'indexed vocabulary of the dataset preprocessed by ' 'my preprocessing code is always <eos>, here we ' 'specify the <eos> with this index.') flags.DEFINE_bool('normalize_embeddings', True, 'Normalize word embeddings by vocab frequency') # Optimization flags.DEFINE_float('learning_rate', 0.001, 'Learning rate while fine-tuning.') flags.DEFINE_float('learning_rate_decay_factor', 1.0, 'Learning rate decay factor') flags.DEFINE_boolean('sync_replicas', False, 'sync_replica or not') flags.DEFINE_integer('replicas_to_aggregate', 1, 'The number of replicas to aggregate') # Regularization flags.DEFINE_float('max_grad_norm', 1.0, 'Clip the global gradient norm to this value.') flags.DEFINE_float('keep_prob_emb', 1.0, 'keep probability on embedding layer. 
' '0.5 is optimal on IMDB with virtual adversarial training.') flags.DEFINE_float('keep_prob_lstm_out', 1.0, 'keep probability on lstm output.') flags.DEFINE_float('keep_prob_cl_hidden', 1.0, 'keep probability on classification hidden layer') def get_model(): if FLAGS.bidir_lstm: return VatxtBidirModel() else: return VatxtModel() class VatxtModel(object): """Constructs training and evaluation graphs. Main methods: `classifier_training()`, `language_model_training()`, and `eval_graph()`. Variable reuse is a critical part of the model, both for sharing variables between the language model and the classifier, and for reusing variables for the adversarial loss calculation. To ensure correct variable reuse, all variables are created in Keras-style layers, wherein stateful layers (i.e. layers with variables) are represented as callable instances of the Layer class. Each time the Layer instance is called, it is using the same variables. All Layers are constructed in the __init__ method and reused in the various graph-building functions. """ def __init__(self, cl_logits_input_dim=None): self.global_step = tf.train.get_or_create_global_step() self.vocab_freqs = _get_vocab_freqs() # Cache VatxtInput objects self.cl_inputs = None self.lm_inputs = None # Cache intermediate Tensors that are reused self.tensors = {} # Construct layers which are reused in constructing the LM and # Classification graphs. Instantiating them all once here ensures that # variable reuse works correctly. self.layers = {} self.layers['embedding'] = layers_lib.Embedding( FLAGS.vocab_size, FLAGS.embedding_dims, FLAGS.normalize_embeddings, self.vocab_freqs, FLAGS.keep_prob_emb) self.layers['lstm'] = layers_lib.LSTM( FLAGS.rnn_cell_size, FLAGS.rnn_num_layers, FLAGS.keep_prob_lstm_out) self.layers['lm_loss'] = layers_lib.SoftmaxLoss( FLAGS.vocab_size, FLAGS.num_candidate_samples, self.vocab_freqs, name='LM_loss') cl_logits_input_dim = cl_logits_input_dim or FLAGS.rnn_cell_size self.layers['cl_logits'] = layers_lib.cl_logits_subgraph( [FLAGS.cl_hidden_size] * FLAGS.cl_num_layers, cl_logits_input_dim, FLAGS.num_classes, FLAGS.keep_prob_cl_hidden) @property def pretrained_variables(self): return (self.layers['embedding'].trainable_weights + self.layers['lstm'].trainable_weights) def classifier_training(self): loss = self.classifier_graph() train_op = optimize(loss, self.global_step) return train_op, loss, self.global_step def language_model_training(self): loss = self.language_model_graph() train_op = optimize(loss, self.global_step) return train_op, loss, self.global_step def classifier_graph(self): """Constructs classifier graph from inputs to classifier loss. * Caches the VatxtInput object in `self.cl_inputs` * Caches tensors: `cl_embedded`, `cl_logits`, `cl_loss` Returns: loss: scalar float. 
""" inputs = _inputs('train', pretrain=False) self.cl_inputs = inputs embedded = self.layers['embedding'](inputs.tokens) self.tensors['cl_embedded'] = embedded _, next_state, logits, loss = self.cl_loss_from_embedding( embedded, return_intermediates=True) tf.summary.scalar('classification_loss', loss) self.tensors['cl_logits'] = logits self.tensors['cl_loss'] = loss if FLAGS.single_label: indices = tf.stack([tf.range(FLAGS.batch_size), inputs.length - 1], 1)<|fim▁hole|> weights = inputs.weights acc = layers_lib.accuracy(logits, labels, weights) tf.summary.scalar('accuracy', acc) adv_loss = (self.adversarial_loss() * tf.constant( FLAGS.adv_reg_coeff, name='adv_reg_coeff')) tf.summary.scalar('adversarial_loss', adv_loss) total_loss = loss + adv_loss with tf.control_dependencies([inputs.save_state(next_state)]): total_loss = tf.identity(total_loss) tf.summary.scalar('total_classification_loss', total_loss) return total_loss def language_model_graph(self, compute_loss=True): """Constructs LM graph from inputs to LM loss. * Caches the VatxtInput object in `self.lm_inputs` * Caches tensors: `lm_embedded` Args: compute_loss: bool, whether to compute and return the loss or stop after the LSTM computation. Returns: loss: scalar float. """ inputs = _inputs('train', pretrain=True) self.lm_inputs = inputs return self._lm_loss(inputs, compute_loss=compute_loss) def _lm_loss(self, inputs, emb_key='lm_embedded', lstm_layer='lstm', lm_loss_layer='lm_loss', loss_name='lm_loss', compute_loss=True): embedded = self.layers['embedding'](inputs.tokens) self.tensors[emb_key] = embedded lstm_out, next_state = self.layers[lstm_layer](embedded, inputs.state, inputs.length) if compute_loss: loss = self.layers[lm_loss_layer]( [lstm_out, inputs.labels, inputs.weights]) with tf.control_dependencies([inputs.save_state(next_state)]): loss = tf.identity(loss) tf.summary.scalar(loss_name, loss) return loss def eval_graph(self, dataset='test'): """Constructs classifier evaluation graph. Args: dataset: the labeled dataset to evaluate, {'train', 'test', 'valid'}. Returns: eval_ops: dict<metric name, tuple(value, update_op)> var_restore_dict: dict mapping variable restoration names to variables. Trainable variables will be mapped to their moving average names. """ inputs = _inputs(dataset, pretrain=False) embedded = self.layers['embedding'](inputs.tokens) _, next_state, logits, _ = self.cl_loss_from_embedding( embedded, inputs=inputs, return_intermediates=True) if FLAGS.single_label: indices = tf.stack([tf.range(FLAGS.batch_size), inputs.length - 1], 1) labels = tf.expand_dims(tf.gather_nd(inputs.labels, indices), 1) weights = tf.expand_dims(tf.gather_nd(inputs.weights, indices), 1) else: labels = inputs.labels weights = inputs.weights eval_ops = { 'accuracy': tf.contrib.metrics.streaming_accuracy( layers_lib.predictions(logits), labels, weights) } with tf.control_dependencies([inputs.save_state(next_state)]): acc, acc_update = eval_ops['accuracy'] acc_update = tf.identity(acc_update) eval_ops['accuracy'] = (acc, acc_update) var_restore_dict = make_restore_average_vars_dict() return eval_ops, var_restore_dict def cl_loss_from_embedding(self, embedded, inputs=None, return_intermediates=False): """Compute classification loss from embedding. Args: embedded: 3-D float Tensor [batch_size, num_timesteps, embedding_dim] inputs: VatxtInput, defaults to self.cl_inputs. return_intermediates: bool, whether to return intermediate tensors or only the final loss. 
Returns: If return_intermediates is True: lstm_out, next_state, logits, loss Else: loss """ if inputs is None: inputs = self.cl_inputs lstm_out, next_state = self.layers['lstm'](embedded, inputs.state, inputs.length) if FLAGS.single_label: indices = tf.stack([tf.range(FLAGS.batch_size), inputs.length - 1], 1) lstm_out = tf.expand_dims(tf.gather_nd(lstm_out, indices), 1) labels = tf.expand_dims(tf.gather_nd(inputs.labels, indices), 1) weights = tf.expand_dims(tf.gather_nd(inputs.weights, indices), 1) else: labels = inputs.labels weights = inputs.weights logits = self.layers['cl_logits'](lstm_out) loss = layers_lib.classification_loss(logits, labels, weights) if return_intermediates: return lstm_out, next_state, logits, loss else: return loss def adversarial_loss(self): """Compute adversarial loss based on FLAGS.adv_training_method.""" def random_perturbation_loss(): return adv_lib.random_perturbation_loss(self.tensors['cl_embedded'], self.cl_inputs.length, self.cl_loss_from_embedding) def adversarial_loss(): return adv_lib.adversarial_loss(self.tensors['cl_embedded'], self.tensors['cl_loss'], self.cl_loss_from_embedding) def virtual_adversarial_loss(): """Computes virtual adversarial loss. Uses lm_inputs and constructs the language model graph if it hasn't yet been constructed. Also ensures that the LM input states are saved for LSTM state-saving BPTT. Returns: loss: float scalar. """ if self.lm_inputs is None: self.language_model_graph(compute_loss=False) def logits_from_embedding(embedded, return_next_state=False): _, next_state, logits, _ = self.cl_loss_from_embedding( embedded, inputs=self.lm_inputs, return_intermediates=True) if return_next_state: return next_state, logits else: return logits next_state, lm_cl_logits = logits_from_embedding( self.tensors['lm_embedded'], return_next_state=True) va_loss = adv_lib.virtual_adversarial_loss( lm_cl_logits, self.tensors['lm_embedded'], self.lm_inputs, logits_from_embedding) with tf.control_dependencies([self.lm_inputs.save_state(next_state)]): va_loss = tf.identity(va_loss) return va_loss def combo_loss(): return adversarial_loss() + virtual_adversarial_loss() adv_training_methods = { # Random perturbation 'rp': random_perturbation_loss, # Adversarial training 'at': adversarial_loss, # Virtual adversarial training 'vat': virtual_adversarial_loss, # Both at and vat 'atvat': combo_loss, '': lambda: tf.constant(0.), None: lambda: tf.constant(0.), } with tf.name_scope('adversarial_loss'): return adv_training_methods[FLAGS.adv_training_method]() class VatxtBidirModel(VatxtModel): """Extension of VatxtModel that supports bidirectional input.""" def __init__(self): super(VatxtBidirModel, self).__init__(cl_logits_input_dim=FLAGS.rnn_cell_size * 2) # Reverse LSTM and LM loss for bidirectional models self.layers['lstm_reverse'] = layers_lib.LSTM( FLAGS.rnn_cell_size, FLAGS.rnn_num_layers, FLAGS.keep_prob_lstm_out, name='LSTM_Reverse') self.layers['lm_loss_reverse'] = layers_lib.SoftmaxLoss( FLAGS.vocab_size, FLAGS.num_candidate_samples, self.vocab_freqs, name='LM_loss_reverse') @property def pretrained_variables(self): variables = super(VatxtBidirModel, self).pretrained_variables variables.extend(self.layers['lstm_reverse'].trainable_weights) return variables def classifier_graph(self): """Constructs classifier graph from inputs to classifier loss. * Caches the VatxtInput objects in `self.cl_inputs` * Caches tensors: `cl_embedded` (tuple of forward and reverse), `cl_logits`, `cl_loss` Returns: loss: scalar float. 
""" inputs = _inputs('train', pretrain=False, bidir=True) self.cl_inputs = inputs f_inputs, _ = inputs # Embed both forward and reverse with a shared embedding embedded = [self.layers['embedding'](inp.tokens) for inp in inputs] self.tensors['cl_embedded'] = embedded _, next_states, logits, loss = self.cl_loss_from_embedding( embedded, return_intermediates=True) tf.summary.scalar('classification_loss', loss) self.tensors['cl_logits'] = logits self.tensors['cl_loss'] = loss acc = layers_lib.accuracy(logits, f_inputs.labels, f_inputs.weights) tf.summary.scalar('accuracy', acc) adv_loss = (self.adversarial_loss() * tf.constant( FLAGS.adv_reg_coeff, name='adv_reg_coeff')) tf.summary.scalar('adversarial_loss', adv_loss) total_loss = loss + adv_loss saves = [inp.save_state(state) for (inp, state) in zip(inputs, next_states)] with tf.control_dependencies(saves): total_loss = tf.identity(total_loss) tf.summary.scalar('total_classification_loss', total_loss) return total_loss def language_model_graph(self, compute_loss=True): """Constructs forward and reverse LM graphs from inputs to LM losses. * Caches the VatxtInput objects in `self.lm_inputs` * Caches tensors: `lm_embedded`, `lm_embedded_reverse` Args: compute_loss: bool, whether to compute and return the loss or stop after the LSTM computation. Returns: loss: scalar float, sum of forward and reverse losses. """ inputs = _inputs('train', pretrain=True, bidir=True) self.lm_inputs = inputs f_inputs, r_inputs = inputs f_loss = self._lm_loss(f_inputs, compute_loss=compute_loss) r_loss = self._lm_loss( r_inputs, emb_key='lm_embedded_reverse', lstm_layer='lstm_reverse', lm_loss_layer='lm_loss_reverse', loss_name='lm_loss_reverse', compute_loss=compute_loss) if compute_loss: return f_loss + r_loss def eval_graph(self, dataset='test'): """Constructs classifier evaluation graph. Args: dataset: the labeled dataset to evaluate, {'train', 'test', 'valid'}. Returns: eval_ops: dict<metric name, tuple(value, update_op)> var_restore_dict: dict mapping variable restoration names to variables. Trainable variables will be mapped to their moving average names. """ inputs = _inputs(dataset, pretrain=False, bidir=True) embedded = [self.layers['embedding'](inp.tokens) for inp in inputs] _, next_states, logits, _ = self.cl_loss_from_embedding( embedded, inputs=inputs, return_intermediates=True) f_inputs, _ = inputs eval_ops = { 'accuracy': tf.contrib.metrics.streaming_accuracy( layers_lib.predictions(logits), f_inputs.labels, f_inputs.weights) } # Save states on accuracy update saves = [inp.save_state(state) for (inp, state) in zip(inputs, next_states)] with tf.control_dependencies(saves): acc, acc_update = eval_ops['accuracy'] acc_update = tf.identity(acc_update) eval_ops['accuracy'] = (acc, acc_update) var_restore_dict = make_restore_average_vars_dict() return eval_ops, var_restore_dict def cl_loss_from_embedding(self, embedded, inputs=None, return_intermediates=False): """Compute classification loss from embedding. Args: embedded: Length 2 tuple of 3-D float Tensor [batch_size, num_timesteps, embedding_dim]. inputs: Length 2 tuple of VatxtInput, defaults to self.cl_inputs. return_intermediates: bool, whether to return intermediate tensors or only the final loss. 
Returns: If return_intermediates is True: lstm_out, next_states, logits, loss Else: loss """ if inputs is None: inputs = self.cl_inputs out = [] for (layer_name, emb, inp) in zip(['lstm', 'lstm_reverse'], embedded, inputs): out.append(self.layers[layer_name](emb, inp.state, inp.length)) lstm_outs, next_states = zip(*out) # Concatenate output of forward and reverse LSTMs lstm_out = tf.concat(lstm_outs, 1) logits = self.layers['cl_logits'](lstm_out) f_inputs, _ = inputs # pylint: disable=unpacking-non-sequence loss = layers_lib.classification_loss(logits, f_inputs.labels, f_inputs.weights) if return_intermediates: return lstm_out, next_states, logits, loss else: return loss def adversarial_loss(self): """Compute adversarial loss based on FLAGS.adv_training_method.""" def random_perturbation_loss(): return adv_lib.random_perturbation_loss_bidir(self.tensors['cl_embedded'], self.cl_inputs[0].length, self.cl_loss_from_embedding) def adversarial_loss(): return adv_lib.adversarial_loss_bidir(self.tensors['cl_embedded'], self.tensors['cl_loss'], self.cl_loss_from_embedding) def virtual_adversarial_loss(): """Computes virtual adversarial loss. Uses lm_inputs and constructs the language model graph if it hasn't yet been constructed. Also ensures that the LM input states are saved for LSTM state-saving BPTT. Returns: loss: float scalar. """ if self.lm_inputs is None: self.language_model_graph(compute_loss=False) def logits_from_embedding(embedded, return_next_state=False): _, next_states, logits, _ = self.cl_loss_from_embedding( embedded, inputs=self.lm_inputs, return_intermediates=True) if return_next_state: return next_states, logits else: return logits lm_embedded = (self.tensors['lm_embedded'], self.tensors['lm_embedded_reverse']) next_states, lm_cl_logits = logits_from_embedding( lm_embedded, return_next_state=True) va_loss = adv_lib.virtual_adversarial_loss_bidir( lm_cl_logits, lm_embedded, self.lm_inputs, logits_from_embedding) saves = [ inp.save_state(state) for (inp, state) in zip(self.lm_inputs, next_states) ] with tf.control_dependencies(saves): va_loss = tf.identity(va_loss) return va_loss def combo_loss(): return adversarial_loss() + virtual_adversarial_loss() adv_training_methods = { # Random perturbation 'rp': random_perturbation_loss, # Adversarial training 'at': adversarial_loss, # Virtual adversarial training 'vat': virtual_adversarial_loss, # Both at and vat 'atvat': combo_loss, '': lambda: tf.constant(0.), None: lambda: tf.constant(0.), } with tf.name_scope('adversarial_loss'): return adv_training_methods[FLAGS.adv_training_method]() def _inputs(dataset='train', pretrain=False, bidir=False): return inputs_lib.inputs( data_dir=FLAGS.data_dir, phase=dataset, bidir=bidir, pretrain=pretrain, use_seq2seq=pretrain and FLAGS.use_seq2seq_autoencoder, state_size=FLAGS.rnn_cell_size, num_layers=FLAGS.rnn_num_layers, batch_size=FLAGS.batch_size, unroll_steps=FLAGS.num_timesteps, eos_id=FLAGS.vocab_size - 1) def _get_vocab_freqs(): """Returns vocab frequencies. Returns: List of integers, length=FLAGS.vocab_size. Raises: ValueError: if the length of the frequency file is not equal to the vocab size, or if the file is not found. """ path = FLAGS.vocab_freq_path or os.path.join(FLAGS.data_dir, 'vocab_freq.txt') if tf.gfile.Exists(path): with tf.gfile.Open(path) as f: # Get pre-calculated frequencies of words. 
reader = csv.reader(f, quoting=csv.QUOTE_NONE) freqs = [int(row[-1]) for row in reader] if len(freqs) != FLAGS.vocab_size: raise ValueError('Frequency file length %d != vocab size %d' % (len(freqs), FLAGS.vocab_size)) else: if FLAGS.vocab_freq_path: raise ValueError('vocab_freq_path not found') freqs = [1] * FLAGS.vocab_size return freqs def make_restore_average_vars_dict(): """Returns dict mapping moving average names to variables.""" var_restore_dict = {} variable_averages = tf.train.ExponentialMovingAverage(0.999) for v in tf.global_variables(): if v in tf.trainable_variables(): name = variable_averages.average_name(v) else: name = v.op.name var_restore_dict[name] = v return var_restore_dict def optimize(loss, global_step): return layers_lib.optimize( loss, global_step, FLAGS.max_grad_norm, FLAGS.learning_rate, FLAGS.learning_rate_decay_factor, FLAGS.sync_replicas, FLAGS.replicas_to_aggregate, FLAGS.task)<|fim▁end|>
labels = tf.expand_dims(tf.gather_nd(inputs.labels, indices), 1) weights = tf.expand_dims(tf.gather_nd(inputs.weights, indices), 1) else: labels = inputs.labels
<|file_name|>consulta_partidas_por_producto.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- ############################################################################### # Copyright (C) 2005-2008 Francisco José Rodríguez Bogado, # # Diego Muñoz Escalante. # # ([email protected], [email protected]) # # # # This file is part of GeotexInn. # # # # GeotexInn is free software; you can redistribute it and/or modify # # it under the terms of the GNU General Public License as published by # # the Free Software Foundation; either version 2 of the License, or # # (at your option) any later version. # # # # GeotexInn is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU General Public License for more details. # # # # You should have received a copy of the GNU General Public License # # along with GeotexInn; if not, write to the Free Software # # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA # ############################################################################### ################################################################### ## consulta_partidas_por_producto.py ################################################################### ## NOTAS: ## ################################################################### ## Changelog: ## 4 de abril de 2006 -> Inicio ## ################################################################### from ventana import Ventana from formularios import utils import pygtk pygtk.require('2.0') import gtk, time from framework import pclases import mx.DateTime from informes import geninformes from ventana_progreso import VentanaActividad class ConsultaPartidasPorProducto(Ventana): def __init__(self, objeto = None, usuario = None): """ Constructor. objeto puede ser un objeto de pclases con el que comenzar la ventana (en lugar del primero de la tabla, que es el que se muestra por defecto). """ global fin Ventana.__init__(self, 'consulta_partidas_por_producto.glade', objeto, usuario = usuario) connections = {'b_salir/clicked': self.salir, 'b_buscar/clicked': self.buscar, 'b_imprimir/clicked': self.imprimir, 'b_fecha_inicio/clicked': self.set_inicio, 'b_fecha_fin/clicked': self.set_fin, "b_exportar/clicked": self.exportar}<|fim▁hole|> self.add_connections(connections) cols = (('Num. 
Partida','gobject.TYPE_INT64',False,True,False,None), ('Código','gobject.TYPE_STRING',False,True,False,None), ('Fecha fab.','gobject.TYPE_STRING',False,True,False,None), ('Longitudinal','gobject.TYPE_STRING',False,True,False,None), ('Transversal','gobject.TYPE_STRING',False,True,False,None), ('Compresión','gobject.TYPE_STRING',False,True,False,None), ('Perforación','gobject.TYPE_STRING',False,True,False,None), ('Permeabilidad','gobject.TYPE_STRING',False,True,False,None), ('Poros','gobject.TYPE_STRING',False,True,False,None), ('Espesor','gobject.TYPE_STRING',False,True,False,None), ('Piramidal','gobject.TYPE_STRING',False,True,False,None), ('Idpartida','gobject.TYPE_INT64',False,False,False,None)) utils.preparar_listview(self.wids['tv_datos'], cols) self.wids['tv_datos'].connect("row-activated", self.abrir_parte_tv) utils.rellenar_lista(self.wids['cmbe_producto'], [(p.id, p.descripcion) for p in pclases.ProductoVenta.select(pclases.ProductoVenta.q.camposEspecificosRolloID != None, orderBy = 'descripcion')]) temp = time.localtime() self.fin = mx.DateTime.localtime() self.inicio = None self.resultado = [] self.wids['e_fechafin'].set_text(utils.str_fecha(temp)) gtk.main() def exportar(self, boton): """ Exporta el contenido del TreeView a un fichero csv. """ from informes.treeview2csv import treeview2csv from formularios.reports import abrir_csv tv = self.wids['tv_datos'] abrir_csv(treeview2csv(tv)) def chequear_cambios(self): pass def rellenar_tabla(self,lista = []): """ Rellena el model con los resultados de la búsqueda almacenados en una lista de partidas. """ model = self.wids['tv_datos'].get_model() self.wids['tv_datos'].freeze_child_notify() self.wids['tv_datos'].set_model(None) model.clear() for elem in lista: model.append((elem.numpartida, elem.codigo, # Fecha de fabricación del primero de los artículos del lote elem.rollos[0].articulos[0].parteDeProduccion and \ utils.str_fecha(elem.rollos[0].articulos[0].parteDeProduccion.fecha) or \ "¡PARTE NO ENCONTRADO!", "%.2f" % elem.longitudinal, "%.2f" % elem.transversal, "%.2f" % elem.compresion, "%.2f" % elem.perforacion, "%.2f" % elem.permeabilidad, "%.2f" % elem.poros, "%.2f" % elem.espesor, "%.2f" % elem.piramidal, elem.id)) # elem.rollos[0].articulos[0].parteDeProduccion and \ # elem.rollos[0].articulos[0].parteDeProduccion.id or \ # -1)) self.wids['tv_datos'].set_model(model) self.wids['tv_datos'].thaw_child_notify() def set_inicio(self,boton): temp = utils.mostrar_calendario(padre = self.wids['ventana']) self.wids['e_fechainicio'].set_text(utils.str_fecha(temp)) self.inicio = mx.DateTime.DateTimeFrom(day = temp[0], month = temp[1], year = temp[2]) def set_fin(self,boton): temp = utils.mostrar_calendario(padre = self.wids['ventana']) self.wids['e_fechafin'].set_text(utils.str_fecha(temp)) self.fin = mx.DateTime.DateTimeFrom(day = temp[0], month = temp[1], year = temp[2]) def por_fecha(self,e1,e2): """ Permite ordenar una lista de albaranes por fecha """ if e1.fecha < e2.fecha: return -1 elif e1.fecha > e2.fecha: return 1 else: return 0 def get_unambiguous_fecha(self, fecha): try: res = fecha.strftime('%B %d, %Y') except AttributeError: # Fecha es None return "" trans = {'January': 'enero', 'February': 'febrero', 'March': 'marzo', 'April': 'abril', 'May': 'mayo', 'June': 'junio', 'July': 'julio', 'August': 'agosto', 'September': 'septiembre', 'October': 'octubre', 'November': 'noviembre', 'December': 'diciembre'} for in_english in trans: res = res.replace(trans[in_english], in_english) return res def buscar(self,boton): """ """ 
idproducto = utils.combo_get_value(self.wids['cmbe_producto']) if idproducto == None: utils.dialogo_info(titulo = 'ERROR', texto = 'Seleccione un producto', padre = self.wids['ventana']) return producto = pclases.ProductoVenta.get(idproducto) and_fecha_inicio = "AND parte_de_produccion.fecha >= '%s'" % (self.get_unambiguous_fecha(self.inicio)) if producto.es_rollo(): parte_where_de_consulta = """ partida.id IN (SELECT rollo.partida_id FROM rollo WHERE rollo.id IN (SELECT articulo.rollo_id FROM articulo WHERE articulo.producto_venta_id = %d AND articulo.parte_de_produccion_id IN (SELECT parte_de_produccion.id FROM parte_de_produccion WHERE parte_de_produccion.fecha <= '%s' %s ORDER BY parte_de_produccion.fecha ) ) ) """ % (producto.id, self.get_unambiguous_fecha(self.fin), self.inicio and and_fecha_inicio or "") else: parte_where_de_consulta = """ partida.id IN (SELECT bala.partida_carga_id FROM bala WHERE bala.id IN (SELECT articulo.rollo_id FROM articulo WHERE articulo.producto_venta_id = %d AND articulo.parte_de_produccion_id IN (SELECT parte_de_produccion.id FROM parte_de_produccion WHERE parte_de_produccion.fecha <= '%s' %s ORDER BY parte_de_produccion.fecha ) ) ) """ % (producto.id, self.get_unambiguous_fecha(self.fin), self.inicio and and_fecha_inicio or "") partidas = pclases.Partida.select(parte_where_de_consulta, distinct = True) # Hasta aquí la consulta optimizada para obtener las partidas. Pasamos a recuperar los datos en sí: vpro = VentanaActividad(padre = self.wids['ventana']) vpro.mostrar() self.resultado = [] for p in partidas: vpro.mover() self.resultado.append(p) vpro.ocultar() self.resultado = partidas self.rellenar_tabla(self.resultado) def abrir_parte_tv(self, treeview, path, view_column): idpartida = treeview.get_model()[path][-1] partida = pclases.Partida.get(idpartida) try: parte = partida.rollos[0].articulos[0].parteDeProduccion except AttributeError, e: print "No se encontró el parte: %s", e if parte.es_de_balas(): from formularios import partes_de_fabricacion_balas ventana_parteb = partes_de_fabricacion_balas.PartesDeFabricacionBalas(parte) # @UnusedVariable else: from formularios import partes_de_fabricacion_rollos ventana_parteb = partes_de_fabricacion_rollos.PartesDeFabricacionRollos(parte) # @UnusedVariable def imprimir(self,boton): """ Prepara la vista preliminar para la impresión del informe """ from formularios import reports datos = [] lista = self.resultado for elem in lista: datos.append((elem.numpartida, "%.2f" % elem.longitudinal, "%.2f" % elem.transversal, "%.2f" % elem.compresion, "%.2f" % elem.perforacion, "%.2f" % elem.permeabilidad, "%.2f" % elem.poros, "%.2f" % elem.piramidal, "%.2f" % elem.espesor)) if (self.inicio) == None: fechaInforme = 'Hasta '+utils.str_fecha(self.fin) else: fechaInforme = (utils.str_fecha(self.inicio) + ' - ' + utils.str_fecha(self.fin)) if datos != []: reports.abrir_pdf(geninformes.laboratorioPartidas(datos, fechaInforme)) if __name__ == '__main__': t = ConsultaPartidasPorProducto()<|fim▁end|>
<|file_name|>test_annotations.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import import matplotlib # Force matplotlib to not use any Xwindows backend. matplotlib.use('Agg') import matplotlib.pyplot as plt from plotly.tests.utils import compare_dict from plotly.tests.test_optional.optional_utils import run_fig from plotly.tests.test_optional.test_matplotlylib.data.annotations import * def test_annotations(): fig, ax = plt.subplots()<|fim▁hole|> ax.plot([1, 2, 3], 'b-') ax.plot([3, 2, 1], 'b-') ax.text(0.001, 0.999, 'top-left', transform=ax.transAxes, va='top', ha='left') ax.text(0.001, 0.001, 'bottom-left', transform=ax.transAxes, va='baseline', ha='left') ax.text(0.999, 0.999, 'top-right', transform=ax.transAxes, va='top', ha='right') ax.text(0.999, 0.001, 'bottom-right', transform=ax.transAxes, va='baseline', ha='right') renderer = run_fig(fig) for data_no, data_dict in enumerate(renderer.plotly_fig['data']): equivalent, msg = compare_dict(data_dict, ANNOTATIONS['data'][data_no]) assert equivalent, msg for no, note in enumerate(renderer.plotly_fig['layout']['annotations']): equivalent, msg = compare_dict(note, ANNOTATIONS['layout']['annotations'][no]) assert equivalent, msg<|fim▁end|>
<|file_name|>after.js<|end_file_name|><|fim▁begin|>var toInteger = require('./toInteger'); /** Used as the `TypeError` message for "Functions" methods. */ var FUNC_ERROR_TEXT = 'Expected a function'; /** * The opposite of `_.before`; this method creates a function that invokes * `func` once it's called `n` or more times. * * @static * @memberOf _ * @category Function * @param {number} n The number of calls before `func` is invoked. * @param {Function} func The function to restrict. * @returns {Function} Returns the new restricted function. * @specs * * var saves = ['profile', 'settings']; * * var done = _.after(saves.length, function() { * console.log('done saving!'); * }); * * _.forEach(saves, function(type) { * asyncSave({ 'type': type, 'complete': done }); * }); * // => logs 'done saving!' after the two async saves have completed */ function after(n, func) { if (typeof func != 'function') { throw new TypeError(FUNC_ERROR_TEXT); }<|fim▁hole|> } }; } module.exports = after;<|fim▁end|>
n = toInteger(n); return function() { if (--n < 1) { return func.apply(this, arguments);
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Worker implementation.""" from __future__ import absolute_import, unicode_literals from .worker import WorkController <|fim▁hole|>__all__ = ('WorkController',)<|fim▁end|>
<|file_name|>test_scene.py<|end_file_name|><|fim▁begin|>"""deCONZ scene platform tests.""" from copy import deepcopy from asynctest import patch from homeassistant.components import deconz from homeassistant.setup import async_setup_component import homeassistant.components.scene as scene from .test_gateway import ENTRY_CONFIG, DECONZ_WEB_REQUEST, setup_deconz_integration GROUPS = { "1": { "id": "Light group id", "name": "Light group", "type": "LightGroup", "state": {"all_on": False, "any_on": True}, "action": {}, "scenes": [{"id": "1", "name": "Scene"}], "lights": [], } } async def test_platform_manually_configured(hass): """Test that we do not discover anything or try to set up a gateway.""" assert ( await async_setup_component( hass, scene.DOMAIN, {"scene": {"platform": deconz.DOMAIN}} ) is True ) assert deconz.DOMAIN not in hass.data async def test_no_scenes(hass): """Test that scenes can be loaded without scenes being available."""<|fim▁hole|> hass, ENTRY_CONFIG, options={}, get_state_response=data ) assert len(gateway.deconz_ids) == 0 assert len(hass.states.async_all()) == 0 async def test_scenes(hass): """Test that scenes works.""" data = deepcopy(DECONZ_WEB_REQUEST) data["groups"] = deepcopy(GROUPS) gateway = await setup_deconz_integration( hass, ENTRY_CONFIG, options={}, get_state_response=data ) assert "scene.light_group_scene" in gateway.deconz_ids assert len(hass.states.async_all()) == 1 light_group_scene = hass.states.get("scene.light_group_scene") assert light_group_scene group_scene = gateway.api.groups["1"].scenes["1"] with patch.object( group_scene, "_async_set_state_callback", return_value=True ) as set_callback: await hass.services.async_call( "scene", "turn_on", {"entity_id": "scene.light_group_scene"}, blocking=True ) await hass.async_block_till_done() set_callback.assert_called_with("/groups/1/scenes/1/recall", {}) await gateway.async_reset() assert len(hass.states.async_all()) == 0<|fim▁end|>
data = deepcopy(DECONZ_WEB_REQUEST) gateway = await setup_deconz_integration(
<|file_name|>index.js<|end_file_name|><|fim▁begin|>"use strict"; var responsive_window_1 = require("./responsive-window");<|fim▁hole|>]; //# sourceMappingURL=index.js.map<|fim▁end|>
exports.ResponsiveWindow = responsive_window_1.ResponsiveWindow; exports.RESPONSIVEWINDOW_DIRECTIVE = [ responsive_window_1.ResponsiveWindow
<|file_name|>ieem_sitelist_parser_browsertest.cc<|end_file_name|><|fim▁begin|>// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/browser_switcher/ieem_sitelist_parser.h" #include "base/bind.h" #include "base/callback_helpers.h" #include "base/memory/scoped_refptr.h" #include "base/run_loop.h" #include "chrome/test/base/in_process_browser_test.h" #include "content/public/test/browser_test.h" #include "services/data_decoder/public/cpp/safe_xml_parser.h" #include "testing/gtest/include/gtest/gtest.h" namespace browser_switcher { namespace { void OnXmlParsed(base::RepeatingClosure quit_run_loop, ParsedXml expected, ParsedXml actual) { base::ScopedClosureRunner runner(std::move(quit_run_loop)); EXPECT_EQ(expected.rules, actual.rules); EXPECT_EQ(expected.error.has_value(), actual.error.has_value()); if (expected.error.has_value() && actual.error.has_value()) EXPECT_EQ(*expected.error, *actual.error); } void TestParseXml(const std::string& xml, ParsedXml expected) { base::RunLoop run_loop; ParseIeemXml(xml, base::BindOnce(&OnXmlParsed, run_loop.QuitClosure(), std::move(expected))); run_loop.Run(); } } // namespace class IeemSitelistParserTest : public InProcessBrowserTest { public: IeemSitelistParserTest() = default; ~IeemSitelistParserTest() override = default; }; IN_PROC_BROWSER_TEST_F(IeemSitelistParserTest, BadXml) { TestParseXml("", ParsedXml({}, "Invalid XML: bad content")); TestParseXml("thisisnotxml", ParsedXml({}, "Invalid XML: bad content")); } IN_PROC_BROWSER_TEST_F(IeemSitelistParserTest, BadXmlParsed) { TestParseXml("<bogus></bogus>", ParsedXml({}, "Invalid XML root element")); TestParseXml("<rules version=\"424\"><unknown></unknown></rules>", ParsedXml({}, base::nullopt)); } IN_PROC_BROWSER_TEST_F(IeemSitelistParserTest, V1OnlyBogusElements) { std::string xml = "<rules version=\"424\">" "<unknown><more><docMode><domain>ignore.com</domain></docMode>" "</more><emie><domain>ignoretoo.com<path>/ignored_path</path>" "</domain></emie><domain>onemoreignored.com</domain>" "<path>/ignore_outside_of_domain></path></unknown></rules>"; TestParseXml(xml, ParsedXml({}, base::nullopt)); } IN_PROC_BROWSER_TEST_F(IeemSitelistParserTest, V1Full) { std::string xml = "<rules version=\"424\"><unknown><more><docMode><domain>ignore" "</domain></docMode></more><emie><domain>ignoretoo.com<path>/ignored_path" "</path></domain></emie><domain>onemoreingored.com</domain><path>" "/ignore_outside_of_domain></path></unknown><emie><other><more><docMode>" "<domain>ignore.com</domain></docMode></more><emie><domain>ignoretoo.com" "<path>/ignored_path</path></domain></emie><domain>onemoreingored.com" "</domain><path>/ignore_outside_of_domain></path></other><!--<domain " "exclude=\"false\">hotscanacc.dbch.b-source.net<path exclude=\"false\">" "/HotScan/</path></domain>--><domain>inside.com<more><docMode><domain>" "ignore.com</domain></docMode></more><emie><domain>ignoretoo.com<path>" "/ignored_path</path></domain></emie><domain>onemoreingored.com</domain>" "<path>/in_domain<more><docMode><domain>ignore.com</domain></docMode>" "</more><emie><domain>ignoretoo.com<path>/ignored_path</path></domain>" "</emie><domain>onemoreingored.com</domain><path>/ignore_nested_path>" "</path></path></domain><domain> \ngoogle.com\t\t \t</domain><domain " "exclude=\"true\">good.com</domain><domain exclude=\"false\">more.com" "</domain><domain>e100.com<path>/path1</path><path exclude=\"true\">/pa2" 
"</path><path exclude=\"false\">/path3</path></domain><domain " "exclude=\"true\">e200.com<path>/path1</path><path exclude=\"true\">/pth2" "</path><path exclude=\"false\">/path3</path></domain><domain " "exclude=\"false\">e300.com<path>/path1</path><path exclude=\"true\">/pt2" "</path><path exclude=\"false\">/path3</path></domain><domain " "exclude=\"true\">random.com<path exclude=\"true\">/path1/</path><path " "exclude=\"false\" forceCompatView=\"true\">/path2<path exclude=\"true\">" "/TEST</path></path></domain></emie><docMode><domain docMode=\"8\">" "moredomains.com</domain><domain docMode=\"5\">evenmore.com<path " "docMode=\"5\">/r1</path><path docMode=\"5\">/r2</path></domain><domain " "docMode=\"5\" exclude=\"true\">domainz.com<path docMode=\"5\">/r2</path>" "<path docMode=\"5\" exclude=\"true\"> \n/r5\t</path><path docMode=\"5\" " "exclude=\"false\">/r6</path></domain><domain docMode=\"5\" " "exclude=\"false\">howmanydomainz.com<path docMode=\"5\">/r8</path><path " "docMode=\"5\" exclude=\"true\">/r9</path><path docMode=\"5\" " "exclude=\"false\">/r10</path></domain><domain exclude=\"true\" " "doNotTransition=\"true\">maybe.com<path>/yestransition</path>" "<path doNotTransition=\"true\">/guessnot</path></domain><domain>" "yes.com<path doNotTransition=\"true\">/actuallyno</path></domain>" "<domain doNotTransition=\"true\">no.com</domain></docMode></rules>"; std::vector<std::string> expected_sitelist = { "inside.com", "inside.com/in_domain", "google.com", "more.com", "e100.com", "e100.com/path1", "e100.com/path3",<|fim▁hole|> "e200.com/path3", "e300.com", "e300.com/path1", "e300.com/path3", "random.com/path2", "moredomains.com", "evenmore.com", "evenmore.com/r1", "evenmore.com/r2", "domainz.com/r2", "domainz.com/r6", "howmanydomainz.com", "howmanydomainz.com/r8", "howmanydomainz.com/r10", "maybe.com/yestransition", "!maybe.com/guessnot", "yes.com", "!yes.com/actuallyno", "!no.com", }; TestParseXml(xml, ParsedXml(std::move(expected_sitelist), base::nullopt)); } IN_PROC_BROWSER_TEST_F(IeemSitelistParserTest, V2Full) { // Very subtle issue in the closing element for rules. 
std::string xml = "<site-list version=\"205\"><!-- File creation header -->" "<created-by><tool>EnterpriseSitelistManager</tool><version>10240" "</version><date-created>20150728.135021</date-created></created-by>" "<!-- unknown tags --><unknown><test><mest>test</mest></test>" "<!-- comments --></unknown><!-- no url attrib --><site><open-in>none" "</open-in></site><!-- nested site list --><site-list><site " "url=\"ignore!\"/></site-list><!-- nested site --><site " "url=\"google.com\"><site url=\"nested ignore!\"></site></site><!-- " "unknown tags in a site on multiple levels --><site url=\"good.site\">" "<!-- nested comments --><somethings>klj<other some=\"none\"/>jkh" "</somethings></site><!-- good sites --> <site url=\"www.cpandl.com\">" "<compat-mode>IE8Enterprise</compat-mode><open-in>MSEdge</open-in></site>" "<site url=\"contoso.com\"><compat-mode>default</compat-mode><open-in>" "None</open-in></site><site url=\"relecloud.com\"/><site " "url=\"relecloud.com/about\"><compat-mode>IE8Enterprise</compat-mode>" "</site></site-list><!-- trailing gibberish <trailing><site " "url=\"ignore after site list!\"> <compat-mode>IE8Enterprise\"" "</compat-mode></site><gibberish>Lorem ipsum sit...</gibberish>" "</trailing>-->"; std::vector<std::string> expected_sitelist = { "!google.com", "!good.site", "www.cpandl.com", "!contoso.com", "!relecloud.com", "!relecloud.com/about", }; TestParseXml(xml, ParsedXml(std::move(expected_sitelist), base::nullopt)); } } // namespace browser_switcher<|fim▁end|>
"e200.com/path1",
<|file_name|>test_md.py<|end_file_name|><|fim▁begin|># SPDX-License-Identifier: GPL-2.0 # Copyright (c) 2015 Stephen Warren # Copyright (c) 2015-2016, NVIDIA CORPORATION. All rights reserved. import pytest import u_boot_utils @pytest.mark.buildconfigspec('cmd_memory') def test_md(u_boot_console): """Test that md reads memory as expected, and that memory can be modified<|fim▁hole|> val = 'a5f09876' expected_response = addr + ': ' + val u_boot_console.run_command('mw ' + addr + ' 0 10') response = u_boot_console.run_command('md ' + addr + ' 10') assert(not (expected_response in response)) u_boot_console.run_command('mw ' + addr + ' ' + val) response = u_boot_console.run_command('md ' + addr + ' 10') assert(expected_response in response) @pytest.mark.buildconfigspec('cmd_memory') def test_md_repeat(u_boot_console): """Test command repeat (via executing an empty command) operates correctly for "md"; the command must repeat and dump an incrementing address.""" ram_base = u_boot_utils.find_ram_base(u_boot_console) addr_base = '%08x' % ram_base words = 0x10 addr_repeat = '%08x' % (ram_base + (words * 4)) u_boot_console.run_command('md %s %x' % (addr_base, words)) response = u_boot_console.run_command('') expected_response = addr_repeat + ': ' assert(expected_response in response)<|fim▁end|>
using the mw command.""" ram_base = u_boot_utils.find_ram_base(u_boot_console) addr = '%08x' % ram_base
<|file_name|>status.rs<|end_file_name|><|fim▁begin|>use config::{Config, OutputFormat}; use utils::console::*; use utils::output; use clap::{App, Arg, ArgMatches, SubCommand}; use hyper::Client; use serde_json; use std::io::Read; use std::str; use webbrowser; pub const NAME: &'static str = "status"; error_chain! { errors { CenterDeviceStatusFailed { description("failed to get CenterDevice status") display("failed to get CenterDevice status") } } } #[derive(Deserialize, Debug)] enum Status { Okay, Warning, Failed, } #[allow(non_snake_case)] #[derive(Deserialize, Debug)] struct Rest { Status: Status, Timestamp: String, NotificationQueueClientPool: bool, FolderHealthCheckSensor: bool, DocumentQueueClientPool: bool, MetadataStoreResource: bool, NotificationStoreResource: bool, SearchEngineResource: bool, SecurityDataStoreResource: bool, SendEmailQueueClientPool: bool, UserdataStoreResource: bool, } #[allow(non_snake_case)] #[derive(Deserialize, Debug)] struct Auth { Status: Status, Timestamp: String, AuthServer: bool, } #[allow(non_snake_case)] #[derive(Deserialize, Debug)] struct WebClient { Status: Status, Timestamp: String, NotificationAlertingService: bool, RestServerSensor: bool, } #[allow(non_snake_case)] #[derive(Deserialize, Debug)] struct PublicLink { Status: Status, Timestamp: String, PublicLinkClient: bool, } #[allow(non_snake_case)] #[derive(Deserialize, Debug)] struct DistributorConsole { Status: Status, Timestamp: String, RestServerSensor: bool, } #[allow(non_camel_case_types)] #[derive(Deserialize, Debug)] enum PingDomStatus { up, down, } #[derive(Deserialize, Debug)] struct Checks { checks: Vec<Vec<Check>>, } #[derive(Deserialize, Debug)] struct Check { hostname: String, status: PingDomStatus, lasttesttime: String, } #[allow(non_snake_case)] #[derive(Deserialize, Debug)] struct PingDom { Status: Status, Timestamp: String, Checks: Checks, } #[allow(non_snake_case)] #[derive(Deserialize, Debug)] struct CenterDeviceStatus { Status: Status, Rest: Rest, Auth: Auth, WebClient: WebClient, PublicLink: PublicLink, DistributorConsole: DistributorConsole, PingDom: PingDom, } pub fn build_sub_cli() -> App<'static, 'static> { SubCommand::with_name(NAME) .about("Gets public centerdevice status from status server") .arg(Arg::with_name("details") .long("details") .help("Show detailed output")) .arg(Arg::with_name("browser") .long("browser") .help("Open status in web browser")) } pub fn call(args: Option<&ArgMatches>, config: &Config) -> Result<()> { let details = args.ok_or(false).unwrap().is_present("details"); let browser = args.ok_or(false).unwrap().is_present("browser"); if browser { info("Opening CenterDevice Status in default browser ..."); browse(config, details).chain_err(|| ErrorKind::CenterDeviceStatusFailed) } else { info("Getting CenterDevice Status ..."); status(config, details).chain_err(|| ErrorKind::CenterDeviceStatusFailed) } } fn browse(config: &Config, details: bool) -> Result<()> { if details { webbrowser::open("http://status.centerdevice.de/details.html") } else { webbrowser::open("http://status.centerdevice.de") }.chain_err(|| "Failed to open default browser")?; if config.general.output_format == OutputFormat::JSON { msgln("{}"); } Ok(()) } fn status(config: &Config, details: bool) -> Result<()> { let json = get_centerdevice_status_json()?; output(&json, &config.general.output_format, details) } fn get_centerdevice_status_json() -> Result<String> { let url = "http://status.centerdevice.de/details.json"; let mut response = Client::new() .get(url) .send() .chain_err(|| 
ErrorKind::CenterDeviceStatusFailed)?; let mut buffer = Vec::new(); response.read_to_end(&mut buffer).chain_err(|| "Failed to read HTTP response")?; let json = str::from_utf8(&buffer).chain_err(|| "Failed to parse JSON")?; Ok(json.to_string()) } fn output(json: &str, format: &OutputFormat, details: bool) -> Result<()> { match *format { OutputFormat::HUMAN => output_human(json, details), OutputFormat::JSON => output::as_json(json) .chain_err(|| ErrorKind::CenterDeviceStatusFailed), } } fn output_human(json: &str, details: bool) -> Result<()> { let status: CenterDeviceStatus = serde_json::from_str(json).chain_err(|| "JSON parsing failed")?; match (&status.Status, details) { (&Status::Okay, false) => msgln(format!("CenterDevice status is {:?}.", status.Status)),<|fim▁hole|> msgln(format!("CenterDevice status is {:?}.", status.Status)); msgln(format!("+ Rest: {:?}", status.Rest.Status)); msgln(format!("+ Auth: {:?}", status.Auth.Status)); msgln(format!("+ WebClient: {:?}", status.WebClient.Status)); msgln(format!("+ PublicLink: {:?}", status.PublicLink.Status)); msgln(format!("+ DistributorConsole: {:?}", status.DistributorConsole.Status)); msgln(format!("+ PingDom: {:?}", status.PingDom.Status)); } } Ok(()) }<|fim▁end|>
(&Status::Okay, true) | (&Status::Warning, _) | (&Status::Failed, _) => {
<|file_name|>variables_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|># ============================================================================== """Tests for tf.py.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import operator import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors_impl from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import gen_state_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import random_ops from tensorflow.python.ops import resource_variable_ops from tensorflow.python.ops import variables from tensorflow.python.platform import test from tensorflow.python.training import gradient_descent from tensorflow.python.util import compat class VariablesTestCase(test.TestCase): def testInitialization(self): with self.test_session(): var0 = variables.Variable(0.0) self.assertEqual("Variable:0", var0.name) self.assertEqual([], var0.get_shape()) self.assertEqual([], var0.get_shape()) self.assertEqual([], var0.shape) var1 = variables.Variable(1.1) self.assertEqual("Variable_1:0", var1.name) self.assertEqual([], var1.get_shape()) self.assertEqual([], var1.get_shape()) self.assertEqual([], var1.shape) with self.assertRaisesOpError("Attempting to use uninitialized value"): var0.eval() with self.assertRaisesOpError("Attempting to use uninitialized value"): var1.eval() variables.global_variables_initializer().run() self.assertAllClose(0.0, var0.eval()) self.assertAllClose(1.1, var1.eval()) def testInitializationOrder(self): with self.test_session(): rnd = variables.Variable(random_ops.random_uniform([3, 6]), name="rnd") self.assertEqual("rnd:0", rnd.name) self.assertEqual([3, 6], rnd.get_shape()) self.assertEqual([3, 6], rnd.get_shape()) self.assertEqual([3, 6], rnd.shape) dep = variables.Variable(rnd.initialized_value(), name="dep") self.assertEqual("dep:0", dep.name) self.assertEqual([3, 6], dep.get_shape()) self.assertEqual([3, 6], dep.get_shape()) self.assertEqual([3, 6], dep.shape) # Currently have to set the shape manually for Add. 
added_val = rnd.initialized_value() + dep.initialized_value() + 2.0 added_val.set_shape(rnd.get_shape()) depdep = variables.Variable(added_val, name="depdep") self.assertEqual("depdep:0", depdep.name) self.assertEqual([3, 6], depdep.get_shape()) self.assertEqual([3, 6], depdep.get_shape()) self.assertEqual([3, 6], depdep.shape) variables.global_variables_initializer().run() self.assertAllClose(rnd.eval(), dep.eval()) self.assertAllClose(rnd.eval() + dep.eval() + 2.0, depdep.eval()) def testIterable(self): with self.assertRaisesRegexp(TypeError, "not iterable"): for _ in variables.Variable(0.0): pass with self.assertRaisesRegexp(TypeError, "not iterable"): for _ in variables.Variable([0.0, 1.0]): pass def testAssignments(self): with self.test_session(): var = variables.Variable(0.0) plus_one = var.assign_add(1.0) minus_one = var.assign_sub(2.0) four = var.assign(4.0) variables.global_variables_initializer().run() self.assertAllClose(0.0, var.eval()) self.assertAllClose(1.0, plus_one.eval()) self.assertAllClose(1.0, var.eval()) self.assertAllClose(-1.0, minus_one.eval()) self.assertAllClose(-1.0, var.eval()) self.assertAllClose(4.0, four.eval()) self.assertAllClose(4.0, var.eval()) def testResourceAssignments(self): with self.test_session(use_gpu=True): var = resource_variable_ops.ResourceVariable(0.0) plus_one = var.assign_add(1.0) minus_one = var.assign_sub(2.0) four = var.assign(4.0) variables.global_variables_initializer().run() self.assertAllClose(0.0, var.eval()) plus_one.eval() self.assertAllClose(1.0, var.eval()) minus_one.eval() self.assertAllClose(-1.0, var.eval()) four.eval() self.assertAllClose(4.0, var.eval()) def testZeroSizeStringAssign(self): with self.test_session() as sess: array = variables.Variable( initial_value=array_ops.zeros((0,), dtype=dtypes.string), name="foo", trainable=False, collections=[ops.GraphKeys.LOCAL_VARIABLES]) sess.run(variables.local_variables_initializer()) old_value = array.value() copy_op = array.assign(old_value) self.assertEqual([], list(sess.run(copy_op))) def _countUpToTest(self, dtype): with self.test_session(): zero = constant_op.constant(0, dtype=dtype) var = variables.Variable(zero) count_up_to = var.count_up_to(3) variables.global_variables_initializer().run() self.assertEqual(0, var.eval()) self.assertEqual(0, count_up_to.eval()) self.assertEqual(1, var.eval()) self.assertEqual(1, count_up_to.eval()) self.assertEqual(2, var.eval()) self.assertEqual(2, count_up_to.eval()) self.assertEqual(3, var.eval()) with self.assertRaisesOpError("Reached limit of 3"): count_up_to.eval() self.assertEqual(3, var.eval()) with self.assertRaisesOpError("Reached limit of 3"): count_up_to.eval() self.assertEqual(3, var.eval()) def testCountUpToInt32(self): self._countUpToTest(dtypes.int32) def testCountUpToInt64(self): self._countUpToTest(dtypes.int64) def testControlDepsNone(self): with self.test_session(): c = constant_op.constant(1.0) with ops.control_dependencies([c]): # d get the control dep. d = constant_op.constant(2.0) # variables do not. var_x = variables.Variable(2.0) self.assertEqual([c.op], d.op.control_inputs) self.assertEqual([], var_x.initializer.control_inputs) self.assertEqual([], var_x.value().op.control_inputs) self.assertEqual([], var_x._ref().op.control_inputs) # pylint: disable=protected-access def testControlFlow(self): with self.test_session() as sess: v0 = variables.Variable(0, name="v0") var_dict = {} # Call get_variable in each of the cond clauses. 
def var_in_then_clause(): v1 = variables.Variable(1, name="v1") var_dict["v1"] = v1 return v1 + v0 def var_in_else_clause(): v2 = variables.Variable(2, name="v2") var_dict["v2"] = v2 return v2 + v0 add = control_flow_ops.cond( math_ops.less(v0, 10), var_in_then_clause, var_in_else_clause) v1 = var_dict["v1"] v2 = var_dict["v2"] # We should be able to initialize and run v1 and v2 without initializing # v0, even if the variable was created with a control dep on v0. sess.run(v1.initializer) self.assertEqual([1], sess.run(v1)) sess.run(v2.initializer) self.assertEqual([2], sess.run(v2)) # v0 should still be uninitialized. with self.assertRaisesRegexp(errors_impl.OpError, "uninitialized"): sess.run(v0) # We should not be able to run 'add' yet. with self.assertRaisesRegexp(errors_impl.OpError, "uninitialized"): sess.run(add) # If we initialize v0 we should be able to run 'add'. sess.run(v0.initializer) sess.run(add) def testControlFlowInitialization(self): """Expects an error if an initializer is in a control-flow scope.""" def cond(i, _): return i < 10 def body(i, _): zero = array_ops.zeros([], dtype=dtypes.int32) v = variables.Variable(initial_value=zero) return (i + 1, v.read_value()) with self.assertRaisesRegexp(ValueError, "inside a control-flow"): control_flow_ops.while_loop(cond, body, [0, 0]) def testUseVariableAsTensor(self): with self.test_session(): var_x = variables.Variable(2.0) var_y = variables.Variable(3.0) variables.global_variables_initializer().run() self.assertAllClose(2.0, var_x.eval()) self.assertAllClose(3.0, var_y.eval()) self.assertAllClose(5.0, math_ops.add(var_x, var_y).eval()) def testZeroSizeVarSameAsConst(self): with self.test_session(): zero_size_var = variables.Variable(array_ops.zeros([0, 2])) zero_size_const = array_ops.ones([2, 0]) variable_mul = math_ops.matmul(zero_size_const, zero_size_var) const_mul = math_ops.matmul( zero_size_const, zero_size_const, transpose_b=True) variables.global_variables_initializer().run() variable_output = variable_mul.eval() self.assertAllClose(const_mul.eval(), variable_output) self.assertAllClose([[0., 0.], [0., 0.]], variable_output) def testCachingDevice(self): with self.test_session(): var = variables.Variable(2.0) self.assertEqual(var.device, var.value().device) self.assertEqual(var.device, var.initialized_value().device) var_cached = variables.Variable(2.0, caching_device="/job:foo") self.assertFalse(var_cached.device.startswith("/job:foo")) self.assertTrue(var_cached.value().device.startswith("/job:foo")) def testCollections(self): with self.test_session(): var_x = variables.Variable(2.0) var_y = variables.Variable(2.0, trainable=False) var_z = variables.Variable(2.0, trainable=True) var_t = variables.Variable( 2.0, trainable=True, collections=[ ops.GraphKeys.TRAINABLE_VARIABLES, ops.GraphKeys.GLOBAL_VARIABLES ]) self.assertEqual([var_x, var_y, var_z, var_t], variables.global_variables()) self.assertEqual([var_x, var_z, var_t], variables.trainable_variables()) def testCollectionsWithScope(self): with self.test_session(): with ops.name_scope("scope_1"): var_x = variables.Variable(2.0) with ops.name_scope("scope_2"): var_y = variables.Variable(2.0) self.assertEqual([var_x, var_y], variables.global_variables()) self.assertEqual([var_x], variables.global_variables("scope_1")) self.assertEqual([var_y], variables.global_variables("scope_2")) self.assertEqual([var_x, var_y], variables.trainable_variables()) self.assertEqual([var_x], variables.trainable_variables("scope_1")) self.assertEqual([var_y], 
variables.trainable_variables("scope_2")) def testOperators(self): with self.test_session(): var_f = variables.Variable([2.0]) add = var_f + 0.0 radd = 1.0 + var_f sub = var_f - 1.0 rsub = 1.0 - var_f mul = var_f * 10.0 rmul = 10.0 * var_f div = var_f / 10.0 rdiv = 10.0 / var_f lt = var_f < 3.0 rlt = 3.0 < var_f le = var_f <= 2.0 rle = 2.0 <= var_f gt = var_f > 3.0 rgt = 3.0 > var_f ge = var_f >= 2.0 rge = 2.0 >= var_f neg = -var_f abs_v = abs(var_f) var_i = variables.Variable([20]) mod = var_i % 7 rmod = 103 % var_i var_b = variables.Variable([True, False]) and_v = operator.and_(var_b, [True, True]) or_v = operator.or_(var_b, [False, True]) xor_v = operator.xor(var_b, [False, False]) invert_v = ~var_b rnd = np.random.rand(4, 4).astype("f") var_t = variables.Variable(rnd) slice_v = var_t[2, 0:0] var_m = variables.Variable([[2.0, 3.0]]) matmul = var_m.__matmul__([[10.0], [20.0]]) rmatmul = var_m.__rmatmul__([[10.0], [20.0]]) variables.global_variables_initializer().run() self.assertAllClose([2.0], add.eval()) self.assertAllClose([3.0], radd.eval()) self.assertAllClose([1.0], sub.eval()) self.assertAllClose([-1.0], rsub.eval()) self.assertAllClose([20.0], mul.eval()) self.assertAllClose([20.0], rmul.eval()) self.assertAllClose([0.2], div.eval()) self.assertAllClose([5.0], rdiv.eval()) self.assertAllClose([-2.0], neg.eval()) self.assertAllClose([2.0], abs_v.eval()) self.assertAllClose([True], lt.eval()) self.assertAllClose([False], rlt.eval()) self.assertAllClose([True], le.eval()) self.assertAllClose([True], rle.eval()) self.assertAllClose([False], gt.eval()) self.assertAllClose([True], rgt.eval()) self.assertAllClose([True], ge.eval()) self.assertAllClose([True], rge.eval()) self.assertAllClose([6], mod.eval()) self.assertAllClose([3], rmod.eval()) self.assertAllClose([True, False], and_v.eval()) self.assertAllClose([True, True], or_v.eval()) self.assertAllClose([True, False], xor_v.eval()) self.assertAllClose([False, True], invert_v.eval()) self.assertAllClose(rnd[2, 0:0], slice_v.eval()) self.assertAllClose([[80.0]], matmul.eval()) self.assertAllClose([[20.0, 30.0], [40.0, 60.0]], rmatmul.eval()) def testSession(self): with self.test_session() as sess: var = variables.Variable([1, 12]) variables.global_variables_initializer().run() self.assertAllClose([1, 12], sess.run(var)) def testDevicePlacement(self): with self.test_session() as sess: with ops.device("/cpu:0"): var = variables.Variable([1, 12]) init_value = var.initialized_value() init_op = variables.global_variables_initializer() self.assertEqual(var.op.device, init_value.device) self.assertEqual(var.op.device, init_op.device) sess.run(init_op) def testColocation(self): with ops.device("/job:ps"): var = variables.Variable(0, name="v") with ops.device("/job:worker/task:7"): assign_op = var.assign(1) self.assertDeviceEqual("/job:ps", assign_op.device) self.assertEqual([b"loc:@v"], assign_op.op.colocation_groups()) def testInitializerFunction(self): value = [[-42], [133.7]] shape = [2, 1] with self.test_session(): initializer = lambda: constant_op.constant(value) v1 = variables.Variable(initializer, dtype=dtypes.float32) self.assertEqual(shape, v1.get_shape()) self.assertEqual(shape, v1.shape) self.assertAllClose(value, v1.initial_value.eval()) with self.assertRaises(errors_impl.FailedPreconditionError): v1.eval() v2 = variables.Variable( math_ops.negative(v1.initialized_value()), dtype=dtypes.float32) self.assertEqual(v1.get_shape(), v2.get_shape()) self.assertEqual(v1.shape, v2.shape) self.assertAllClose(np.negative(value), 
v2.initial_value.eval()) with self.assertRaises(errors_impl.FailedPreconditionError): v2.eval() variables.global_variables_initializer().run() self.assertAllClose(np.negative(value), v2.eval()) def testConstraintArg(self): constraint = lambda x: x v = variables.Variable( lambda: constant_op.constant(1.), constraint=constraint) self.assertEqual(v.constraint, constraint) constraint = 0 with self.assertRaises(ValueError): v = variables.Variable( lambda: constant_op.constant(1.), constraint=constraint) def testNoRefDataRace(self): with self.test_session(): a = variables.Variable([1, 2, 3], dtype=dtypes.float32) b = variables.Variable(a.initialized_value() + 2) c = variables.Variable(b.initialized_value() + 2) variables.global_variables_initializer().run() self.assertAllEqual(a.eval(), [1, 2, 3]) self.assertAllEqual(b.eval(), [3, 4, 5]) self.assertAllEqual(c.eval(), [5, 6, 7]) def testInitializerFunctionDevicePlacement(self): with self.test_session(): initializer = lambda: constant_op.constant(42.0) with ops.device("/cpu:100"): v1 = variables.Variable(initializer, dtype=dtypes.float32, name="v1") expected_device = "/device:CPU:100" expected_group_v1 = [b"loc:@v1"] self.assertEqual(expected_device, v1.op.device) self.assertEqual(expected_group_v1, v1.op.colocation_groups()) for i in v1.initializer.inputs: self.assertEqual(expected_group_v1, i.op.colocation_groups()) v2 = variables.Variable(initializer, dtype=dtypes.float32, name="v2") expected_group_v2 = [b"loc:@v2"] self.assertEqual(expected_group_v2, v2.op.colocation_groups()) for i in v2.initializer.inputs: self.assertEqual(expected_group_v2, i.op.colocation_groups()) def testVariableDefInitializedInstances(self): with ops.Graph().as_default(), self.test_session() as sess: v_def = variables.Variable( initial_value=constant_op.constant(3.0)).to_proto() with ops.Graph().as_default(), self.test_session() as sess: # v describes a VariableDef-based variable without an initial value. v = variables.Variable(variable_def=v_def) self.assertEqual(3.0, sess.run(v.initialized_value())) # initialized_value should not rerun the initializer_op if the variable # has already been initialized elsewhere. sess.run(v.assign(1.0)) self.assertEqual(1.0, v.initialized_value().eval()) v_def.ClearField("initial_value_name") with ops.Graph().as_default(), self.test_session() as sess: # Restoring a legacy VariableDef proto that does not have # initial_value_name set should still work. v = variables.Variable(variable_def=v_def) # We should also be able to re-export the variable to a new meta graph. self.assertProtoEquals(v_def, v.to_proto()) # But attempts to use initialized_value will result in errors. 
with self.assertRaises(ValueError): sess.run(v.initialized_value()) def testLoad(self): with self.test_session(): var = variables.Variable(np.zeros((5, 5), np.float32)) variables.global_variables_initializer().run() var.load(np.ones((5, 5), np.float32)) self.assertAllClose(np.ones((5, 5), np.float32), var.eval()) def testRepr(self): var = variables.Variable(np.zeros((5, 5), np.float32), name='noop') self.assertEqual( "<tf.Variable 'noop:0' shape=(5, 5) dtype=float32_ref>", repr(var)) class IsInitializedTest(test.TestCase): def testNoVars(self): with ops.Graph().as_default(), self.test_session() as sess: uninited = variables.report_uninitialized_variables() self.assertEqual(0, sess.run(uninited).size) def testAssertVariablesInitialized(self): with ops.Graph().as_default(), self.test_session() as sess: v = variables.Variable([1, 2], name="v") w = variables.Variable([3, 4], name="w") _ = v, w uninited = variables.report_uninitialized_variables() self.assertAllEqual(np.array([b"v", b"w"]), sess.run(uninited)) variables.global_variables_initializer().run() self.assertEqual(0, sess.run(uninited).size) def testVariableList(self): with ops.Graph().as_default(), self.test_session() as sess: v = variables.Variable([1, 2], name="v") w = variables.Variable([3, 4], name="w") uninited = variables.report_uninitialized_variables() self.assertAllEqual(np.array([b"v", b"w"]), sess.run(uninited)) sess.run(w.initializer) self.assertAllEqual(np.array([b"v"]), sess.run(uninited)) v.initializer.run() self.assertEqual(0, sess.run(uninited).size) def testZeroSizeVarInitialized(self): with ops.Graph().as_default(), self.test_session() as sess: v = variables.Variable(array_ops.zeros([0, 2]), name="v") uninited = variables.report_uninitialized_variables() v.initializer.run() # not strictly necessary self.assertEqual(0, sess.run(uninited).size) def testTrainingWithZeroSizeVar(self): with ops.Graph().as_default(), self.test_session() as sess: a = variables.Variable(array_ops.zeros([0, 2])) b = variables.Variable(array_ops.ones([2, 2])) objective = math_ops.reduce_sum(b + math_ops.matmul( a, a, transpose_a=True)) variables.global_variables_initializer().run() do_opt = gradient_descent.GradientDescentOptimizer(0.1).minimize( objective) sess.run([do_opt]) self.assertAllClose([[0.9, 0.9], [0.9, 0.9]], b.eval()) class ObsoleteIsInitializedTest(test.TestCase): def testNoVars(self): with ops.Graph().as_default(): self.assertEqual(None, variables.assert_variables_initialized()) def testVariables(self): with ops.Graph().as_default(), self.test_session() as sess: v = variables.Variable([1, 2]) w = variables.Variable([3, 4]) _ = v, w inited = variables.assert_variables_initialized() with self.assertRaisesOpError("Attempting to use uninitialized value"): sess.run(inited) variables.global_variables_initializer().run() sess.run(inited) def testVariableList(self): with ops.Graph().as_default(), self.test_session() as sess: v = variables.Variable([1, 2]) w = variables.Variable([3, 4]) inited = variables.assert_variables_initialized([v]) with self.assertRaisesOpError("Attempting to use uninitialized value"): inited.op.run() sess.run(w.initializer) with self.assertRaisesOpError("Attempting to use uninitialized value"): inited.op.run() v.initializer.run() inited.op.run() class PartitionedVariableTest(test.TestCase): def testPartitionedVariable(self): with ops.Graph().as_default(): v0 = variables.Variable([0]) v1 = variables.Variable([1]) v0._set_save_slice_info( variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1])) 
v1._set_save_slice_info( variables.Variable.SaveSliceInfo(v0.name, [2], [1], [1])) partitions = [2] # Pass variable_list as [v1, v0] to ensure they are properly # re-sorted to [v0, v1] based on their slice info offsets. partitioned_variable = variables.PartitionedVariable( name="two_vars", shape=[2], dtype=v0.dtype, variable_list=[v1, v0], partitions=partitions) concatenated = ops.convert_to_tensor(partitioned_variable) num_partitions = len(partitioned_variable) iterated_partitions = list(partitioned_variable) self.assertEqual(2, num_partitions) self.assertEqual([v0, v1], iterated_partitions) self.assertEqual([2], concatenated.get_shape()) self.assertEqual([2], concatenated.shape) def testPartitionedVariableFailures(self): with ops.Graph().as_default(): with self.assertRaisesRegexp(ValueError, "empty"): variables.PartitionedVariable( name="fail", shape=2, dtype=dtypes.int32, variable_list=[], partitions=[]) with self.assertRaisesRegexp(ValueError, "must have a save_slice_info"): v0 = variables.Variable([0]) partitions = [1] variables.PartitionedVariable( name="two_vars", shape=[1], dtype=v0.dtype, variable_list=[v0], partitions=partitions) with self.assertRaisesRegexp(ValueError, "full shapes must match"): v0 = variables.Variable([0]) v1 = variables.Variable([1]) v0._set_save_slice_info( variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1])) v1._set_save_slice_info( variables.Variable.SaveSliceInfo(v0.name, [2], [1], [1])) partitions = [2] variables.PartitionedVariable( name="two_vars", shape=[3], dtype=v0.dtype, variable_list=[v1, v0], partitions=partitions) with self.assertRaisesRegexp(ValueError, "must be positive"): v0 = variables.Variable([0]) v0._set_save_slice_info( variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1])) partitions = [0] variables.PartitionedVariable( name="two_vars", shape=[2], dtype=v0.dtype, variable_list=[v0], partitions=partitions) class VariableContainerTest(test.TestCase): def testContainer(self): with ops.Graph().as_default(): v0 = variables.Variable([0]) with ops.container("l1"): v1 = variables.Variable([1]) with ops.container("l2"): v2 = variables.Variable([2]) special_v = gen_state_ops._variable( shape=[1], dtype=dtypes.float32, name="VariableInL3", container="l3", shared_name="") v3 = variables.Variable([3]) v4 = variables.Variable([4]) self.assertEqual(compat.as_bytes(""), v0.op.get_attr("container")) self.assertEqual(compat.as_bytes("l1"), v1.op.get_attr("container")) self.assertEqual(compat.as_bytes("l2"), v2.op.get_attr("container")) self.assertEqual(compat.as_bytes("l3"), special_v.op.get_attr("container")) self.assertEqual(compat.as_bytes("l1"), v3.op.get_attr("container")) self.assertEqual(compat.as_bytes(""), v4.op.get_attr("container")) if __name__ == "__main__": test.main()<|fim▁end|>
# See the License for the specific language governing permissions and # limitations under the License.
<|file_name|>remove_double_vertices_and_faces.py<|end_file_name|><|fim▁begin|>#----------------------------------------------------------------------------- #remove duplicates v1.3 #best way to remove duplicates, just select the objects you want the duplicates removed, then run this script import bpy for obj in bpy.context.selected_objects: if obj.type == 'MESH': bpy.data.scenes[0].objects.active = obj # make obj active to do operations on it bpy.ops.object.mode_set(mode='OBJECT', toggle=False) # set 3D View to Object Mode (probably redundant) bpy.ops.object.mode_set(mode='EDIT', toggle=False) # set 3D View to Edit Mode bpy.context.tool_settings.mesh_select_mode = [False, False, True] # set to face select in 3D View Editor bpy.ops.mesh.select_all(action='SELECT') # make sure all faces in mesh are selected bpy.ops.object.mode_set(mode='OBJECT', toggle=False) # very silly, you have to be in object mode to select faces!! found = set([]) # set of found sorted vertices pairs for face in obj.data.polygons: facevertsorted = sorted(face.vertices[:]) # sort vertices of the face to compare later if str(facevertsorted) not in found: # if sorted vertices are not in the set found.add(str(facevertsorted)) # add them in the set obj.data.polygons[face.index].select = False # deselect faces I want to keep <|fim▁hole|> bpy.ops.mesh.normals_make_consistent(inside=False) # recalculate normals bpy.ops.mesh.remove_doubles(threshold=0.0001, use_unselected=False) #remove doubles bpy.ops.mesh.normals_make_consistent(inside=False) # recalculate normals (this one or two lines above is redundant) bpy.ops.object.mode_set(mode='OBJECT', toggle=False) # set to Object Mode AGAIN<|fim▁end|>
bpy.ops.object.mode_set(mode='EDIT', toggle=False) # set to Edit Mode AGAIN bpy.ops.mesh.delete(type='FACE') # delete double faces bpy.ops.mesh.select_all(action='SELECT')
<|file_name|>PacketSocket.cpp<|end_file_name|><|fim▁begin|>#include <assert.h> #include <stdint.h> #include <OpenP2P/Buffer.hpp> #include <OpenP2P/Stream/BinaryStream.hpp> #include <OpenP2P/Event/Source.hpp> #include <OpenP2P/Event/Wait.hpp> #include <OpenP2P/RootNetwork/Endpoint.hpp> #include <OpenP2P/RootNetwork/Packet.hpp> #include <OpenP2P/RootNetwork/PacketSocket.hpp> #include <OpenP2P/RootNetwork/SignedPacket.hpp> <|fim▁hole|> PacketSocket::PacketSocket(Socket<UDP::Endpoint, Buffer>& udpSocket) : udpSocket_(udpSocket) { } bool PacketSocket::isValid() const { return udpSocket_.isValid(); } Event::Source PacketSocket::eventSource() const { return udpSocket_.eventSource(); } bool PacketSocket::receive(Endpoint& endpoint, SignedPacket& signedPacket) { UDP::Endpoint udpEndpoint; Buffer buffer; if (!udpSocket_.receive(udpEndpoint, buffer)) { return false; } endpoint.kind = Endpoint::UDPIPV6; endpoint.udpEndpoint = udpEndpoint; BufferIterator bufferIterator(buffer); BinaryIStream blockingReader(bufferIterator); signedPacket.packet = ReadPacket(blockingReader); signedPacket.signature = ReadSignature(blockingReader); return true; } bool PacketSocket::send(const Endpoint& endpoint, const SignedPacket& signedPacket) { assert(endpoint.kind == Endpoint::UDPIPV6); Buffer buffer; BufferBuilder bufferBuilder(buffer); BinaryOStream blockingWriter(bufferBuilder); WritePacket(blockingWriter, signedPacket.packet); WriteSignature(blockingWriter, signedPacket.signature); return udpSocket_.send(endpoint.udpEndpoint, buffer); } } }<|fim▁end|>
namespace OpenP2P { namespace RootNetwork {
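Every record in this dump follows the same fill-in-the-middle layout: a <|file_name|>…<|end_file_name|> header, the source file wrapped in <|fim▁begin|>…<|fim▁end|>, and a <|fim▁hole|> marker standing in for the span that was cut out and stored as the paired completion — in the PacketSocket.cpp record just above, that held-out span is exactly the namespace opening that belongs between the includes and the constructor. Below is a minimal Python sketch of splicing such a pair back into the original file; the sentinel strings are copied from the records themselves, while the prompt/completion field names are an assumption about the row schema, not a documented contract.

FILE_NAME = "<|file_name|>"
END_FILE_NAME = "<|end_file_name|>"
FIM_BEGIN = "<|fim▁begin|>"
FIM_HOLE = "<|fim▁hole|>"
FIM_END = "<|fim▁end|>"

def reassemble(prompt: str, completion: str) -> str:
    """Splice the held-out completion back into the marked-up prompt."""
    if prompt.startswith(FILE_NAME):
        # Drop the filename header; keep only the file body.
        prompt = prompt.split(END_FILE_NAME, 1)[1]
    if prompt.startswith(FIM_BEGIN):
        prompt = prompt[len(FIM_BEGIN):]
    if prompt.endswith(FIM_END):
        prompt = prompt[:-len(FIM_END)]
    prefix, suffix = prompt.split(FIM_HOLE, 1)  # assumes exactly one hole
    return prefix + completion + suffix

Run against the record above, reassemble() returns the C++ file with "namespace OpenP2P { namespace RootNetwork {" restored between the includes and PacketSocket's constructor.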
<|file_name|>bitcoin_gu.ts<|end_file_name|><|fim▁begin|><TS version="2.1" language="gu"> <context> <name>AddressBookPage</name> <message> <source>Right-click to edit address or label</source> <translation type="unfinished">સરનામું અથવા લેબલ બદલવા માટે જમણું-ક્લિક કરો</translation> </message> <message> <source>Create a new address</source> <translation type="unfinished">નવું સરનામું બનાવો</translation> </message> <message> <source>&amp;New</source> <translation type="unfinished">નવું</translation> </message> <message> <source>Copy the currently selected address to the system clipboard</source> <translation type="unfinished">હાલમાં પસંદ કરેલા સરનામાંને સિસ્ટમ ક્લિપબોર્ડ પર નકલ કરો</translation> </message> <message> <source>&amp;Copy</source> <translation type="unfinished">&amp; નકલ કરો</translation> </message> <message> <source>C&amp;lose</source> <translation type="unfinished">&amp; બંધ કરો</translation> </message> <message> <source>Delete the currently selected address from the list</source> <translation type="unfinished">સૂચિમાંથી હાલમાં પસંદ કરેલું સરનામું કાઢી નાખો</translation> </message> <message> <source>Enter address or label to search</source> <translation type="unfinished">શોધવા માટે સરનામું અથવા લેબલ દાખલ કરો</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation type="unfinished">હાલ માં પસંદ કરેલ માહિતી ને ફાઇલમાં નિકાસ કરો</translation> </message> <message> <source>&amp;Export</source> <translation type="unfinished">&amp; નિકાસ કરો</translation> </message> <message> <source>&amp;Delete</source> <translation type="unfinished">&amp; કાઢી નાખો</translation> </message> <message> <source>Choose the address to send coins to</source> <translation type="unfinished">સિક્કા મોકલવા માટે સરનામું પસંદ કરો</translation> </message> <message> <source>Choose the address to receive coins with</source> <translation type="unfinished">સિક્કા મેળવવા માટે સરનામું પસંદ કરો</translation> </message> <message> <source>C&amp;hoose</source> <translation type="unfinished">&amp; પસંદ કરો</translation> </message> <message> <source>Sending addresses</source> <translation type="unfinished">મોકલવા માટે ના સરનામાં</translation> </message> <message> <source>Receiving addresses</source> <translation type="unfinished">મેળવવા માટે ના સરનામાં</translation> </message> <message> <source>These are your Particl addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation type="unfinished">આ તમારા ચુકવણી કરવા માટે ના સરનામાં છે, હંમેશા કિંમત અને મોકલવાના ના સરનામાં ચકાસી લેવા સિક્કા આપતા પહેલા.</translation> </message> <message> <source>These are your Particl addresses for receiving payments. Use the 'Create new receiving address' button in the receive tab to create new addresses. Signing is only possible with addresses of the type 'legacy'.</source> <translation type="unfinished">આ તમારુ ચૂકવણું લેવા માટે નું સરનામા છે. નવું સરનામું બનાવા માટે "મેળવવા" માટે ની ટેબ માં "ચૂકવણું લેવા માટે નવું સરનામુ બનાવો" બટન વાપરો. 
ડિજિટલી સહી કરવા માટે 'legacy એટલેકે જુના પ્રકાર નુ' પ્રકાર નું સરનામું હોવું જરૂરી છે.</translation> </message> <message> <source>&amp;Copy Address</source> <translation type="unfinished">&amp; સરનામુ નકલ કરો</translation> </message> <message> <source>Copy &amp;Label</source> <translation type="unfinished">નકલ &amp; લેબલ</translation> </message> <message> <source>&amp;Edit</source> <translation type="unfinished">&amp; બદલો</translation> </message> <message> <source>Export Address List</source> <translation type="unfinished">સરનામાં ની સૂચિ નો નિકાસ કરો</translation> </message> <message> <source>Exporting Failed</source> <translation type="unfinished">નિકાસ ની પ્ર્રાક્રિયા નિષ્ફળ ગયેલ છે</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <source>Label</source> <translation type="unfinished">ચિઠ્ઠી</translation> </message> <message> <source>Address</source> <translation type="unfinished">સરનામુ</translation> </message> <message> <source>(no label)</source> <translation type="unfinished">લેબલ નથી</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <source>Passphrase Dialog</source> <translation type="unfinished">ગુપ્ત શબ્દ માટે નુ ડાયલોગ</translation> </message> <message> <source>Enter passphrase</source> <translation type="unfinished">ગુપ્ત શબ્દ દાખલ કરો </translation> </message> <message> <source>New passphrase</source> <translation type="unfinished">નવો ગુપ્ત શબ્દ</translation> </message> <message> <source>Repeat new passphrase</source> <translation type="unfinished">ગુપ્ત શબ્દ ફરી નાખો</translation> </message> <message> <source>Show passphrase</source> <translation type="unfinished">ગુપ્ત શબ્દ જોવો</translation> </message> <message> <source>Encrypt wallet</source> <translation type="unfinished">સાંકેતિક પાકીટ</translation> </message> <message> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation type="unfinished">પાકીટ અવલોકન જરુરી છે પાકીટ ઓપન કરવા માટે</translation> </message> <message> <source>Unlock wallet</source> <translation type="unfinished">પાકીટ ખુલ્લુ</translation> </message> <message> <source>Change passphrase</source> <translation type="unfinished">ગુપ્ત શબ્દ બદલો</translation> </message> <message> <source>Confirm wallet encryption</source> <translation type="unfinished">એન્ક્રિપ્શન ખાતરી કરો </translation> </message> <message> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR PARTICL&lt;/b&gt;!</source> <translation type="unfinished">ચેતવણી: જો તમે તમારું વletલેટ એન્ક્રિપ્ટ કરો છો અને તમારો પાસફ્રેઝ ખોવાઈ જાય છે, તો તમે તમારા બધા બિટકોઇન્સ ગુમાવશો!</translation> </message> <message> <source>Are you sure you wish to encrypt your wallet?</source> <translation type="unfinished">શું તમે ખરેખર તમારા પાકીટને એન્ક્રિપ્ટ કરવા માંગો છો?</translation> </message> <message> <source>Wallet encrypted</source> <translation type="unfinished">પાકીટ એન્ક્રિપ્ટ થયેલ</translation> </message> </context> <context> <name>QObject</name> <message numerus="yes"> <source>%n second(s)</source><|fim▁hole|> <translation type="unfinished"> <numerusform /> <numerusform /> </translation> </message> <message numerus="yes"> <source>%n minute(s)</source> <translation type="unfinished"> <numerusform /> <numerusform /> </translation> </message> <message numerus="yes"> <source>%n hour(s)</source> <translation type="unfinished"> <numerusform /> <numerusform /> </translation> </message> <message numerus="yes"> <source>%n day(s)</source> 
<translation type="unfinished"> <numerusform /> <numerusform /> </translation> </message> <message numerus="yes"> <source>%n week(s)</source> <translation type="unfinished"> <numerusform /> <numerusform /> </translation> </message> <message numerus="yes"> <source>%n year(s)</source> <translation type="unfinished"> <numerusform /> <numerusform /> </translation> </message> </context> <context> <name>BitcoinGUI</name> <message numerus="yes"> <source>Processed %n block(s) of transaction history.</source> <translation type="unfinished"> <numerusform /> <numerusform /> </translation> </message> <message numerus="yes"> <source>%n active connection(s) to Particl network.</source> <extracomment>A substring of the tooltip.</extracomment> <translation type="unfinished"> <numerusform /> <numerusform /> </translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <source>(no label)</source> <translation type="unfinished">લેબલ નથી</translation> </message> </context> <context> <name>Intro</name> <message numerus="yes"> <source>(sufficient to restore backups %n day(s) old)</source> <extracomment>Explanatory text on the capability of the current prune target.</extracomment> <translation type="unfinished"> <numerusform /> <numerusform /> </translation> </message> </context> <context> <name>PeerTableModel</name> <message> <source>Address</source> <extracomment>Title of Peers Table column which contains the IP/Onion/I2P address of the connected peer.</extracomment> <translation type="unfinished">સરનામુ</translation> </message> </context> <context> <name>RecentRequestsTableModel</name> <message> <source>Label</source> <translation type="unfinished">ચિઠ્ઠી</translation> </message> <message> <source>(no label)</source> <translation type="unfinished">લેબલ નથી</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message numerus="yes"> <source>Estimated to begin confirmation within %n block(s).</source> <translation type="unfinished"> <numerusform /> <numerusform /> </translation> </message> <message> <source>(no label)</source> <translation type="unfinished">લેબલ નથી</translation> </message> </context> <context> <name>TransactionDesc</name> <message numerus="yes"> <source>matures in %n more block(s)</source> <translation type="unfinished"> <numerusform /> <numerusform /> </translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <source>Label</source> <translation type="unfinished">ચિઠ્ઠી</translation> </message> <message> <source>(no label)</source> <translation type="unfinished">લેબલ નથી</translation> </message> </context> <context> <name>TransactionView</name> <message> <source>Label</source> <translation type="unfinished">ચિઠ્ઠી</translation> </message> <message> <source>Address</source> <translation type="unfinished">સરનામુ</translation> </message> <message> <source>Exporting Failed</source> <translation type="unfinished">નિકાસ ની પ્ર્રાક્રિયા નિષ્ફળ ગયેલ છે</translation> </message> </context> <context> <name>WalletView</name> <message> <source>&amp;Export</source> <translation type="unfinished">&amp; નિકાસ કરો</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation type="unfinished">હાલ માં પસંદ કરેલ માહિતી ને ફાઇલમાં નિકાસ કરો</translation> </message> </context> </TS><|fim▁end|>
<|file_name|>router.js<|end_file_name|><|fim▁begin|>define(['views/Index','views/Cart','views/CategoryEdit','views/Categories','views/Product','views/Products','views/ProductEdit','views/ProductDetail','views/admin/Index','models/Product','models/Category','models/CartCollection','models/ProductCollection','models/CategoryCollection'], function(IndexView,CartView,CategoryEditView,CategoriesView,ProductView,ProductsView,ProductEditView,ProductDetailView,AdminIndexView,Product,Category,CartCollection,ProductCollection,CategoryCollection){ var BizRouter = Backbone.Router.extend({ currentView : null, routes: { '': 'index', 'index': 'index', 'cart': 'myCart', 'products(/:id)': 'products', 'product/add(/:cid)': 'productAdd', 'product/edit/:id': 'productEdit', 'product/view/:id': 'productView', 'categories(/:id)': 'categories', 'category/add(/:pid)': 'categoryAdd', 'category/edit/:id': 'categoryEdit', 'admin/index': 'adminIndex', }, changeView: function(view){ if(null != this.currentView){ this.currentView.undelegateEvents(); } this.currentView = view; this.currentView.render(); }, index: function(){ this.changeView(new IndexView()); }, myCart: function(){ var cartCollection = new CartCollection(); cartCollection.url = '/cart'; this.changeView(new CartView({collection:cartCollection})); cartCollection.fetch(); }, /** product related */ products: function(id){ var cid = id || ''; var productCollection = new ProductCollection(); productCollection.url = '/products?cid=' + cid; this.changeView(new ProductsView({collection: productCollection})); productCollection.fetch(); }, productAdd: function(categoryId){<|fim▁hole|> var productModel = new Product({ main_cat_id: cid, }); this.changeView(new ProductEditView({model: productModel})); }, productEdit: function(id){ var product = new Product(); product.url = '/products/' + id; this.changeView(new ProductEditView({model: product})); product.fetch(); }, productView: function(id){ var product = new Product(); product.url = '/products/' + id; this.changeView(new ProductDetailView({model: product})); product.fetch(); }, /** category related */ categories: function(id){ var pid = id || ''; var categoryCollection = new CategoryCollection(); categoryCollection.url = '/categories?pid=' + pid; this.changeView(new CategoriesView({collection: categoryCollection})); categoryCollection.fetch(); }, categoryAdd: function(parentId){ var pid = parentId || ''; var categoryModel = new Category({ parent: pid }); this.changeView(new CategoryEditView({model: categoryModel})); }, categoryEdit: function(id){ var category = new Category(); category.url = '/categories/' + id; this.changeView(new CategoryEditView({model: category})); category.fetch(); }, adminIndex: function(){ this.changeView(new AdminIndexView()); } }); return new BizRouter(); });<|fim▁end|>
var cid = categoryId || '';
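Since each prompt opens with a <|file_name|> header, rows in a mixed dump like this can be routed by language before any further processing. A rough sketch follows; the extension-to-language table is a heuristic assumed here, not metadata shipped with the rows, and extensions can mislead — DynamicFormComponent.ts in the next record is TypeScript, while bitcoin_gu.ts above is a Qt Linguist translation file with the same suffix.

import os

# Extensions observed in the surrounding records; extend as needed.
EXT_TO_LANG = {
    ".cc": "cpp",
    ".py": "python",
    ".rs": "rust",
    ".ts": "typescript",  # caveat: Qt Linguist translation files also use .ts
    ".js": "javascript",
    ".go": "go",
}

def record_language(prompt: str) -> str:
    """Map the <|file_name|> header's extension to a rough language tag."""
    if not prompt.startswith("<|file_name|>"):
        return "unknown"
    name = prompt[len("<|file_name|>"):].split("<|end_file_name|>", 1)[0]
    return EXT_TO_LANG.get(os.path.splitext(name)[1].lower(), "unknown")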
<|file_name|>DynamicFormComponent.ts<|end_file_name|><|fim▁begin|>import { Component, EventEmitter, Input, OnChanges, OnInit, Output, SimpleChanges } from '@angular/core'; import { FormControl, FormGroup } from '@angular/forms'; import { DynamicFormConfig } from 'common/ui/dynamicform/DynamicFormConfig'; declare let $; declare let _; @Component({ selector: 'dynamic-form-component', templateUrl: './DynamicFormComponent.html', styleUrls: ['./DynamicFormComponent.ng.pcss'] }) export class DynamicFormComponent implements OnInit, OnChanges { @Input() config: DynamicFormConfig; @Input() patchValue: { [key: string]: any } = {}; @Output() submitted = new EventEmitter(); form: FormGroup; constructor() { } ngOnInit(): void { this.buildForm(); } ngOnChanges(changes: SimpleChanges): void { if (this.form && this.patchValue) { this.patchForm(this.patchValue) } } submit(event: any) { event.preventDefault(); if (!this.form.valid) { console.log('form invalid'); return; } this.submitted.emit(this.form.value) } public patchForm(obj: { [key: string]: any; }) { let values = {}; _.forEach(obj, (value, key) => { values[key] = value;<|fim▁hole|> this.form.patchValue(values); } private buildForm() { let group = {}; let patchValue = {}; _.forEach(this.config.controls, (control) => { group[control.key] = new FormControl(control.value, control.validator); patchValue[control.key] = control.value; }); this.form = new FormGroup(group); } }<|fim▁end|>
});
<|file_name|>default_test.js<|end_file_name|><|fim▁begin|>describe("", function() { var rootEl; beforeEach(function() { rootEl = browser.rootEl; browser.get("build/docs/examples/example-example100/index.html"); }); it('should format numbers', function() { expect(element(by.id('number-default')).getText()).toBe('1,234.568'); expect(element(by.binding('val | number:0')).getText()).toBe('1,235'); expect(element(by.binding('-val | number:4')).getText()).toBe('-1,234.5679'); }); <|fim▁hole|> expect(element(by.id('number-default')).getText()).toBe('3,374.333'); expect(element(by.binding('val | number:0')).getText()).toBe('3,374'); expect(element(by.binding('-val | number:4')).getText()).toBe('-3,374.3330'); }); });<|fim▁end|>
it('should update', function() { element(by.model('val')).clear(); element(by.model('val')).sendKeys('3374.333');
<|file_name|>network.go<|end_file_name|><|fim▁begin|>package core import ( "os"; "container/list"; "net"; "log"; "irc"; "runloop"; ) type Network struct { name string; server *server; clients *list.List; listen *listenConn; } func newNetwork(name string, serverConn net.Conn, listen net.Listener) *Network { var network *Network; accept := func(conn net.Conn) { runloop.CallLater(func() { network.addClient(conn) })<|fim▁hole|> error := func(err os.Error) { // TODO listener failed }; l := newListenConn(listen, accept, error); network = &Network{name: name, clients: list.New(), listen: l}; network.server = newServer(serverConn, network); return network; } func (network *Network) addClient(conn net.Conn) { // TODO error handler on client for disconnect client := newClient(conn, network); network.clients.PushBack(client); log.Stderrf("client connected from %s\n", conn.RemoteAddr()); } // SendToServer transmits an IRC message to the server. If the server // connection is down then the message is dropped. func (network *Network) SendToServer(msg *irc.Message) { // TODO network may be down, but is this the way to handle it? if network.server != nil { network.server.Send(msg) } } // SendToClients transmits an IRC message to all connected clients. func (network *Network) SendToClients(msg *irc.Message) { for c := range network.clients.Iter() { c.(*client).Send(msg) } } // SendNoticeToClient transmits an IRC NOTICE message to a clients. func (network *Network) SendNoticeToClient(conn Conn, line string) { nick := "bouncin"; // TODO use nick conn.Send(&irc.Message{Command: "NOTICE", Params: []string{nick, line}}); } var networks = make(map[string] *Network); func AddNetwork(name string, server net.Conn, listen net.Listener) *Network { // TODO what if network already exists? network := newNetwork(name, server, listen); networks[name] = network; return network; }<|fim▁end|>
};
<|file_name|>_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings <|fim▁hole|> from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class Operations: """Operations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.rdbms.mariadb.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config async def list( self, **kwargs: Any ) -> "_models.OperationListResult": """Lists all of the available REST API operations. :keyword callable cls: A custom type or function that will be passed the direct response :return: OperationListResult, or the result of cls(response) :rtype: ~azure.mgmt.rdbms.mariadb.models.OperationListResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" accept = "application/json" # Construct URL url = self.list.metadata['url'] # type: ignore # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('OperationListResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized list.metadata = {'url': '/providers/Microsoft.DBForMariaDB/operations'} # type: ignore<|fim▁end|>
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat
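Not every row in a dump like this arrives intact — the fragment at the very top of this section breaks off mid-array, and a few records here show no held-out text at all between one <|fim▁end|> and the next file header — so consumers may want a cheap structural gate before splicing. A sketch under the same schema assumption as the reassemble() example above: exactly one of each sentinel, in prefix/hole/suffix order.

def is_well_formed(prompt: str) -> bool:
    """Return True if the prompt carries exactly one of each FIM sentinel,
    ordered begin < hole < end."""
    markers = ("<|fim▁begin|>", "<|fim▁hole|>", "<|fim▁end|>")
    if any(prompt.count(m) != 1 for m in markers):
        return False
    begin, hole, end = (prompt.index(m) for m in markers)
    return begin < hole < end

Rows that fail this check are better skipped than spliced blindly: with zero or multiple holes, the split in reassemble() above raises instead of producing a corrupted file.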
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Servo, the mighty web browser engine from the future. //! //! This is a very simple library that wires all of Servo's components //! together as type `Servo`, along with a generic client //! implementing the `WindowMethods` trait, to create a working web //! browser. //! //! The `Servo` type is responsible for configuring a //! `Constellation`, which does the heavy lifting of coordinating all //! of Servo's internal subsystems, including the `ScriptThread` and the //! `LayoutThread`, as well maintains the navigation context. //! //! `Servo` is fed events from a generic type that implements the //! `WindowMethods` trait. extern crate env_logger; #[cfg(not(target_os = "windows"))] extern crate gaol; extern crate gleam; extern crate log; pub extern crate bluetooth; pub extern crate bluetooth_traits; pub extern crate canvas; pub extern crate canvas_traits; pub extern crate compositing; pub extern crate constellation; pub extern crate debugger; pub extern crate devtools; pub extern crate devtools_traits; pub extern crate euclid; pub extern crate gfx; pub extern crate ipc_channel; pub extern crate layout_thread; pub extern crate msg; pub extern crate net; pub extern crate net_traits; pub extern crate profile; pub extern crate profile_traits; pub extern crate script; pub extern crate script_traits; pub extern crate script_layout_interface; pub extern crate servo_config; pub extern crate servo_geometry; pub extern crate servo_url; pub extern crate style; pub extern crate style_traits; pub extern crate webrender_api; pub extern crate webvr; pub extern crate webvr_traits; #[cfg(feature = "webdriver")] extern crate webdriver_server; extern crate webrender; #[cfg(feature = "webdriver")] fn webdriver(port: u16, constellation: Sender<ConstellationMsg>) { webdriver_server::start_server(port, constellation); } #[cfg(not(feature = "webdriver"))] fn webdriver(_port: u16, _constellation: Sender<ConstellationMsg>) { } use bluetooth::BluetoothThreadFactory; use bluetooth_traits::BluetoothRequest; use canvas::gl_context::GLContextFactory; use canvas::webgl_thread::WebGLThreads; use compositing::IOCompositor; use compositing::compositor_thread::{self, CompositorProxy, CompositorReceiver, InitialCompositorState}; use compositing::windowing::WindowEvent; use compositing::windowing::WindowMethods; use constellation::{Constellation, InitialConstellationState, UnprivilegedPipelineContent}; use constellation::{FromCompositorLogger, FromScriptLogger}; #[cfg(not(target_os = "windows"))] use constellation::content_process_sandbox_profile; use env_logger::Logger as EnvLogger; #[cfg(not(target_os = "windows"))] use gaol::sandbox::{ChildSandbox, ChildSandboxMethods}; use gfx::font_cache_thread::FontCacheThread; use ipc_channel::ipc::{self, IpcSender}; use log::{Log, LogMetadata, LogRecord}; use net::resource_thread::new_resource_threads; use net_traits::IpcSend; use profile::mem as profile_mem; use profile::time as profile_time; use profile_traits::mem; use profile_traits::time; use script_traits::{ConstellationMsg, SWManagerSenders, ScriptToConstellationChan}; use servo_config::opts; use servo_config::prefs::PREFS; use servo_config::resource_files::resources_dir_path; use std::borrow::Cow; use std::cmp::max; use std::path::PathBuf; use std::rc::Rc; use 
std::sync::mpsc::{Sender, channel}; use webrender::renderer::RendererKind; use webvr::{WebVRThread, WebVRCompositorHandler}; pub use gleam::gl; pub use servo_config as config; pub use servo_url as url; pub use msg::constellation_msg::TopLevelBrowsingContextId as BrowserId; /// The in-process interface to Servo. /// /// It does everything necessary to render the web, primarily /// orchestrating the interaction between JavaScript, CSS layout, /// rendering, and the client window. /// /// Clients create a `Servo` instance for a given reference-counted type /// implementing `WindowMethods`, which is the bridge to whatever /// application Servo is embedded in. Clients then create an event /// loop to pump messages between the embedding application and /// various browser components. pub struct Servo<Window: WindowMethods + 'static> { compositor: IOCompositor<Window>, constellation_chan: Sender<ConstellationMsg>, } impl<Window> Servo<Window> where Window: WindowMethods + 'static { pub fn new(window: Rc<Window>) -> Servo<Window> { // Global configuration options, parsed from the command line. let opts = opts::get(); // Make sure the gl context is made current. window.prepare_for_composite(0, 0); // Get both endpoints of a special channel for communication between // the client window and the compositor. This channel is unique because // messages to client may need to pump a platform-specific event loop // to deliver the message. let (compositor_proxy, compositor_receiver) = create_compositor_channel(window.create_event_loop_waker()); let supports_clipboard = window.supports_clipboard(); let time_profiler_chan = profile_time::Profiler::create(&opts.time_profiling, opts.time_profiler_trace_path.clone()); let mem_profiler_chan = profile_mem::Profiler::create(opts.mem_profiler_period); let debugger_chan = opts.debugger_port.map(|port| { debugger::start_server(port) }); let devtools_chan = opts.devtools_port.map(|port| { devtools::start_server(port) }); let mut resource_path = resources_dir_path().unwrap(); resource_path.push("shaders"); let (mut webrender, webrender_api_sender) = { // TODO(gw): Duplicates device_pixels_per_screen_px from compositor. Tidy up! 
let scale_factor = window.hidpi_factor().get(); let device_pixel_ratio = match opts.device_pixels_per_px { Some(device_pixels_per_px) => device_pixels_per_px, None => match opts.output_file { Some(_) => 1.0, None => scale_factor, } }; let renderer_kind = if opts::get().should_use_osmesa() { RendererKind::OSMesa } else { RendererKind::Native }; let recorder = if opts.webrender_record { let record_path = PathBuf::from("wr-record.bin"); let recorder = Box::new(webrender::BinaryRecorder::new(&record_path)); Some(recorder as Box<webrender::ApiRecordingReceiver>) } else { None }; let mut debug_flags = webrender::renderer::DebugFlags::empty(); debug_flags.set(webrender::renderer::PROFILER_DBG, opts.webrender_stats); webrender::Renderer::new(window.gl(), webrender::RendererOptions { device_pixel_ratio: device_pixel_ratio, resource_override_path: Some(resource_path), enable_aa: opts.enable_text_antialiasing, debug_flags: debug_flags, enable_batcher: opts.webrender_batch, debug: opts.webrender_debug, recorder: recorder, precache_shaders: opts.precache_shaders, enable_scrollbars: opts.output_file.is_none(), renderer_kind: renderer_kind, enable_subpixel_aa: opts.enable_subpixel_text_antialiasing, ..Default::default() }).expect("Unable to initialize webrender!") }; let webrender_api = webrender_api_sender.create_api(); let webrender_document = webrender_api.add_document(window.framebuffer_size()); // Important that this call is done in a single-threaded fashion, we // can't defer it after `create_constellation` has started. script::init(); // Create the constellation, which maintains the engine // pipelines, including the script and layout threads, as well // as the navigation context. let (constellation_chan, sw_senders) = create_constellation(opts.user_agent.clone(), opts.config_dir.clone(), compositor_proxy.clone_compositor_proxy(), time_profiler_chan.clone(), mem_profiler_chan.clone(), debugger_chan, devtools_chan, supports_clipboard, &mut webrender, webrender_document, webrender_api_sender); // Send the constellation's swmanager sender to service worker manager thread script::init_service_workers(sw_senders); if cfg!(feature = "webdriver") { if let Some(port) = opts.webdriver_port { webdriver(port, constellation_chan.clone()); } } // The compositor coordinates with the client window to create the final // rendered page and display it somewhere. 
let compositor = IOCompositor::create(window, InitialCompositorState { sender: compositor_proxy, receiver: compositor_receiver, constellation_chan: constellation_chan.clone(), time_profiler_chan: time_profiler_chan, mem_profiler_chan: mem_profiler_chan, webrender, webrender_document, webrender_api, }); Servo { compositor: compositor, constellation_chan: constellation_chan, } } pub fn handle_events(&mut self, events: Vec<WindowEvent>) -> bool { self.compositor.handle_events(events) } pub fn repaint_synchronously(&mut self) { self.compositor.repaint_synchronously() } pub fn pinch_zoom_level(&self) -> f32 { self.compositor.pinch_zoom_level() } pub fn setup_logging(&self) { let constellation_chan = self.constellation_chan.clone(); log::set_logger(|max_log_level| { let env_logger = EnvLogger::new(); let con_logger = FromCompositorLogger::new(constellation_chan); let filter = max(env_logger.filter(), con_logger.filter()); let logger = BothLogger(env_logger, con_logger); max_log_level.set(filter); Box::new(logger) }).expect("Failed to set logger.") } } fn create_compositor_channel(event_loop_waker: Box<compositor_thread::EventLoopWaker>) -> (CompositorProxy, CompositorReceiver) { let (sender, receiver) = channel(); (CompositorProxy { sender: sender, event_loop_waker: event_loop_waker, }, CompositorReceiver { receiver: receiver }) } fn create_constellation(user_agent: Cow<'static, str>, config_dir: Option<PathBuf>, compositor_proxy: CompositorProxy, time_profiler_chan: time::ProfilerChan, mem_profiler_chan: mem::ProfilerChan, debugger_chan: Option<debugger::Sender>, devtools_chan: Option<Sender<devtools_traits::DevtoolsControlMsg>>, supports_clipboard: bool, webrender: &mut webrender::Renderer, webrender_document: webrender_api::DocumentId, webrender_api_sender: webrender_api::RenderApiSender) -> (Sender<ConstellationMsg>, SWManagerSenders) { let bluetooth_thread: IpcSender<BluetoothRequest> = BluetoothThreadFactory::new(); let (public_resource_threads, private_resource_threads) = new_resource_threads(user_agent, devtools_chan.clone(), time_profiler_chan.clone(), config_dir); let font_cache_thread = FontCacheThread::new(public_resource_threads.sender(), Some(webrender_api_sender.create_api())); let resource_sender = public_resource_threads.sender(); let (webvr_chan, webvr_constellation_sender, webvr_compositor) = if PREFS.is_webvr_enabled() { // WebVR initialization let (mut handler, sender) = WebVRCompositorHandler::new(); let (webvr_thread, constellation_sender) = WebVRThread::spawn(sender); handler.set_webvr_thread_sender(webvr_thread.clone()); (Some(webvr_thread), Some(constellation_sender), Some(handler)) } else { (None, None, None) }; // GLContext factory used to create WebGL Contexts let gl_factory = if opts::get().should_use_osmesa() { GLContextFactory::current_osmesa_handle().unwrap() } else { GLContextFactory::current_native_handle(&compositor_proxy).unwrap() }; // Initialize WebGL Thread entry point. 
let (webgl_threads, image_handler) = WebGLThreads::new(gl_factory, webrender_api_sender.clone(), webvr_compositor.map(|c| c as Box<_>)); // Set webrender external image handler for WebGL textures webrender.set_external_image_handler(image_handler); let initial_state = InitialConstellationState { compositor_proxy, debugger_chan, devtools_chan, bluetooth_thread, font_cache_thread, public_resource_threads, private_resource_threads, time_profiler_chan, mem_profiler_chan, supports_clipboard, webrender_document, webrender_api_sender, webgl_threads, webvr_chan, }; let (constellation_chan, from_swmanager_sender) = Constellation::<script_layout_interface::message::Msg, layout_thread::LayoutThread, script::script_thread::ScriptThread>::start(initial_state); if let Some(webvr_constellation_sender) = webvr_constellation_sender { // Set constellation channel used by WebVR thread to broadcast events webvr_constellation_sender.send(constellation_chan.clone()).unwrap(); } // channels to communicate with Service Worker Manager let sw_senders = SWManagerSenders { swmanager_sender: from_swmanager_sender, resource_sender: resource_sender }; (constellation_chan, sw_senders) } // A logger that logs to two downstream loggers. // This should probably be in the log crate. struct BothLogger<Log1, Log2>(Log1, Log2); impl<Log1, Log2> Log for BothLogger<Log1, Log2> where Log1: Log, Log2: Log { fn enabled(&self, metadata: &LogMetadata) -> bool { self.0.enabled(metadata) || self.1.enabled(metadata) } fn log(&self, record: &LogRecord) { self.0.log(record); self.1.log(record); } } pub fn set_logger(script_to_constellation_chan: ScriptToConstellationChan) { log::set_logger(|max_log_level| { let env_logger = EnvLogger::new(); let con_logger = FromScriptLogger::new(script_to_constellation_chan); let filter = max(env_logger.filter(), con_logger.filter()); let logger = BothLogger(env_logger, con_logger); max_log_level.set(filter); Box::new(logger) }).expect("Failed to set logger.") } /// Content process entry point. pub fn run_content_process(token: String) { let (unprivileged_content_sender, unprivileged_content_receiver) = ipc::channel::<UnprivilegedPipelineContent>().unwrap(); let connection_bootstrap: IpcSender<IpcSender<UnprivilegedPipelineContent>> = IpcSender::connect(token).unwrap(); connection_bootstrap.send(unprivileged_content_sender).unwrap(); let unprivileged_content = unprivileged_content_receiver.recv().unwrap(); opts::set_defaults(unprivileged_content.opts()); PREFS.extend(unprivileged_content.prefs()); set_logger(unprivileged_content.script_to_constellation_chan().clone()); // Enter the sandbox if necessary.<|fim▁hole|> create_sandbox(); } // send the required channels to the service worker manager let sw_senders = unprivileged_content.swmanager_senders(); script::init(); script::init_service_workers(sw_senders); unprivileged_content.start_all::<script_layout_interface::message::Msg, layout_thread::LayoutThread, script::script_thread::ScriptThread>(true); } // This is a workaround for https://github.com/rust-lang/rust/pull/30175 until // https://github.com/lfairy/rust-errno/pull/5 lands, and should be removed once // we update Servo with the rust-errno crate. 
#[cfg(target_os = "android")] #[no_mangle] pub unsafe extern fn __errno_location() -> *mut i32 { extern { fn __errno() -> *mut i32; } __errno() } #[cfg(not(target_os = "windows"))] fn create_sandbox() { ChildSandbox::new(content_process_sandbox_profile()).activate() .expect("Failed to activate sandbox!"); } #[cfg(target_os = "windows")] fn create_sandbox() { panic!("Sandboxing is not supported on Windows."); }<|fim▁end|>
if opts::get().sandbox {
<|file_name|>yuzi.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core'; import { CheckboxModule } from './checkbox/checkbox.module'; import { ModalModule } from './modal/modal.module'; import { RadioModule } from './radio/radio.module'; import { SelectModule } from './select/select.module'; import { SharedModule } from './shared/shared.module';<|fim▁hole|>const yuziModules = [ CheckboxModule, ModalModule, RadioModule, SelectModule, SharedModule ]; @NgModule({ imports: yuziModules, exports: yuziModules }) export class YuziModule {}<|fim▁end|>
<|file_name|>4_6.py<|end_file_name|><|fim▁begin|><|fim▁hole|>l = list(range(1, 20 + 1, 2)) # wrapping range() in list() is wasteful here; range() is already iterable for i in l: print(i)<|fim▁end|>
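For comparison, a minimal sketch of the same loop without the intermediate list (plain Python, identical output):

# Same odd numbers 1..19, iterating the range object directly.
for i in range(1, 21, 2):
    print(i)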
<|file_name|>core.py<|end_file_name|><|fim▁begin|>import datetime import logging JRD_TYPES = ('application/json', 'application/xrd+json', 'text/json') XRD_TYPES = ('application/xrd+xml', 'text/xml') logger = logging.getLogger("rd") def _is_str(s): try: return isinstance(s, basestring) except NameError: return isinstance(s, str) def loads(content, content_type): from rd import jrd, xrd content_type = content_type.split(";")[0] if content_type in JRD_TYPES: logger.debug("loads() loading JRD") return jrd.loads(content) elif content_type in XRD_TYPES: logger.debug("loads() loading XRD") return xrd.loads(content) # # special XRD types # class Attribute(object): def __init__(self, name, value): self.name = name self.value = value def __cmp__(self, other): return cmp(str(self), str(other)) def __eq__(self, other): return str(self) == other def __str__(self): return "%s=%s" % (self.name, self.value) class Element(object): def __init__(self, name, value, attrs=None): self.name = name self.value = value self.attrs = attrs or {} class Title(object): def __init__(self, value, lang=None): self.value = value self.lang = lang def __cmp__(self, other): return cmp(str(self), str(other)) def __eq__(self, other): return str(self) == str(other) def __str__(self): if self.lang: return "%s:%s" % (self.lang, self.value) return self.value class Property(object): def __init__(self, type_, value=None): self.type = type_ self.value = value def __cmp__(self, other): return cmp(str(self), str(other)) def __eq__(self, other): return str(self) == other def __str__(self): if self.value: return "%s:%s" % (self.type, self.value) return self.type # # special list types # class ListLikeObject(list): def __setitem__(self, key, value): value = self.item(value) super(ListLikeObject, self).__setitem__(key, value) def append(self, value): value = self.item(value) super(ListLikeObject, self).append(value) def extend(self, values): values = (self.item(value) for value in values) super(ListLikeObject, self).extend(values) class AttributeList(ListLikeObject): def __call__(self, name): for attr in self: if attr.name == name: yield attr def item(self, value): if isinstance(value, (list, tuple)): return Attribute(*value) elif not isinstance(value, Attribute): raise ValueError('value must be an instance of Attribute') return value class ElementList(ListLikeObject): def item(self, value): if not isinstance(value, Element): raise ValueError('value must be an instance of Type') return value class TitleList(ListLikeObject): def item(self, value): if _is_str(value): return Title(value) elif isinstance(value, (list, tuple)): return Title(*value) elif not isinstance(value, Title): raise ValueError('value must be an instance of Title') return value class LinkList(ListLikeObject): def __call__(self, rel): for link in self: if link.rel == rel: yield link def item(self, value): if not isinstance(value, Link): raise ValueError('value must be an instance of Link') return value class PropertyList(ListLikeObject): def __call__(self, type_): for prop in self: if prop.type == type_: yield prop def item(self, value): if _is_str(value): return Property(value) elif isinstance(value, (tuple, list)): return Property(*value) elif not isinstance(value, Property): raise ValueError('value must be an instance of Property') return value # # Link object # class Link(object): def __init__(self, rel=None, type=None, href=None, template=None): self.rel = rel self.type = type self.href = href self.template = template self._titles = TitleList() self._properties = 
PropertyList() def get_titles(self): return self._titles titles = property(get_titles) def get_properties(self): return self._properties properties = property(get_properties) # # main RD class # class RD(object): def __init__(self, xml_id=None, subject=None): self.xml_id = xml_id self.subject = subject self._expires = None self._aliases = [] self._properties = PropertyList() self._links = LinkList() self._signatures = [] self._attributes = AttributeList() self._elements = ElementList() # ser/deser methods def to_json(self): from rd import jrd return jrd.dumps(self) def to_xml(self): from rd import xrd return xrd.dumps(self) # helper methods def find_link(self, rels, attr=None):<|fim▁hole|> if attr: return getattr(link, attr, None) return link # custom elements and attributes def get_elements(self): return self._elements elements = property(get_elements) @property def attributes(self): return self._attributes # defined elements and attributes def get_expires(self): return self._expires def set_expires(self, expires): if not isinstance(expires, datetime.datetime): raise ValueError('expires must be a datetime object') self._expires = expires expires = property(get_expires, set_expires) def get_aliases(self): return self._aliases aliases = property(get_aliases) def get_properties(self): return self._properties properties = property(get_properties) def get_links(self): return self._links links = property(get_links) def get_signatures(self): return self._signatures signatures = property(get_signatures)<|fim▁end|>
if not isinstance(rels, (list, tuple)): rels = (rels,) for link in self.links: if link.rel in rels:
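A short usage sketch for the rd classes above. The import path and all URLs are hypothetical; it only exercises what the snippet itself defines (Link, TitleList coercion, and the find_link helper completed above):

from rd.core import RD, Link  # hypothetical module path for the file above

rd = RD(subject='http://example.com/user')
link = Link(rel='http://webfinger.net/rel/profile-page',
            type='text/html', href='http://example.com/profile')
link.titles.append(('Profile', 'en'))  # TitleList coerces a tuple to Title(value, lang)
rd.links.append(link)                  # LinkList rejects anything that is not a Link
# find_link scans rd.links for a matching rel; attr='href' returns just that field.
assert rd.find_link('http://webfinger.net/rel/profile-page', attr='href') == 'http://example.com/profile'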
<|file_name|>ner.py<|end_file_name|><|fim▁begin|>import math import gzip import paddle.v2 as paddle import paddle.v2.evaluator as evaluator import conll03 import itertools # init dataset train_data_file = 'data/train' test_data_file = 'data/test' vocab_file = 'data/vocab.txt' target_file = 'data/target.txt' emb_file = 'data/wordVectors.txt' train_data_reader = conll03.train(train_data_file, vocab_file, target_file) test_data_reader = conll03.test(test_data_file, vocab_file, target_file) word_dict, label_dict = conll03.get_dict(vocab_file, target_file) word_vector_values = conll03.get_embedding(emb_file) # init hyper-params word_dict_len = len(word_dict) label_dict_len = len(label_dict) mark_dict_len = 2 word_dim = 50 mark_dim = 5 hidden_dim = 300 mix_hidden_lr = 1e-3 default_std = 1 / math.sqrt(hidden_dim) / 3.0 emb_para = paddle.attr.Param( name='emb', initial_std=math.sqrt(1. / word_dim), is_static=True) std_0 = paddle.attr.Param(initial_std=0.) std_default = paddle.attr.Param(initial_std=default_std) def d_type(size): return paddle.data_type.integer_value_sequence(size) def ner_net(is_train): word = paddle.layer.data(name='word', type=d_type(word_dict_len)) mark = paddle.layer.data(name='mark', type=d_type(mark_dict_len)) word_embedding = paddle.layer.mixed( name='word_embedding', size=word_dim, input=paddle.layer.table_projection(input=word, param_attr=emb_para)) mark_embedding = paddle.layer.mixed( name='mark_embedding', size=mark_dim, input=paddle.layer.table_projection(input=mark, param_attr=std_0)) emb_layers = [word_embedding, mark_embedding] word_caps_vector = paddle.layer.concat( name='word_caps_vector', input=emb_layers) hidden_1 = paddle.layer.mixed( name='hidden1', size=hidden_dim, act=paddle.activation.Tanh(), bias_attr=std_default, input=[ paddle.layer.full_matrix_projection( input=word_caps_vector, param_attr=std_default) ]) rnn_para_attr = paddle.attr.Param(initial_std=0.0, learning_rate=0.1) hidden_para_attr = paddle.attr.Param( initial_std=default_std, learning_rate=mix_hidden_lr) rnn_1_1 = paddle.layer.recurrent( name='rnn1-1', input=hidden_1, act=paddle.activation.Relu(), bias_attr=std_0, param_attr=rnn_para_attr) rnn_1_2 = paddle.layer.recurrent( name='rnn1-2', input=hidden_1, act=paddle.activation.Relu(), reverse=1, bias_attr=std_0, param_attr=rnn_para_attr) hidden_2_1 = paddle.layer.mixed( name='hidden2-1', size=hidden_dim, bias_attr=std_default, act=paddle.activation.STanh(), input=[ paddle.layer.full_matrix_projection( input=hidden_1, param_attr=hidden_para_attr), paddle.layer.full_matrix_projection( input=rnn_1_1, param_attr=rnn_para_attr) ]) hidden_2_2 = paddle.layer.mixed( name='hidden2-2', size=hidden_dim, bias_attr=std_default, act=paddle.activation.STanh(), input=[ paddle.layer.full_matrix_projection( input=hidden_1, param_attr=hidden_para_attr), paddle.layer.full_matrix_projection( input=rnn_1_2, param_attr=rnn_para_attr) ]) rnn_2_1 = paddle.layer.recurrent( name='rnn2-1', input=hidden_2_1, act=paddle.activation.Relu(), reverse=1, bias_attr=std_0, param_attr=rnn_para_attr) rnn_2_2 = paddle.layer.recurrent( name='rnn2-2', input=hidden_2_2, act=paddle.activation.Relu(), bias_attr=std_0, param_attr=rnn_para_attr) hidden_3 = paddle.layer.mixed( name='hidden3', size=hidden_dim, bias_attr=std_default, act=paddle.activation.STanh(), input=[ paddle.layer.full_matrix_projection( input=hidden_2_1, param_attr=hidden_para_attr), paddle.layer.full_matrix_projection( input=rnn_2_1, param_attr=rnn_para_attr), paddle.layer.full_matrix_projection( input=hidden_2_2, 
param_attr=hidden_para_attr), paddle.layer.full_matrix_projection( input=rnn_2_2, param_attr=rnn_para_attr) ]) output = paddle.layer.mixed( name='output', size=label_dict_len, bias_attr=False, input=[ paddle.layer.full_matrix_projection( input=hidden_3, param_attr=std_default) ]) if is_train: target = paddle.layer.data(name='target', type=d_type(label_dict_len)) crf_cost = paddle.layer.crf( size=label_dict_len, input=output, label=target, param_attr=paddle.attr.Param( name='crfw', initial_std=default_std, learning_rate=mix_hidden_lr)) crf_dec = paddle.layer.crf_decoding( size=label_dict_len, input=output, label=target, param_attr=paddle.attr.Param(name='crfw')) return crf_cost, crf_dec, target else: predict = paddle.layer.crf_decoding( size=label_dict_len, input=output, param_attr=paddle.attr.Param(name='crfw')) return predict def ner_net_train(data_reader=train_data_reader, num_passes=1): # define network topology crf_cost, crf_dec, target = ner_net(is_train=True) evaluator.sum(name='error', input=crf_dec) evaluator.chunk( name='ner_chunk', input=crf_dec, label=target, chunk_scheme='IOB', num_chunk_types=(label_dict_len - 1) / 2) # create parameters parameters = paddle.parameters.create(crf_cost) parameters.set('emb', word_vector_values) # create optimizer optimizer = paddle.optimizer.Momentum( momentum=0, learning_rate=2e-4, regularization=paddle.optimizer.L2Regularization(rate=8e-4), gradient_clipping_threshold=25, model_average=paddle.optimizer.ModelAverage( average_window=0.5, max_average_window=10000), ) trainer = paddle.trainer.SGD( cost=crf_cost, parameters=parameters, update_equation=optimizer, extra_layers=crf_dec) reader = paddle.batch( paddle.reader.shuffle(data_reader, buf_size=8192), batch_size=64) feeding = {'word': 0, 'mark': 1, 'target': 2} def event_handler(event): if isinstance(event, paddle.event.EndIteration): if event.batch_id % 100 == 0: print "Pass %d, Batch %d, Cost %f, %s" % ( event.pass_id, event.batch_id, event.cost, event.metrics) if event.batch_id % 1000 == 0: result = trainer.test(reader=reader, feeding=feeding) print "\nTest with Pass %d, Batch %d, %s" % ( event.pass_id, event.batch_id, result.metrics) if isinstance(event, paddle.event.EndPass): # save parameters with gzip.open('params_pass_%d.tar.gz' % event.pass_id, 'w') as f: parameters.to_tar(f) result = trainer.test(reader=reader, feeding=feeding) print "\nTest with Pass %d, %s" % (event.pass_id, result.metrics) trainer.train( reader=reader, event_handler=event_handler, num_passes=num_passes, feeding=feeding)<|fim▁hole|> return parameters def ner_net_infer(data_reader=test_data_reader, model_file='ner_model.tar.gz'): test_data = [] test_sentences = [] for item in data_reader(): test_data.append([item[0], item[1]]) test_sentences.append(item[-1]) if len(test_data) == 10: break predict = ner_net(is_train=False) lab_ids = paddle.infer( output_layer=predict, parameters=paddle.parameters.Parameters.from_tar(gzip.open(model_file)), input=test_data, field='id') flat_data = [word for word in itertools.chain.from_iterable(test_sentences)] labels_reverse = {} for (k, v) in label_dict.items(): labels_reverse[v] = k pre_lab = [labels_reverse[lab_id] for lab_id in lab_ids] for word, label in zip(flat_data, pre_lab): print word, label if __name__ == '__main__': paddle.init(use_gpu=False, trainer_count=1) ner_net_train(data_reader=train_data_reader, num_passes=1) ner_net_infer( data_reader=test_data_reader, model_file='params_pass_0.tar.gz')<|fim▁end|>
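The num_chunk_types=(label_dict_len - 1) / 2 argument above encodes the IOB scheme: one 'O' tag plus a B-/I- pair per entity type. A small illustration of the arithmetic (label names assumed; the real set comes from data/target.txt):

# CoNLL-2003 style IOB labels: 'O' plus B-/I- pairs for four entity types.
labels = ['O', 'B-PER', 'I-PER', 'B-LOC', 'I-LOC', 'B-ORG', 'I-ORG', 'B-MISC', 'I-MISC']
assert (len(labels) - 1) // 2 == 4  # the script gets the same result via Python 2 integer division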
<|file_name|>tour.es6.js<|end_file_name|><|fim▁begin|>/** * @file * Attaches behaviors for the Tour module's toolbar tab. */ (($, Backbone, Drupal, settings, document, Shepherd) => { const queryString = decodeURI(window.location.search); /** * Attaches the tour's toolbar tab behavior. * * It uses the query string for: * - tour: When ?tour=1 is present, the tour will start automatically after * the page has loaded. * - tips: Pass ?tips=class in the url to filter the available tips to the * subset which match the given class. * * @example * http://example.com/foo?tour=1&tips=bar * * @type {Drupal~behavior} * * @prop {Drupal~behaviorAttach} attach * Attach tour functionality on `tour` events. */ Drupal.behaviors.tour = { attach(context) { once('tour', 'body').forEach(() => { const model = new Drupal.tour.models.StateModel(); // eslint-disable-next-line no-new new Drupal.tour.views.ToggleTourView({ el: $(context).find('#toolbar-tab-tour'), model, }); model // Allow other scripts to respond to tour events. .on('change:isActive', (tourModel, isActive) => { $(document).trigger( isActive ? 'drupalTourStarted' : 'drupalTourStopped', ); }); // Initialization: check whether a tour is available on the current // page. if (settings._tour_internal) { model.set('tour', settings._tour_internal); } // Start the tour immediately if toggled via query string. if (/tour=?/i.test(queryString)) { model.set('isActive', true); } }); }, }; /** * @namespace */ Drupal.tour = Drupal.tour || { /** * @namespace Drupal.tour.models */ models: {}, /** * @namespace Drupal.tour.views */ views: {}, }; /** * Backbone Model for tours. * * @constructor * * @augments Backbone.Model */ Drupal.tour.models.StateModel = Backbone.Model.extend( /** @lends Drupal.tour.models.StateModel# */ { /** * @type {object} */ defaults: /** @lends Drupal.tour.models.StateModel# */ { /** * Indicates whether the Drupal root window has a tour. * * @type {Array} */ tour: [], /** * Indicates whether the tour is currently running. * * @type {bool} */ isActive: false, /** * Indicates which tour is the active one (necessary to cleanly stop). * * @type {Array} */ activeTour: [], }, }, ); Drupal.tour.views.ToggleTourView = Backbone.View.extend( /** @lends Drupal.tour.views.ToggleTourView# */ { /** * @type {object} */ events: { click: 'onClick' }, /** * Handles edit mode toggle interactions. * * @constructs * * @augments Backbone.View */ initialize() { this.listenTo(this.model, 'change:tour change:isActive', this.render); this.listenTo(this.model, 'change:isActive', this.toggleTour); }, /** * {@inheritdoc}<|fim▁hole|> * The `ToggleTourView` view. */ render() { // Render the visibility. this.$el.toggleClass('hidden', this._getTour().length === 0); // Render the state. const isActive = this.model.get('isActive'); this.$el .find('button') .toggleClass('is-active', isActive) .attr('aria-pressed', isActive); return this; }, /** * Model change handler; starts or stops the tour. */ toggleTour() { if (this.model.get('isActive')) { this._removeIrrelevantTourItems(this._getTour()); const tourItems = this.model.get('tour'); const that = this; if (tourItems.length) { // If Joyride is positioned relative to the top or bottom of an // element, and its secondary position is right or left, then the // arrow is also positioned right or left. Shepherd defaults to // center positioning the arrow. // // In most cases, this arrow positioning difference has // little impact. 
However, tours built with Joyride may have tips // using a higher level selector than the element the tip is // expected to point to, and relied on Joyride's arrow positioning // to align the arrow with the expected reference element. Joyride's // arrow positioning behavior is replicated here to prevent those // use cases from causing UI regressions. // // This modifier is provided here instead of TourViewBuilder (where // most position modifications are) because it includes adding a // JavaScript callback function. settings.tourShepherdConfig.defaultStepOptions.popperOptions.modifiers.push( { name: 'moveArrowJoyridePosition', enabled: true, phase: 'write', fn({ state }) { const { arrow } = state.elements; const { placement } = state; if ( arrow && /^top|bottom/.test(placement) && /-start|-end$/.test(placement) ) { const horizontalPosition = placement.split('-')[1]; const offset = horizontalPosition === 'start' ? 28 : state.elements.popper.clientWidth - 56; arrow.style.transform = `translate3d(${offset}px, 0px, 0px)`; } }, }, ); const shepherdTour = new Shepherd.Tour(settings.tourShepherdConfig); shepherdTour.on('cancel', () => { that.model.set('isActive', false); }); shepherdTour.on('complete', () => { that.model.set('isActive', false); }); tourItems.forEach((tourStepConfig, index) => { // Create the configuration for a given tour step by using values // defined in TourViewBuilder. // @see \Drupal\tour\TourViewBuilder::viewMultiple() const tourItemOptions = { title: tourStepConfig.title ? Drupal.checkPlain(tourStepConfig.title) : null, text: () => Drupal.theme('tourItemContent', tourStepConfig), attachTo: tourStepConfig.attachTo, buttons: [Drupal.tour.nextButton(shepherdTour, tourStepConfig)], classes: tourStepConfig.classes, index, }; tourItemOptions.when = { show() { const nextButton = shepherdTour.currentStep.el.querySelector('footer button'); // Drupal disables Shepherd's built in focus after item // creation functionality due to focus being set on the tour // item container after every scroll and resize event. In its // place, the 'next' button is focused here. nextButton.focus(); // When Stable or Stable 9 are part of the active theme, the // Drupal.tour.convertToJoyrideMarkup() function is available. // This function converts Shepherd markup to Joyride markup, // facilitating the use of the Shepherd library that is // backwards compatible with customizations intended for // Joyride. // The Drupal.tour.convertToJoyrideMarkup() function is // internal, and will eventually be removed from Drupal core. if (Drupal.tour.hasOwnProperty('convertToJoyrideMarkup')) { Drupal.tour.convertToJoyrideMarkup(shepherdTour); } }, }; shepherdTour.addStep(tourItemOptions); }); shepherdTour.start(); this.model.set({ isActive: true, activeTour: shepherdTour }); } } else { this.model.get('activeTour').cancel(); this.model.set({ isActive: false, activeTour: [] }); } }, /** * Toolbar tab click event handler; toggles isActive. * * @param {jQuery.Event} event * The click event. */ onClick(event) { this.model.set('isActive', !this.model.get('isActive')); event.preventDefault(); event.stopPropagation(); }, /** * Gets the tour. * * @return {array} * An array of Shepherd tour item objects. */ _getTour() { return this.model.get('tour'); }, /** * Removes tour items for elements that don't have matching page elements. * * Or that are explicitly filtered out via the 'tips' query string. 
* * @example * <caption>This will filter out tips that do not have a matching * page element or don't have the "bar" class.</caption> * http://example.com/foo?tips=bar * * @param {Object[]} tourItems * An array containing tour Step config objects. * The object properties relevant to this function: * - classes {string}: A string of classes to be added to the tour step * when rendered. * - selector {string}: The selector a tour step is associated with. */ _removeIrrelevantTourItems(tourItems) { const tips = /tips=([^&]+)/.exec(queryString); const filteredTour = tourItems.filter((tourItem) => { // If the query parameter 'tips' is set, remove all tips that don't // have the matching class. The `tourItem` variable is a step config // object, and the 'classes' property is a ShepherdJS Step() config // option that provides a string. if ( tips && tourItem.hasOwnProperty('classes') && tourItem.classes.indexOf(tips[1]) === -1 ) { return false; } // If a selector is configured but there isn't a matching element, // return false. return !( tourItem.selector && !document.querySelector(tourItem.selector) ); }); // If there are tours filtered, we'll have to update model. if (tourItems.length !== filteredTour.length) { filteredTour.forEach((filteredTourItem, filteredTourItemId) => { filteredTour[filteredTourItemId].counter = Drupal.t( '!tour_item of !total', { '!tour_item': filteredTourItemId + 1, '!total': filteredTour.length, }, ); if (filteredTourItemId === filteredTour.length - 1) { filteredTour[filteredTourItemId].cancelText = Drupal.t('End tour'); } }); this.model.set('tour', filteredTour); } }, }, ); /** * Provides an object that will become the tour item's 'next' button. * * Similar to a theme function, themes can override this function to customize * the resulting button. Unlike a theme function, it returns an object instead * of a string, which is why it is not part of Drupal.theme. * * @param {Tour} shepherdTour * A class representing a Shepherd site tour. * @param {Object} tourStepConfig * An object generated in TourViewBuilder used for creating the options * passed to `Tour.addStep(options)`. * Contains the following properties: * - id {string}: The tour.tip ID specified by its config * - selector {string|null}: The selector of the element the tour step is * attaching to. * - module {string}: The module providing the tip plugin used by this step. * - counter {string}: A string indicating which tour step this is out of * how many total steps. * - attachTo {Object} This is directly mapped to the `attachTo` Step() * option. It has two properties: * - element {string}: The selector of the element the step attaches to. * - on {string}: a PopperJS compatible string to specify step position. * - classes {string}: Will be added to the class attribute of the step. * - body {string}: Markup that is mapped to the `text` Step() option. Will * become the step content. * - title {string}: is mapped to the `title` Step() option. * * @return {{classes: string, action: string, text: string}} * An object structured in the manner Shepherd requires to create the * 'next' button. * * @see https://shepherdjs.dev/docs/Tour.html * @see \Drupal\tour\TourViewBuilder::viewMultiple() * @see https://shepherdjs.dev/docs/Step.html */ Drupal.tour.nextButton = (shepherdTour, tourStepConfig) => { return { classes: 'button button--primary', text: tourStepConfig.cancelText ? tourStepConfig.cancelText : Drupal.t('Next'), action: tourStepConfig.cancelText ? 
shepherdTour.cancel : shepherdTour.next, }; }; /** * Theme function for tour item content. * * @param {Object} tourStepConfig * An object generated in TourViewBuilder used for creating the options * passed to `Tour.addStep(options)`. * Contains the following properties: * - id {string}: The tour.tip ID specified by its config * - selector {string|null}: The selector of the element the tour step is * attaching to. * - module {string}: The module providing the tip plugin used by this step. * - counter {string}: A string indicating which tour step this is out of * how many total steps. * - attachTo {Object} This is directly mapped to the `attachTo` Step() * option. It has two properties: * - element {string}: The selector of the element the step attaches to. * - on {string}: a PopperJS compatible string to specify step position. * - classes {string}: Will be added to the class attribute of the step. * - body {string}: Markup that is mapped to the `text` Step() option. Will * become the step content. * - title {string}: is mapped to the `title` Step() option. * * @return {string} * The tour item content markup. * * @see \Drupal\tour\TourViewBuilder::viewMultiple() * @see https://shepherdjs.dev/docs/Step.html */ Drupal.theme.tourItemContent = (tourStepConfig) => `${tourStepConfig.body}<div class="tour-progress">${tourStepConfig.counter}</div>`; })(jQuery, Backbone, Drupal, drupalSettings, document, window.Shepherd);<|fim▁end|>
* * @return {Drupal.tour.views.ToggleTourView}
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url from django.conf import settings # Here, using contacts.profile would cause a 'mismatch' since contacts is also a module from profile import ProfileView from contacts import ContactsView from authen import Authenticate strid = settings.CONTACT_URL['strid'] user = settings.CONTACT_URL['user'] contact = settings.CONTACT_URL['contact'] auth = settings.CONTACT_URL['auth'] <|fim▁hole|> url(r'^(?P<'+user+r'>\w{5,18})/(?P<'+strid+r'>\w{16})/$', ProfileView.as_view()), url(r'^(?P<'+user+r'>\w{5,18})/(?P<'+strid+r'>\w{16})/(?P<'+contact+r'>\d+)/$', ContactsView.as_view()), )<|fim▁end|>
urlpatterns = patterns('', url(r'^api/'+auth+'$', Authenticate.as_view()), url(r'^api/(?P<'+strid+r'>\w{16})/$', ProfileView.as_view()), url(r'^api/(?P<'+strid+r'>\w{16})/(?P<'+contact+r'>\d+)/$', ContactsView.as_view()),
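For illustration, assuming settings.CONTACT_URL = {'strid': 'strid', 'user': 'user', 'contact': 'contact', 'auth': 'auth'}, the contact-detail pattern built above matches paths like the following (a standalone regex check, not actual Django routing):

import re

# Mirrors r'^api/(?P<'+strid+r'>\w{16})/(?P<'+contact+r'>\d+)/$' with the assumed key names.
pattern = r'^api/(?P<strid>\w{16})/(?P<contact>\d+)/$'
m = re.match(pattern, 'api/abcdef0123456789/42/')
assert m and m.group('strid') == 'abcdef0123456789' and m.group('contact') == '42'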
<|file_name|>config.py<|end_file_name|><|fim▁begin|><|fim▁hole|>class Config(object): SPOTIPY_REDIRECT_URI = os.environ['SPOTIPY_REDIRECT_URI'] SPOTIPY_CLIENT_ID = os.environ['SPOTIPY_CLIENT_ID'] SPOTIPY_CLIENT_SECRET = os.environ['SPOTIPY_CLIENT_SECRET'] SPOTIFY_ACCESS_SCOPE = 'playlist-modify-public playlist-modify-private playlist-read-private user-library-read' ########### # Options # ########### # TRACKS_PER_ARTIST # # Number of tracks per artist to add to the playlist. # I recommend 5 or less. Max is 10. TRACKS_PER_ARTIST = 3 # COLLATE # # By default, the playlist will be ordered like: # - ARTIST A TRACK 1 # - ARTIST A TRACK 2 # - ARTIST A TRACK 3 # - ARTIST A TRACK 4 # - ARTIST A TRACK 5 # - ARTIST B TRACK 1 # - ARTIST B TRACK 2 # - ARTIST B TRACK 3 # ... # if COLLATE is set to True, it will instead be ordered like so: # - ARTIST A TRACK 1 # - ARTIST B TRACK 1 # - ARTIST C TRACK 1 # ... # - ARTIST Z TRACK 1 # - ARTIST A TRACK 2 # - ARTIST B TRACK 2 # ... COLLATE = False # PUBLIC # # Default False. Set True to make your generated playlist public. PUBLIC = False<|fim▁end|>
import os
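Config reads its three SPOTIPY_* values from os.environ at import time, so they must exist beforehand; a hypothetical setup (variable names from the code, values invented):

import os

# Must run before `from config import Config`; otherwise the class body raises KeyError.
os.environ.setdefault('SPOTIPY_REDIRECT_URI', 'http://localhost:8888/callback')
os.environ.setdefault('SPOTIPY_CLIENT_ID', 'your-client-id')
os.environ.setdefault('SPOTIPY_CLIENT_SECRET', 'your-client-secret')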
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// DO NOT EDIT ! // This file was generated automatically from 'src/mako/cli/main.rs.mako' // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] #[macro_use] extern crate clap; extern crate yup_oauth2 as oauth2; extern crate yup_hyper_mock as mock; extern crate serde; extern crate hyper; extern crate mime; extern crate strsim; extern crate google_adsensehost4d1 as api; use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; mod cmn; use cmn::{InvalidOptionsError, CLIError, JsonTokenStorage, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo}; use std::default::Default; use std::str::FromStr; use oauth2::{Authenticator, DefaultAuthenticatorDelegate}; use serde::json; use clap::ArgMatches; enum DoitError { IoError(String, io::Error), ApiError(api::Error), } struct Engine<'n, 'a> { opt: ArgMatches<'n, 'a>, hub: api::AdSenseHost<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, JsonTokenStorage, hyper::Client>>, gp: Vec<&'static str>, gpm: Vec<(&'static str, &'static str)>, } impl<'n, 'a> Engine<'n, 'a> { fn _accounts_adclients_get(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().adclients_get(opt.value_of("account-id").unwrap_or(""), opt.value_of("ad-client-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_adclients_list(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().adclients_list(opt.value_of("account-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { 
err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["page-token", "max-results"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_adunits_delete(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().adunits_delete(opt.value_of("account-id").unwrap_or(""), opt.value_of("ad-client-id").unwrap_or(""), opt.value_of("ad-unit-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_adunits_get(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().adunits_get(opt.value_of("account-id").unwrap_or(""), opt.value_of("ad-client-id").unwrap_or(""), opt.value_of("ad-unit-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, 
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_adunits_get_ad_code(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().adunits_get_ad_code(opt.value_of("account-id").unwrap_or(""), opt.value_of("ad-client-id").unwrap_or(""), opt.value_of("ad-unit-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "host-custom-channel-id" => { call = call.add_host_custom_channel_id(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["host-custom-channel-id"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_adunits_insert(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "code" => Some(("code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.type" => Some(("contentAdsSettings.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.backup-option.color" => Some(("contentAdsSettings.backupOption.color", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.backup-option.url" => Some(("contentAdsSettings.backupOption.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.backup-option.type" => Some(("contentAdsSettings.backupOption.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.size" => Some(("contentAdsSettings.size", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mobile-content-ads-settings.scripting-language" => Some(("mobileContentAdsSettings.scriptingLanguage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mobile-content-ads-settings.type" => Some(("mobileContentAdsSettings.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mobile-content-ads-settings.markup-language" => Some(("mobileContentAdsSettings.markupLanguage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mobile-content-ads-settings.size" => Some(("mobileContentAdsSettings.size", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.corners" => Some(("customStyle.corners", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.url" => Some(("customStyle.colors.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.text" => Some(("customStyle.colors.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.border" => Some(("customStyle.colors.border", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.background" => Some(("customStyle.colors.background", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.title" => Some(("customStyle.colors.title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.font.family" => Some(("customStyle.font.family", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.font.size" => Some(("customStyle.font.size", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.kind" => Some(("customStyle.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["background", "backup-option", "border", "code", "color", "colors", "content-ads-settings", "corners", "custom-style", "family", "font", "id", "kind", "markup-language", "mobile-content-ads-settings", "name", "scripting-language", "size", "status", "text", "title", "type", "url"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info 
{ FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::AdUnit = json::value::from_value(object).unwrap(); let mut call = self.hub.accounts().adunits_insert(request, opt.value_of("account-id").unwrap_or(""), opt.value_of("ad-client-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_adunits_list(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().adunits_list(opt.value_of("account-id").unwrap_or(""), opt.value_of("ad-client-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); }, "include-inactive" => { call = call.include_inactive(arg_from_str(value.unwrap_or("false"), err, "include-inactive", "boolean")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["include-inactive", "page-token", "max-results"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_adunits_patch(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) 
-> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "code" => Some(("code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.type" => Some(("contentAdsSettings.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.backup-option.color" => Some(("contentAdsSettings.backupOption.color", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.backup-option.url" => Some(("contentAdsSettings.backupOption.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.backup-option.type" => Some(("contentAdsSettings.backupOption.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.size" => Some(("contentAdsSettings.size", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mobile-content-ads-settings.scripting-language" => Some(("mobileContentAdsSettings.scriptingLanguage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mobile-content-ads-settings.type" => Some(("mobileContentAdsSettings.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mobile-content-ads-settings.markup-language" => Some(("mobileContentAdsSettings.markupLanguage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mobile-content-ads-settings.size" => Some(("mobileContentAdsSettings.size", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.corners" => Some(("customStyle.corners", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.url" => Some(("customStyle.colors.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.text" => Some(("customStyle.colors.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.border" => Some(("customStyle.colors.border", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.background" => Some(("customStyle.colors.background", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.title" => Some(("customStyle.colors.title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.font.family" => Some(("customStyle.font.family", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.font.size" => Some(("customStyle.font.size", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"custom-style.kind" => Some(("customStyle.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["background", "backup-option", "border", "code", "color", "colors", "content-ads-settings", "corners", "custom-style", "family", "font", "id", "kind", "markup-language", "mobile-content-ads-settings", "name", "scripting-language", "size", "status", "text", "title", "type", "url"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::AdUnit = json::value::from_value(object).unwrap(); let mut call = self.hub.accounts().adunits_patch(request, opt.value_of("account-id").unwrap_or(""), opt.value_of("ad-client-id").unwrap_or(""), opt.value_of("ad-unit-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_adunits_update(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "code" => Some(("code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.type" => Some(("contentAdsSettings.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.backup-option.color" => Some(("contentAdsSettings.backupOption.color", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.backup-option.url" => Some(("contentAdsSettings.backupOption.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.backup-option.type" => Some(("contentAdsSettings.backupOption.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-ads-settings.size" => Some(("contentAdsSettings.size", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mobile-content-ads-settings.scripting-language" => Some(("mobileContentAdsSettings.scriptingLanguage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mobile-content-ads-settings.type" => Some(("mobileContentAdsSettings.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mobile-content-ads-settings.markup-language" => Some(("mobileContentAdsSettings.markupLanguage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mobile-content-ads-settings.size" => Some(("mobileContentAdsSettings.size", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.corners" => Some(("customStyle.corners", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.url" => Some(("customStyle.colors.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.text" => Some(("customStyle.colors.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.border" => Some(("customStyle.colors.border", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.background" => Some(("customStyle.colors.background", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.colors.title" => Some(("customStyle.colors.title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.font.family" => Some(("customStyle.font.family", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.font.size" => Some(("customStyle.font.size", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-style.kind" => Some(("customStyle.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["background", "backup-option", "border", "code", "color", "colors", "content-ads-settings", "corners", "custom-style", "family", "font", "id", "kind", "markup-language", "mobile-content-ads-settings", "name", "scripting-language", "size", "status", "text", "title", "type", "url"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info 
{ FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::AdUnit = json::value::from_value(object).unwrap(); let mut call = self.hub.accounts().adunits_update(request, opt.value_of("account-id").unwrap_or(""), opt.value_of("ad-client-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_get(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().get(opt.value_of("account-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_list(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().list(&opt.values_of("filter-ad-client-id").unwrap_or(Vec::new()).iter().map(|&v| v.to_string()).collect::<Vec<String>>()); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == 
key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_reports_generate(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().reports_generate(opt.value_of("account-id").unwrap_or(""), opt.value_of("start-date").unwrap_or(""), opt.value_of("end-date").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); }, "sort" => { call = call.add_sort(value.unwrap_or("")); }, "metric" => { call = call.add_metric(value.unwrap_or("")); }, "max-results" => { call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); }, "locale" => { call = call.locale(value.unwrap_or("")); }, "filter" => { call = call.add_filter(value.unwrap_or("")); }, "dimension" => { call = call.add_dimension(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["sort", "locale", "metric", "max-results", "filter", "start-index", "dimension"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _adclients_get(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.adclients().get(opt.value_of("ad-client-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == 
*param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _adclients_list(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.adclients().list(); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["page-token", "max-results"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _associationsessions_start(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.associationsessions().start(&opt.values_of("product-code").unwrap_or(Vec::new()).iter().map(|&v| v.to_string()).collect::<Vec<String>>(), opt.value_of("website-url").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "website-locale" => { call = call.website_locale(value.unwrap_or("")); }, "user-locale" => { call = call.user_locale(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { 
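// The key matched neither a method-specific flag (website-locale, user-locale)
// nor one of the global parameters in self.gp, so report it as unknown along
// with the full list of accepted names.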
err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["website-locale", "user-locale"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _associationsessions_verify(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.associationsessions().verify(opt.value_of("token").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _customchannels_delete(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.customchannels().delete(opt.value_of("ad-client-id").unwrap_or(""), opt.value_of("custom-channel-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match 
protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _customchannels_get(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.customchannels().get(opt.value_of("ad-client-id").unwrap_or(""), opt.value_of("custom-channel-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _customchannels_insert(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
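// CLI field names arrive in kebab-case; this table maps them onto the
// CustomChannel schema's camelCase JSON keys. All four writable fields
// (kind, code, id, name) are plain string scalars.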
{ "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "code" => Some(("code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "id", "kind", "name"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::CustomChannel = json::value::from_value(object).unwrap(); let mut call = self.hub.customchannels().insert(request, opt.value_of("ad-client-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _customchannels_list(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.customchannels().list(opt.value_of("ad-client-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["page-token", "max-results"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => 
call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _customchannels_patch(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "code" => Some(("code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "id", "kind", "name"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::CustomChannel = json::value::from_value(object).unwrap(); let mut call = self.hub.customchannels().patch(request, opt.value_of("ad-client-id").unwrap_or(""), opt.value_of("custom-channel-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _customchannels_update(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in 
opt.values_of("kv").unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "code" => Some(("code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "id", "kind", "name"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::CustomChannel = json::value::from_value(object).unwrap(); let mut call = self.hub.customchannels().update(request, opt.value_of("ad-client-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _reports_generate(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.reports().generate(opt.value_of("start-date").unwrap_or(""), opt.value_of("end-date").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); }, "sort" => { call = call.add_sort(value.unwrap_or("")); }, "metric" => { call = call.add_metric(value.unwrap_or("")); }, "max-results" => { call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); }, "locale" => { call = call.locale(value.unwrap_or("")); }, "filter" => { call = call.add_filter(value.unwrap_or("")); }, "dimension" => { call = 
call.add_dimension(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["sort", "locale", "metric", "max-results", "filter", "start-index", "dimension"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _urlchannels_delete(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.urlchannels().delete(opt.value_of("ad-client-id").unwrap_or(""), opt.value_of("url-channel-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _urlchannels_insert(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
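// UrlChannel exposes only three writable fields; note that the kebab-case
// `url-pattern` key maps to the JSON name `urlPattern`.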
{ "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "url-pattern" => Some(("urlPattern", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["id", "kind", "url-pattern"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::UrlChannel = json::value::from_value(object).unwrap(); let mut call = self.hub.urlchannels().insert(request, opt.value_of("ad-client-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _urlchannels_list(&self, opt: &ArgMatches<'n, 'a>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.urlchannels().list(opt.value_of("ad-client-id").unwrap_or("")); for parg in opt.values_of("v").unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["page-token", "max-results"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut 
response, output_schema)) => { let mut value = json::value::to_value(&output_schema); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _doit(&self, dry_run: bool) -> Result<Result<(), DoitError>, Option<InvalidOptionsError>> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); let mut err_opt: Option<InvalidOptionsError> = None; match self.opt.subcommand() { ("accounts", Some(opt)) => { match opt.subcommand() { ("adclients-get", Some(opt)) => { call_result = self._accounts_adclients_get(opt, dry_run, &mut err); }, ("adclients-list", Some(opt)) => { call_result = self._accounts_adclients_list(opt, dry_run, &mut err); }, ("adunits-delete", Some(opt)) => { call_result = self._accounts_adunits_delete(opt, dry_run, &mut err); }, ("adunits-get", Some(opt)) => { call_result = self._accounts_adunits_get(opt, dry_run, &mut err); }, ("adunits-get-ad-code", Some(opt)) => { call_result = self._accounts_adunits_get_ad_code(opt, dry_run, &mut err); }, ("adunits-insert", Some(opt)) => { call_result = self._accounts_adunits_insert(opt, dry_run, &mut err); }, ("adunits-list", Some(opt)) => { call_result = self._accounts_adunits_list(opt, dry_run, &mut err); }, ("adunits-patch", Some(opt)) => { call_result = self._accounts_adunits_patch(opt, dry_run, &mut err); }, ("adunits-update", Some(opt)) => { call_result = self._accounts_adunits_update(opt, dry_run, &mut err); }, ("get", Some(opt)) => { call_result = self._accounts_get(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._accounts_list(opt, dry_run, &mut err); }, ("reports-generate", Some(opt)) => { call_result = self._accounts_reports_generate(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("accounts".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("adclients", Some(opt)) => { match opt.subcommand() { ("get", Some(opt)) => { call_result = self._adclients_get(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._adclients_list(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("adclients".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("associationsessions", Some(opt)) => { match opt.subcommand() { ("start", Some(opt)) => { call_result = self._associationsessions_start(opt, dry_run, &mut err); }, ("verify", Some(opt)) => { call_result = self._associationsessions_verify(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("associationsessions".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("customchannels", Some(opt)) => { match opt.subcommand() { ("delete", Some(opt)) => { call_result = self._customchannels_delete(opt, dry_run, &mut err); }, ("get", Some(opt)) => { call_result = self._customchannels_get(opt, dry_run, &mut err); }, ("insert", Some(opt)) => { call_result = self._customchannels_insert(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._customchannels_list(opt, dry_run, &mut err); }, ("patch", Some(opt)) => { call_result = self._customchannels_patch(opt, dry_run, &mut err); }, ("update", Some(opt)) => { call_result = self._customchannels_update(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("customchannels".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("reports", Some(opt)) => { match opt.subcommand() { ("generate", Some(opt)) => { 
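// Dispatch for the top-level `reports` resource. A hypothetical invocation
// (binary name assumed) would be:
//   adsensehost4d1 reports generate 2014-01-01 2014-01-31 -p metric=EARNINGS -p dimension=DATE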
call_result = self._reports_generate(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("reports".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("urlchannels", Some(opt)) => { match opt.subcommand() { ("delete", Some(opt)) => { call_result = self._urlchannels_delete(opt, dry_run, &mut err); }, ("insert", Some(opt)) => { call_result = self._urlchannels_insert(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._urlchannels_list(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("urlchannels".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, _ => { err.issues.push(CLIError::MissingCommandError); writeln!(io::stderr(), "{}\n", self.opt.usage()).ok(); } } if dry_run { if err.issues.len() > 0 { err_opt = Some(err); } Err(err_opt) } else { Ok(call_result) } } // Please note that this call will fail if any part of the opt can't be handled fn new(opt: ArgMatches<'a, 'n>) -> Result<Engine<'a, 'n>, InvalidOptionsError> { let (config_dir, secret) = { let config_dir = match cmn::assure_config_dir_exists(opt.value_of("folder").unwrap_or("~/.google-service-cli")) { Err(e) => return Err(InvalidOptionsError::single(e, 3)), Ok(p) => p, }; match cmn::application_secret_from_directory(&config_dir, "adsensehost4d1-secret.json", "{\"installed\":{\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\",\"client_secret\":\"hCsslbCUyfehWMmbkG8vTYxG\",\"token_uri\":\"https://accounts.google.com/o/oauth2/token\",\"client_email\":\"\",\"redirect_uris\":[\"urn:ietf:wg:oauth:2.0:oob\",\"oob\"],\"client_x509_cert_url\":\"\",\"client_id\":\"620010449518-9ngf7o4dhs0dka470npqvor6dc5lqb9b.apps.googleusercontent.com\",\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"}}") { Ok(secret) => (config_dir, secret), Err(e) => return Err(InvalidOptionsError::single(e, 4)) } }; let auth = Authenticator::new( &secret, DefaultAuthenticatorDelegate, if opt.is_present("debug-auth") { hyper::Client::with_connector(mock::TeeConnector { connector: hyper::net::HttpsConnector::<hyper::net::Openssl>::default() }) } else { hyper::Client::new() }, JsonTokenStorage { program_name: "adsensehost4d1", db_dir: config_dir.clone(), }, None); let client = if opt.is_present("debug") { hyper::Client::with_connector(mock::TeeConnector { connector: hyper::net::HttpsConnector::<hyper::net::Openssl>::default() }) } else { hyper::Client::new() }; let engine = Engine { opt: opt, hub: api::AdSenseHost::new(client, auth), gp: vec!["alt", "fields", "key", "oauth-token", "pretty-print", "quota-user", "user-ip"], gpm: vec![ ("oauth-token", "oauth_token"), ("pretty-print", "prettyPrint"), ("quota-user", "quotaUser"), ("user-ip", "userIp"), ] }; match engine._doit(true) { Err(Some(err)) => Err(err), Err(None) => Ok(engine), Ok(_) => unreachable!(), } } fn doit(&self) -> Result<(), DoitError> { match self._doit(false) { Ok(res) => res, Err(_) => unreachable!(), } } } fn main() { let mut exit_status = 0i32; let arg_data = [ ("accounts", "methods: 'adclients-get', 'adclients-list', 'adunits-delete', 'adunits-get', 'adunits-get-ad-code', 'adunits-insert', 'adunits-list', 'adunits-patch', 'adunits-update', 'get', 'list' and 'reports-generate'", vec![ ("adclients-get", Some(r##"Get information about one of the ad clients in the specified publisher's AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/accounts_adclients-get", vec![ (Some(r##"account-id"##), None, 
Some(r##"Account which contains the ad client."##), Some(true), Some(false)), (Some(r##"ad-client-id"##), None, Some(r##"Ad client to get."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("adclients-list", Some(r##"List all hosted ad clients in the specified hosted account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/accounts_adclients-list", vec![ (Some(r##"account-id"##), None, Some(r##"Account for which to list ad clients."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("adunits-delete", Some(r##"Delete the specified ad unit from the specified publisher AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/accounts_adunits-delete", vec![ (Some(r##"account-id"##), None, Some(r##"Account which contains the ad unit."##), Some(true), Some(false)), (Some(r##"ad-client-id"##), None, Some(r##"Ad client for which to get ad unit."##), Some(true), Some(false)), (Some(r##"ad-unit-id"##), None, Some(r##"Ad unit to delete."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("adunits-get", Some(r##"Get the specified host ad unit in this AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/accounts_adunits-get", vec![ (Some(r##"account-id"##), None, Some(r##"Account which contains the ad unit."##), Some(true), Some(false)), (Some(r##"ad-client-id"##), None, Some(r##"Ad client for which to get ad unit."##), Some(true), Some(false)), (Some(r##"ad-unit-id"##), None, Some(r##"Ad unit to get."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("adunits-get-ad-code", Some(r##"Get ad code for the specified ad unit, attaching the specified host custom channels."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/accounts_adunits-get-ad-code", vec![ (Some(r##"account-id"##), None, Some(r##"Account which contains the ad client."##), Some(true), Some(false)), (Some(r##"ad-client-id"##), None, Some(r##"Ad client with contains the ad unit."##), Some(true), Some(false)), (Some(r##"ad-unit-id"##), None, Some(r##"Ad unit to get the code for."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("adunits-insert", Some(r##"Insert the supplied ad unit into the specified publisher AdSense account."##), "Details at 
http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/accounts_adunits-insert", vec![ (Some(r##"account-id"##), None, Some(r##"Account which will contain the ad unit."##), Some(true), Some(false)), (Some(r##"ad-client-id"##), None, Some(r##"Ad client into which to insert the ad unit."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("adunits-list", Some(r##"List all ad units in the specified publisher's AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/accounts_adunits-list", vec![ (Some(r##"account-id"##), None, Some(r##"Account which contains the ad client."##), Some(true), Some(false)), (Some(r##"ad-client-id"##), None, Some(r##"Ad client for which to list ad units."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("adunits-patch", Some(r##"Update the supplied ad unit in the specified publisher AdSense account. This method supports patch semantics."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/accounts_adunits-patch", vec![ (Some(r##"account-id"##), None, Some(r##"Account which contains the ad client."##), Some(true), Some(false)), (Some(r##"ad-client-id"##), None, Some(r##"Ad client which contains the ad unit."##), Some(true), Some(false)), (Some(r##"ad-unit-id"##), None, Some(r##"Ad unit to get."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("adunits-update", Some(r##"Update the supplied ad unit in the specified publisher AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/accounts_adunits-update", vec![ (Some(r##"account-id"##), None, Some(r##"Account which contains the ad client."##), Some(true), Some(false)), (Some(r##"ad-client-id"##), None, Some(r##"Ad client which contains the ad unit."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("get", Some(r##"Get information about the selected associated AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/accounts_get", vec![ (Some(r##"account-id"##), None, Some(r##"Account to get information about."##), Some(true), 
Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"List hosted accounts associated with this AdSense account by ad client id."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/accounts_list", vec![ (Some(r##"filter-ad-client-id"##), None, Some(r##"Ad clients to list accounts for."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("reports-generate", Some(r##"Generate an AdSense report based on the report request sent in the query parameters. Returns the result as JSON; to retrieve output in CSV format specify "alt=csv" as a query parameter."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/accounts_reports-generate", vec![ (Some(r##"account-id"##), None, Some(r##"Hosted account upon which to report."##), Some(true), Some(false)), (Some(r##"start-date"##), None, Some(r##"Start of the date range to report on in "YYYY-MM-DD" format, inclusive."##), Some(true), Some(false)), (Some(r##"end-date"##), None, Some(r##"End of the date range to report on in "YYYY-MM-DD" format, inclusive."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("adclients", "methods: 'get' and 'list'", vec![ ("get", Some(r##"Get information about one of the ad clients in the Host AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/adclients_get", vec![ (Some(r##"ad-client-id"##), None, Some(r##"Ad client to get."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"List all host ad clients in this AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/adclients_list", vec![ (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("associationsessions", "methods: 'start' and 'verify'", vec![ ("start", Some(r##"Create an association session for initiating an association with an AdSense user."##),<|fim▁hole|> None, Some(r##"Products to associate with the user."##), Some(true), Some(false)), (Some(r##"website-url"##), None, Some(r##"The URL of the user's hosted website."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), 
Some(false)), ]), ("verify", Some(r##"Verify an association session after the association callback returns from AdSense signup."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/associationsessions_verify", vec![ (Some(r##"token"##), None, Some(r##"The token returned to the association callback URL."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("customchannels", "methods: 'delete', 'get', 'insert', 'list', 'patch' and 'update'", vec![ ("delete", Some(r##"Delete a specific custom channel from the host AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/customchannels_delete", vec![ (Some(r##"ad-client-id"##), None, Some(r##"Ad client from which to delete the custom channel."##), Some(true), Some(false)), (Some(r##"custom-channel-id"##), None, Some(r##"Custom channel to delete."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("get", Some(r##"Get a specific custom channel from the host AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/customchannels_get", vec![ (Some(r##"ad-client-id"##), None, Some(r##"Ad client from which to get the custom channel."##), Some(true), Some(false)), (Some(r##"custom-channel-id"##), None, Some(r##"Custom channel to get."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("insert", Some(r##"Add a new custom channel to the host AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/customchannels_insert", vec![ (Some(r##"ad-client-id"##), None, Some(r##"Ad client to which the new custom channel will be added."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"List all host custom channels in this AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/customchannels_list", vec![ (Some(r##"ad-client-id"##), None, Some(r##"Ad client for which to list custom channels."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("patch", Some(r##"Update a custom channel in the host AdSense account. 
This method supports patch semantics."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/customchannels_patch", vec![ (Some(r##"ad-client-id"##), None, Some(r##"Ad client in which the custom channel will be updated."##), Some(true), Some(false)), (Some(r##"custom-channel-id"##), None, Some(r##"Custom channel to get."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("update", Some(r##"Update a custom channel in the host AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/customchannels_update", vec![ (Some(r##"ad-client-id"##), None, Some(r##"Ad client in which the custom channel will be updated."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("reports", "methods: 'generate'", vec![ ("generate", Some(r##"Generate an AdSense report based on the report request sent in the query parameters. Returns the result as JSON; to retrieve output in CSV format specify "alt=csv" as a query parameter."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/reports_generate", vec![ (Some(r##"start-date"##), None, Some(r##"Start of the date range to report on in "YYYY-MM-DD" format, inclusive."##), Some(true), Some(false)), (Some(r##"end-date"##), None, Some(r##"End of the date range to report on in "YYYY-MM-DD" format, inclusive."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("urlchannels", "methods: 'delete', 'insert' and 'list'", vec![ ("delete", Some(r##"Delete a URL channel from the host AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/urlchannels_delete", vec![ (Some(r##"ad-client-id"##), None, Some(r##"Ad client from which to delete the URL channel."##), Some(true), Some(false)), (Some(r##"url-channel-id"##), None, Some(r##"URL channel to delete."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("insert", Some(r##"Add a new URL channel to the host AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/urlchannels_insert", vec![ (Some(r##"ad-client-id"##), None, Some(r##"Ad client to which the new URL channel will be added."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields 
of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"List all host URL channels in the host AdSense account."##), "Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/urlchannels_list", vec![ (Some(r##"ad-client-id"##), None, Some(r##"Ad client for which to list URL channels."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ]; let mut app = App::new("adsensehost4d1") .author("Sebastian Thiel <[email protected]>") .version("0.3.1+20150617") .about("Gives AdSense Hosts access to report generation, ad code generation, and publisher management capabilities.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli") .arg(Arg::with_name("url") .long("scope") .help("Specify the authentication a method should be executed in. Each scope requires the user to grant this application permission to use it. If unset, it defaults to the shortest scope url for a particular method.") .multiple(true) .takes_value(true)) .arg(Arg::with_name("folder") .long("config-dir") .help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation. [default: ~/.google-service-cli]") .multiple(false) .takes_value(true)) .arg(Arg::with_name("debug") .long("debug") .help("Output all server communication to standard error. `tx` and `rx` are placed into the same stream.") .multiple(false) .takes_value(false)) .arg(Arg::with_name("debug-auth") .long("debug-auth") .help("Output all communication related to authentication to standard error. 
`tx` and `rx` are placed into the same stream.") .multiple(false) .takes_value(false)); for &(main_command_name, ref about, ref subcommands) in arg_data.iter() { let mut mcmd = SubCommand::with_name(main_command_name).about(about); for &(sub_command_name, ref desc, url_info, ref args) in subcommands { let mut scmd = SubCommand::with_name(sub_command_name); if let &Some(desc) = desc { scmd = scmd.about(desc); } scmd = scmd.after_help(url_info); for &(ref arg_name, ref flag, ref desc, ref required, ref multi) in args { let arg_name_str = match (arg_name, flag) { (&Some(an), _ ) => an, (_ , &Some(f)) => f, _ => unreachable!(), }; let mut arg = Arg::with_name(arg_name_str) .empty_values(false); if let &Some(short_flag) = flag { arg = arg.short(short_flag); } if let &Some(desc) = desc { arg = arg.help(desc); } if arg_name.is_some() && flag.is_some() { arg = arg.takes_value(true); } if let &Some(required) = required { arg = arg.required(required); } if let &Some(multi) = multi { arg = arg.multiple(multi); } scmd = scmd.arg(arg); } mcmd = mcmd.subcommand(scmd); } app = app.subcommand(mcmd); } let matches = app.get_matches(); let debug = matches.is_present("debug"); match Engine::new(matches) { Err(err) => { exit_status = err.exit_code; writeln!(io::stderr(), "{}", err).ok(); }, Ok(engine) => { if let Err(doit_err) = engine.doit() { exit_status = 1; match doit_err { DoitError::IoError(path, err) => { writeln!(io::stderr(), "Failed to open output file '{}': {}", path, err).ok(); }, DoitError::ApiError(err) => { if debug { writeln!(io::stderr(), "{:?}", err).ok(); } else { writeln!(io::stderr(), "{}", err).ok(); } } } } } } std::process::exit(exit_status); }<|fim▁end|>
"Details at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli/associationsessions_start", vec![ (Some(r##"product-code"##),
<|file_name|>route53domains.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "route53domains")] <|fim▁hole|>use rusoto::{DefaultCredentialsProvider, Region}; use rusoto::default_tls_client; #[test] fn should_list_operations() { let credentials = DefaultCredentialsProvider::new().unwrap(); let client = Route53DomainsClient::new(default_tls_client().unwrap(), credentials, Region::UsEast1); let request = ListOperationsRequest::default(); client.list_operations(&request).unwrap(); }<|fim▁end|>
extern crate rusoto; use rusoto::route53domains::{Route53Domains, Route53DomainsClient, ListOperationsRequest};
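The #![cfg(feature = "route53domains")] attribute above compiles the whole test file only when that Cargo feature is enabled, which keeps slow AWS integration tests opt-in. A sketch of the same gating pattern with a hypothetical feature name; under these assumptions the test would run via `cargo test --features integration`:

// Hypothetical sketch of the same opt-in gating (file: tests/integration.rs).
#![cfg(feature = "integration")]

#[test]
fn gated_smoke_test() {
    // Real network calls would go here; kept trivial so the sketch stays self-contained.
    assert_eq!(2 + 2, 4);
}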
<|file_name|>private.py<|end_file_name|><|fim▁begin|>"""Project URLs for authenticated users""" from django.conf.urls import patterns, url from readthedocs.projects.views.private import AliasList, ProjectDashboard, ImportView from readthedocs.projects.backends.views import ImportWizardView, ImportDemoView urlpatterns = patterns( # base view, flake8 complains if it is on the previous line. '', url(r'^$', ProjectDashboard.as_view(), name='projects_dashboard'), url(r'^import/$', ImportView.as_view(wizard_class=ImportWizardView), {'wizard': ImportWizardView}, name='projects_import'), url(r'^import/manual/$', ImportWizardView.as_view(), name='projects_import_manual'), url(r'^import/manual/demo/$', ImportDemoView.as_view(), name='projects_import_demo'), url(r'^import/github/$', 'readthedocs.projects.views.private.project_import_github', name='projects_import_github'), url(r'^import/bitbucket/$', 'readthedocs.projects.views.private.project_import_bitbucket', name='projects_import_bitbucket'), url(r'^(?P<project_slug>[-\w]+)/$', 'readthedocs.projects.views.private.project_manage', name='projects_manage'), url(r'^(?P<project_slug>[-\w]+)/alias/(?P<alias_id>\d+)/', 'readthedocs.projects.views.private.edit_alias', name='projects_alias_edit'), url(r'^(?P<project_slug>[-\w]+)/alias/$', 'readthedocs.projects.views.private.edit_alias', name='projects_alias_create'), url(r'^(?P<project_slug>[-\w]+)/alias/list/$', AliasList.as_view(), name='projects_alias_list'), url(r'^(?P<project_slug>[-\w]+)/comments_moderation/$', 'readthedocs.projects.views.private.project_comments_moderation', name='projects_comments_moderation'), url(r'^(?P<project_slug>[-\w]+)/edit/$', 'readthedocs.projects.views.private.project_edit', name='projects_edit'), url(r'^(?P<project_slug>[-\w]+)/advanced/$', 'readthedocs.projects.views.private.project_advanced', name='projects_advanced'), url(r'^(?P<project_slug>[-\w]+)/version/(?P<version_slug>[^/]+)/delete_html/$', 'readthedocs.projects.views.private.project_version_delete_html', name='project_version_delete_html'), url(r'^(?P<project_slug>[-\w]+)/version/(?P<version_slug>[^/]+)/$', 'readthedocs.projects.views.private.project_version_detail', name='project_version_detail'), url(r'^(?P<project_slug>[-\w]+)/versions/$',<|fim▁hole|> url(r'^(?P<project_slug>[-\w]+)/delete/$', 'readthedocs.projects.views.private.project_delete', name='projects_delete'), url(r'^(?P<project_slug>[-\w]+)/subprojects/delete/(?P<child_slug>[-\w]+)/$', # noqa 'readthedocs.projects.views.private.project_subprojects_delete', name='projects_subprojects_delete'), url(r'^(?P<project_slug>[-\w]+)/subprojects/$', 'readthedocs.projects.views.private.project_subprojects', name='projects_subprojects'), url(r'^(?P<project_slug>[-\w]+)/users/$', 'readthedocs.projects.views.private.project_users', name='projects_users'), url(r'^(?P<project_slug>[-\w]+)/users/delete/$', 'readthedocs.projects.views.private.project_users_delete', name='projects_users_delete'), url(r'^(?P<project_slug>[-\w]+)/notifications/$', 'readthedocs.projects.views.private.project_notifications', name='projects_notifications'), url(r'^(?P<project_slug>[-\w]+)/comments/$', 'readthedocs.projects.views.private.project_comments_settings', name='projects_comments'), url(r'^(?P<project_slug>[-\w]+)/notifications/delete/$', 'readthedocs.projects.views.private.project_notifications_delete', name='projects_notification_delete'), url(r'^(?P<project_slug>[-\w]+)/translations/$', 'readthedocs.projects.views.private.project_translations', name='projects_translations'), 
url(r'^(?P<project_slug>[-\w]+)/translations/delete/(?P<child_slug>[-\w]+)/$', # noqa 'readthedocs.projects.views.private.project_translations_delete', name='projects_translations_delete'), url(r'^(?P<project_slug>[-\w]+)/redirects/$', 'readthedocs.projects.views.private.project_redirects', name='projects_redirects'), url(r'^(?P<project_slug>[-\w]+)/redirects/delete/$', 'readthedocs.projects.views.private.project_redirects_delete', name='projects_redirects_delete'), )<|fim▁end|>
'readthedocs.projects.views.private.project_versions', name='projects_versions'),
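Each url() entry above binds a regex with a named group (project_slug, version_slug, ...) to a route name, so views and templates can build paths by name instead of hard-coding them. A minimal usage sketch, assuming the Django 1.x import path this era of the file used:

# Hypothetical usage sketch for the named routes above (Django 1.x era).
from django.core.urlresolvers import reverse

def project_edit_url(project_slug):
    # Resolves the 'projects_edit' pattern to "/<project_slug>/edit/".
    return reverse('projects_edit', kwargs={'project_slug': project_slug})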
<|file_name|>config.py<|end_file_name|><|fim▁begin|>class Config(dict): def __init__ (self):<|fim▁hole|> self['server'] = 'chat.freenode.com' self['port'] = 6667 self['debug'] = False self['verbose'] = False self['web_host'] = '0.0.0.0' self['web_port'] = 8080 def __getattr__(self, attr): try: return self.__getitem__(attr) except KeyError: raise AttributeError(attr) def __setattr__ (self, attr, value): if self.__dict__.has_key(attr): # any normal attributes are handled normally dict.__setattr__(self, attr, value) else: self.__setitem__(attr, value)<|fim▁end|>
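The class above mirrors dict keys as attributes in both directions: reads of missing attributes fall through __getattr__ into the dict, and writes to unknown attributes land in the dict via __setattr__. A short usage sketch (Python 2, matching the snippet's era):

# Usage sketch for the Config class above.
cfg = Config()
print cfg['server']            # 'chat.freenode.com', plain dict access
cfg.web_port = 9090            # routed through __setattr__ into the dict
assert cfg['web_port'] == 9090
assert cfg.port == 6667        # missing attribute falls back to __getattr__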
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>/** * Created by Keerthikan on 29-Apr-17.<|fim▁hole|><|fim▁end|>
*/ export {expenseSpyFactory} from './expense-spy-factory'; export {compensationSpyFactory} from './compensation-spy-factory';
<|file_name|>GlobalCallbacks.cpp<|end_file_name|><|fim▁begin|>#include "stdafx.h" using namespace System::Linq; namespace JsPie { namespace Scripting { namespace V8 { GlobalCallbacks::GlobalCallbacks(IScriptEnvironment^ environment) { _pConsoleCallbacks = new ConsoleCallbacks(); auto controllerIds = Enumerable::ToList(Enumerable::Union( environment->ControllerDirectory->InputControllers->Keys, environment->ControllerDirectory->OutputControllers->Keys)); auto count = controllerIds->Count; _pControllerCallbacks = new ControllerCallbacks*[count + 1]; for (auto i = 0; i < count; i++) { auto controllerId = controllerIds[i]; ControllerInfo^ inputController; if (!environment->ControllerDirectory->InputControllers->TryGetValue(controllerId, inputController)) inputController = nullptr; ControllerInfo^ outputController; if (!environment->ControllerDirectory->OutputControllers->TryGetValue(controllerId, outputController)) outputController = nullptr; _pControllerCallbacks[i] = new ControllerCallbacks(inputController, outputController); }<|fim▁hole|> GlobalCallbacks::~GlobalCallbacks() { delete _pConsoleCallbacks; auto ppController = _pControllerCallbacks; while (true) { auto pController = *ppController++; if (pController == NULL) break; delete pController; } delete[] _pControllerCallbacks; } v8::Local<v8::ObjectTemplate> GlobalCallbacks::CreateTemplate(v8::Isolate* pIsolate) { v8::EscapableHandleScope handle_scope(pIsolate); auto global = v8::ObjectTemplate::New(pIsolate); auto ppController = _pControllerCallbacks; while (true) { auto pController = *ppController++; if (pController == NULL) break; auto oControllerInfo = pController->GetInputControllerInfo(); if (oControllerInfo == nullptr) oControllerInfo = pController->GetOutputControllerInfo(); global->Set(ToV8String(pIsolate, oControllerInfo->Name), pController->CreateTemplate(pIsolate)); } global->Set(v8::String::NewFromUtf8(pIsolate, "console"), _pConsoleCallbacks->CreateTemplate(pIsolate)); return handle_scope.Escape(global); } } } }<|fim▁end|>
_pControllerCallbacks[count] = NULL; }
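The completion above plants a NULL sentinel in the array's final slot; both the destructor and CreateTemplate then walk the array with a `while (true) { ... if (pController == NULL) break; }` loop instead of carrying a length. A standalone sketch of that sentinel-terminated iteration:

// Standalone sketch of the NULL-terminated pointer-array walk used above.
#include <cstdio>

int main() {
    int a = 1, b = 2;
    int* items[] = { &a, &b, nullptr };      // sentinel in the last slot
    for (int** pp = items; *pp != nullptr; ++pp)
        std::printf("%d\n", **pp);           // visits a then b, stops at the sentinel
    return 0;
}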
<|file_name|>main.go<|end_file_name|><|fim▁begin|>// -*- Mode: Go; indent-tabs-mode: t -*- /* * Copyright (C) 2014-2015 Canonical Ltd * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License version 3 as * published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ package main import ( "fmt" "os" "path/filepath" "strings" "syscall" "github.com/jessevdk/go-flags" "github.com/snapcore/snapd/dirs" "github.com/snapcore/snapd/osutil" "github.com/snapcore/snapd/snap" "github.com/snapcore/snapd/snap/snapenv" ) // for the tests var syscallExec = syscall.Exec var osReadlink = os.Readlink // commandline args var opts struct { Command string `long:"command" description:"use a different command like {stop,post-stop} from the app"` Hook string `long:"hook" description:"hook to run" hidden:"yes"` } func init() { // plug/slot sanitization not used nor possible from snap-exec, make it no-op snap.SanitizePlugsSlots = func(snapInfo *snap.Info) {} } func main() { if err := run(); err != nil { fmt.Fprintf(os.Stderr, "cannot snap-exec: %s\n", err) os.Exit(1) } } func parseArgs(args []string) (app string, appArgs []string, err error) { parser := flags.NewParser(&opts, flags.HelpFlag|flags.PassDoubleDash|flags.PassAfterNonOption) rest, err := parser.ParseArgs(args) if err != nil { return "", nil, err } if len(rest) == 0 { return "", nil, fmt.Errorf("need the application to run as argument") } // Catch some invalid parameter combinations, provide helpful errors if opts.Hook != "" && opts.Command != "" { return "", nil, fmt.Errorf("cannot use --hook and --command together") } if opts.Hook != "" && len(rest) > 1 { return "", nil, fmt.Errorf("too many arguments for hook %q: %s", opts.Hook, strings.Join(rest, " ")) } return rest[0], rest[1:], nil } func run() error { snapApp, extraArgs, err := parseArgs(os.Args[1:]) if err != nil { return err } // the SNAP_REVISION is set by `snap run` - we can not (easily) // find it in `snap-exec` because `snap-exec` is run inside the // confinement and (generally) can not talk to snapd revision := os.Getenv("SNAP_REVISION") // Now actually handle the dispatching if opts.Hook != "" { return execHook(snapApp, revision, opts.Hook) } return execApp(snapApp, revision, opts.Command, extraArgs) } const defaultShell = "/bin/bash" func findCommand(app *snap.AppInfo, command string) (string, error) { var cmd string switch command { case "shell": cmd = defaultShell case "complete": if app.Completer != "" { cmd = defaultShell } case "stop": cmd = app.StopCommand case "reload": cmd = app.ReloadCommand case "post-stop": cmd = app.PostStopCommand case "", "gdb", "gdbserver": cmd = app.Command default: return "", fmt.Errorf("cannot use %q command", command) } if cmd == "" { return "", fmt.Errorf("no %q command found for %q", command, app.Name) } return cmd, nil } func absoluteCommandChain(snapInfo *snap.Info, commandChain []string) []string { chain := make([]string, 0, len(commandChain)) snapMountDir := snapInfo.MountDir() for _, element := range commandChain { chain = append(chain, filepath.Join(snapMountDir, element)) } return chain } // expandEnvCmdArgs 
takes the string list of commandline arguments // and expands any $VAR with the given var from the env argument. func expandEnvCmdArgs(args []string, env osutil.Environment) []string { cmdArgs := make([]string, 0, len(args)) for _, arg := range args { maybeExpanded := os.Expand(arg, func(varName string) string { return env[varName] }) if maybeExpanded != "" { cmdArgs = append(cmdArgs, maybeExpanded) } } return cmdArgs } func completionHelper() (string, error) { exe, err := osReadlink("/proc/self/exe") if err != nil { return "", err } return filepath.Join(filepath.Dir(exe), "etelpmoc.sh"), nil } func execApp(snapApp, revision, command string, args []string) error { rev, err := snap.ParseRevision(revision) if err != nil { return fmt.Errorf("cannot parse revision %q: %s", revision, err) } snapName, appName := snap.SplitSnapApp(snapApp) info, err := snap.ReadInfo(snapName, &snap.SideInfo{ Revision: rev, }) if err != nil { return fmt.Errorf("cannot read info for %q: %s", snapName, err) } app := info.Apps[appName] if app == nil { return fmt.Errorf("cannot find app %q in %q", appName, snapName) } cmdAndArgs, err := findCommand(app, command) if err != nil { return err } // build the environment from the yaml, translating TMPDIR and // similar variables back from where they were hidden when // invoking the setuid snap-confine. env, err := osutil.OSEnvironmentUnescapeUnsafe(snapenv.PreservedUnsafePrefix) if err != nil { return err } for _, eenv := range app.EnvChain() { env.ExtendWithExpanded(eenv) } // strings.Split() is ok here because we validate all app fields and the // whitelist is pretty strict (see snap/validate.go:appContentWhitelist) // (see also overlord/snapstate/check_snap.go's normPath) tmpArgv := strings.Split(cmdAndArgs, " ") cmd := tmpArgv[0] cmdArgs := expandEnvCmdArgs(tmpArgv[1:], env) // run the command fullCmd := []string{filepath.Join(app.Snap.MountDir(), cmd)} switch command { case "shell": fullCmd[0] = defaultShell cmdArgs = nil case "complete": fullCmd[0] = defaultShell helper, err := completionHelper() if err != nil { return fmt.Errorf("cannot find completion helper: %v", err) } cmdArgs = []string{ helper, filepath.Join(app.Snap.MountDir(), app.Completer), } case "gdb": fullCmd = append(fullCmd, fullCmd[0]) fullCmd[0] = filepath.Join(dirs.CoreLibExecDir, "snap-gdb-shim") case "gdbserver": fullCmd = append(fullCmd, fullCmd[0]) fullCmd[0] = filepath.Join(dirs.CoreLibExecDir, "snap-gdbserver-shim") } fullCmd = append(fullCmd, cmdArgs...) fullCmd = append(fullCmd, args...) fullCmd = append(absoluteCommandChain(app.Snap, app.CommandChain), fullCmd...) 
if err := syscallExec(fullCmd[0], fullCmd, env.ForExec()); err != nil { return fmt.Errorf("cannot exec %q: %s", fullCmd[0], err) } // this is never reached except in tests return nil } func execHook(snapName, revision, hookName string) error { rev, err := snap.ParseRevision(revision) if err != nil { return err } info, err := snap.ReadInfo(snapName, &snap.SideInfo{ Revision: rev, }) if err != nil { return err } hook := info.Hooks[hookName] if hook == nil { return fmt.Errorf("cannot find hook %q in %q", hookName, snapName) } // build the environment // NOTE: we do not use OSEnvironmentUnescapeUnsafe, we do not // particularly want to transmit snapd exec environment details // to the hooks<|fim▁hole|>
env, err := osutil.OSEnvironment() if err != nil { return err }
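expandEnvCmdArgs in the row above relies on os.Expand to substitute $VAR references inside argument strings, dropping any argument that expands to empty. A self-contained sketch of that mechanism, with a plain map standing in for osutil.Environment:

// Sketch of the $VAR expansion used by expandEnvCmdArgs (plain map assumed).
package main

import (
	"fmt"
	"os"
)

func main() {
	env := map[string]string{"SNAP_DATA": "/var/snap/demo/1", "EMPTY": ""}
	args := []string{"--data=$SNAP_DATA", "$EMPTY"}
	out := make([]string, 0, len(args))
	for _, a := range args {
		expanded := os.Expand(a, func(name string) string { return env[name] })
		if expanded != "" { // empty expansions are dropped, as in the snippet
			out = append(out, expanded)
		}
	}
	fmt.Println(out) // [--data=/var/snap/demo/1]
}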
<|file_name|>problem002.py<|end_file_name|><|fim▁begin|>''' Problem 2 19 October 2001 Each new term in the Fibonacci sequence is generated by adding the previous two terms. By starting with 1 and 2, the first 10 terms will be: 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ... By considering the terms in the Fibonacci sequence whose values do not exceed four million, find the sum of the even-valued terms. <|fim▁hole|>@author: ahallmann ''' import unittest import timeit def generate_fibonacci_sequence(limit=0): a = 1 yield a b = 1 yield b i = 0 while(limit == 0 or a + b < limit): i = a + b a = b b = i yield i ''' By considering the terms in the Fibonacci sequence whose values do not exceed four million, find the sum of the even-valued terms. ''' def solve(limit=4000000): msum = 0 for i in generate_fibonacci_sequence(limit): if i % 2 == 0: msum += i return msum class Test(unittest.TestCase): def test_sample(self): self.assertEqual(44, solve(100)) def test_answer(self): self.assertEqual(4613732, solve()) # ----------------------------------------- def run(): return solve() if __name__ == '__main__': unittest.main(exit=False) if __name__ == '__main__': t = timeit.Timer("run()", "from __main__ import run") count = 10000 print str(t.timeit(count)) + " seconds for " + str(count) + " runs"<|fim▁end|>
---------------------------------------------------------- Created on 25.01.2012
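Since every third Fibonacci number is even, the even terms obey E(n) = 4*E(n-1) + E(n-2), giving 2, 8, 34, 144, ...; the answer can therefore be computed without generating the odd terms at all. A sketch of that identity as an independent check of solve():

# Alternative check using the even-Fibonacci recurrence E(n) = 4*E(n-1) + E(n-2).
def even_fib_sum(limit):
    a, b = 2, 8
    total = 0
    while a < limit:
        total += a
        a, b = b, 4 * b + a
    return total

assert even_fib_sum(100) == 44
assert even_fib_sum(4000000) == 4613732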
<|file_name|>main.js<|end_file_name|><|fim▁begin|>console.log("J U I loaded"); var timer; var timer2; var timer3; var counter; var cell; var curCount = 0; var gameLost = false; var multiUp = 0; var total = 10; var score = 0; var points = 100; var board = [ 0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0]; var cellTypes = [ {name: "empty", color: "white"}, {name: "normal", color: "black"}, {name: "freeze", color: "blue"}, {name: "multi", color: "green"}, {name: "max", color: "yellow"}, {name: "super", color: "red"} ]; var WHITE = 0, BLACK = 1, BLUE = 2, GREEN = 3, YELLOW = 4, RED = 5; var randomType = function() { var randomNum = Math.floor((Math.random() * 100) + 1); if (randomNum < 88) return BLACK; // 88% else if (randomNum < 91) return BLUE; // 3% else if (randomNum < 94) return GREEN; // 3% else if (randomNum < 97) return YELLOW; // 3% else return RED; // 3% }; // EFFECTS ------------------------------------------------------------- var freezeEffect = function() { clearInterval(timer2); setTimeout(function() { timer2 = setInterval(tickCell, counter); }, 2000); }; var multiEffect = function() { multiUp++; points = Math.floor((1 + (multiUp * 0.1)) * points); }; var maxEffect = function() { total++; }; var superEffect = function() { for (var i = 0; i < board.length; i++) { if (board[i] < 2) board[i] = 0; }; }; // LOSING & PRINTSTATE ------------------------------------------------ var hasLost = function() { var count = 0; for (var i = 0; i < board.length; i++) { if (board[i] > 0 && board[i] < 2) count++; if (count > total) break; }; curCount = count; return count > total; }; var printState = function() { $("#max2").text(curCount); $("#max4").text(total); $("#score2").text(score); }; // PICKCELL & FADE ---------------------------------------------------- var pickCell = function() { var randomIndex; do { randomIndex = Math.floor(Math.random() * board.length); } while (board[randomIndex] !== 0); board[randomIndex] = randomType(); var $fade = $("#cell" + randomIndex); if (board[randomIndex] > BLACK) { setTimeout(function() { $fade.animate({backgroundColor: "white"}, 700); setTimeout(function() { board[randomIndex] = 0; }, 690); }, 1000); }; }; // CLICK MOVE --------------------------------------------------------- var click = function(evt) { clickValue = parseInt(this.id.substr(4)); if (board[clickValue] > BLACK) { $(this).stop() .toggle("explode", {pieces:16}) .css({backgroundColor: "white"}) .fadeIn(); }; if (board[clickValue] === BLACK) score += points; else if (board[clickValue] === BLUE) freezeEffect(); else if (board[clickValue] === GREEN) multiEffect(); else if (board[clickValue] === YELLOW) maxEffect(); else if (board[clickValue] === RED) superEffect(); board[clickValue] = 0; render(); }; // RENDER ------------------------------------------------------------- var render = function() { if (gameLost === false) renderBoard(); function renderBoard() { board.forEach(function(cell, idx) { var el = $('#cell' + idx); el.css('background-color', cellTypes[cell].color); }); }; }; // TICKS -------------------------------------------------------------- var tick = function() { printState(); if (hasLost()) { clearInterval(timer); clearInterval(timer2); gameLost = true; var lose = function() { $(".lose").css({opacity:1}); setTimeout(function() { $(".lose").css({opacity:0}); }, 400); }; timer3 = setInterval(lose, 800); }; printState(); };<|fim▁hole|> 
pickCell(); render(); clearInterval(timer2); counter *= 0.99; timer2 = setInterval(tickCell, counter); }; // STARTGAME ---------------------------------------------------------- var startGame = function() { for (var i = 0; i < board.length; i++) { board[i] = 0; }; curCount = 0; gameLost = false; multiUp = 0; total = 10; score = 0; points = 100; counter = 800; printState(); render(); clearInterval(timer3); clearInterval(timer2); clearInterval(timer); timer = setInterval(tick, 10); timer2 = setInterval(tickCell, counter); }; // BUTTONS ------------------------------------------------------------ $("#startgame").on('click', function() { $(".one").css("display", "none"); $(".two").css("display","inline"); $(".three").css("display","none"); startGame(); }); $("#restart").on('click', function() { startGame(); }); $("#mainmenu").on('click', function() { $(".one").css("display", "inline"); $(".two").css("display","none"); $(".three").css("display","none"); }); $("#howtoplay").on('click', function() { $(".one").css("display", "none"); $(".two").css("display","none"); $(".three").css("display","inline"); }); $("#mainmenu2").on('click', function() { $(".one").css("display", "inline"); $(".two").css("display","none"); $(".three").css("display","none"); }); $("#startgame2").on('click', function() { $(".one").css("display", "none"); $(".two").css("display","inline"); $(".three").css("display","none"); startGame(); }); var audio = document.getElementById("tomb"); var mute = document.getElementById('mute'); mute.onclick = function() { audio.muted = !audio.muted; }; audio.addEventListener('ended', function() { this.currentTime = 0; this.play(); }, false); // EVENTLISTENERS ----------------------------------------------------- document.addEventListener("DOMContentLoaded", function(evt) { var cellEls = document.querySelectorAll("td"); for (var i = 0; i < board.length; i++) { cellEls[i].addEventListener("click", click); }; });<|fim▁end|>
var tickCell = function() {
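randomType() in the row above draws a weighted cell type by carving 1-100 into bands: 88% normal black cells and 3% for each special. The same idea as a reusable helper, sketched in the file's ES5 style:

// Sketch of the banded weighted draw used by randomType(), generalized.
var weightedPick = function(entries) {
  // entries: [{value: ..., weight: ...}, ...]; weights need not sum to 100
  var total = entries.reduce(function(s, e) { return s + e.weight; }, 0);
  var roll = Math.random() * total;
  for (var i = 0; i < entries.length; i++) {
    roll -= entries[i].weight;
    if (roll < 0) return entries[i].value;
  }
  return entries[entries.length - 1].value; // guard against float edge cases
};

// Same distribution as randomType(): 88% BLACK, 3% each special.
var cellDraw = weightedPick([
  {value: 1, weight: 88}, {value: 2, weight: 3},
  {value: 3, weight: 3},  {value: 4, weight: 3}, {value: 5, weight: 3}
]);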
<|file_name|>settings_dev.py<|end_file_name|><|fim▁begin|>""" Django settings for lark project. For more information on this file, see https://docs.djangoproject.com/en/dev/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/dev/ref/settings/ """ from .base import * # Build paths inside the project like this: os.path.join(BASE_DIR, ...) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = ')i4@2vfr##+zd3cn8ckw#!lebya1mk2sg@yq9boog+=ofi@hf9' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Database # https://docs.djangoproject.com/en/dev/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': {<|fim▁hole|> }, 'simple': { 'format': '%(levelname)s %(message)s' }, }, 'filters': { }, 'handlers': { 'null': { 'level': 'DEBUG', 'class': 'django.utils.log.NullHandler', }, 'console': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'simple' }, 'mail_admins': { 'level': 'ERROR', 'class': 'django.utils.log.AdminEmailHandler', # 'filters': ['special'] } }, 'loggers': { 'django': { 'handlers': ['null'], 'propagate': True, 'level': 'INFO', }, 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': False, }, 'music': { 'handlers': ['console'], 'level': 'DEBUG', # 'filters': ['special'] }, } }<|fim▁end|>
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
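The LOGGING dict above is consumed by Django's logging setup; application code never touches handlers directly and simply asks for a configured logger by name. A short usage sketch:

# Usage sketch for the LOGGING config above: the 'music' logger emits DEBUG
# and above to the console through the 'simple' formatter.
import logging

logger = logging.getLogger('music')
logger.debug('loaded %d tracks', 42)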
<|file_name|>setup_driver.py<|end_file_name|><|fim▁begin|>from distutils.core import setup setup( name = "cnccontrol-driver", description = "CNC-Control device driver",<|fim▁hole|><|fim▁end|>
author = "Michael Buesch", author_email = "[email protected]", py_modules = [ "cnccontrol_driver", ], )
<|file_name|>uniqBy.js<|end_file_name|><|fim▁begin|>import _Set from './internal/_Set.js'; import _curry2 from './internal/_curry2.js'; /** * Returns a new list containing only one copy of each element in the original * list, based upon the value returned by applying the supplied function to * each list element. Prefers the first item if the supplied function produces * the same value on two items. [`R.equals`](#equals) is used for comparison. * * @func * @memberOf R * @since v0.16.0 * @category List * @sig (a -> b) -> [a] -> [a] * @param {Function} fn A function used to produce a value to use during comparisons. * @param {Array} list The array to consider. * @return {Array} The list of unique items. * @example * * R.uniqBy(Math.abs, [-1, -5, 2, 10, 1, 2]); //=> [-1, -5, 2, 10]<|fim▁hole|> var result = []; var idx = 0; var appliedItem, item; while (idx < list.length) { item = list[idx]; appliedItem = fn(item); if (set.add(appliedItem)) { result.push(item); } idx += 1; } return result; }); export default uniqBy;<|fim▁end|>
*/ var uniqBy = _curry2(function uniqBy(fn, list) { var set = new _Set();
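Because uniqBy feeds fn's result into a _Set and keeps only the first item per distinct value, it deduplicates by any derived key, not just by the elements themselves. A usage sketch with plain functions:

// Usage sketch for uniqBy: dedupe by a derived key, keeping the first hit.
var byId = function(user) { return user.id; };
uniqBy(byId, [
  {id: 1, name: 'a'},
  {id: 1, name: 'b'},   // dropped: same id as the first entry
  {id: 2, name: 'c'}
]);
//=> [{id: 1, name: 'a'}, {id: 2, name: 'c'}]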
<|file_name|>materializer.go<|end_file_name|><|fim▁begin|>/* Copyright 2019 The Vitess Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package wrangler import ( "context" "fmt" "hash/fnv" "math" "sort" "strings" "sync" "text/template" "google.golang.org/protobuf/encoding/prototext" "google.golang.org/protobuf/proto" "vitess.io/vitess/go/json2" "vitess.io/vitess/go/sqltypes" "vitess.io/vitess/go/vt/binlog/binlogplayer" "vitess.io/vitess/go/vt/concurrency" "vitess.io/vitess/go/vt/key" "vitess.io/vitess/go/vt/log" "vitess.io/vitess/go/vt/mysqlctl/tmutils" "vitess.io/vitess/go/vt/schema" "vitess.io/vitess/go/vt/sqlparser" "vitess.io/vitess/go/vt/topo" "vitess.io/vitess/go/vt/topotools" "vitess.io/vitess/go/vt/vtctl/schematools" "vitess.io/vitess/go/vt/vtctl/workflow" "vitess.io/vitess/go/vt/vterrors" "vitess.io/vitess/go/vt/vtgate/evalengine" "vitess.io/vitess/go/vt/vtgate/vindexes" "vitess.io/vitess/go/vt/vttablet/tabletmanager/vreplication" binlogdatapb "vitess.io/vitess/go/vt/proto/binlogdata" querypb "vitess.io/vitess/go/vt/proto/query" vschemapb "vitess.io/vitess/go/vt/proto/vschema" vtctldatapb "vitess.io/vitess/go/vt/proto/vtctldata" ) type materializer struct { wr *Wrangler ms *vtctldatapb.MaterializeSettings targetVSchema *vindexes.KeyspaceSchema sourceShards []*topo.ShardInfo targetShards []*topo.ShardInfo } const ( createDDLAsCopy = "copy" createDDLAsCopyDropConstraint = "copy:drop_constraint" ) // addTablesToVSchema adds tables to an (unsharded) vschema. Depending on copyAttributes It will also add any sequence info // that is associated with a table by copying it from the vschema of the source keyspace. // For a migrate workflow we do not copy attributes since the source keyspace is just a proxy to import data into Vitess // Todo: For now we only copy sequence but later we may also want to copy other attributes like authoritative column flag and list of columns func (wr *Wrangler) addTablesToVSchema(ctx context.Context, sourceKeyspace string, targetVSchema *vschemapb.Keyspace, tables []string, copyAttributes bool) error { if targetVSchema.Tables == nil { targetVSchema.Tables = make(map[string]*vschemapb.Table) } for _, table := range tables { targetVSchema.Tables[table] = &vschemapb.Table{} } if copyAttributes { // if source keyspace is provided, copy over the sequence info. srcVSchema, err := wr.ts.GetVSchema(ctx, sourceKeyspace) if err != nil { return err } for _, table := range tables { srcTable, ok := srcVSchema.Tables[table] if ok { targetVSchema.Tables[table].AutoIncrement = srcTable.AutoIncrement } } } return nil } func shouldInclude(table string, excludes []string) bool { // We filter out internal tables elsewhere when processing SchemaDefinition // structures built from the GetSchema database related API calls. In this // case, however, the table list comes from the user via the -tables flag // so we need to filter out internal table names here in case a user has // explicitly specified some. 
// This could happen if there's some automated tooling that creates the list of // tables to explicitly specify. // But given that this should never be done in practice, we ignore the request. if schema.IsInternalOperationTableName(table) { return false } for _, t := range excludes { if t == table { return false } } return true } // MoveTables initiates moving table(s) over to another keyspace func (wr *Wrangler) MoveTables(ctx context.Context, workflow, sourceKeyspace, targetKeyspace, tableSpecs, cell, tabletTypes string, allTables bool, excludeTables string, autoStart, stopAfterCopy bool, externalCluster string) error { //FIXME validate tableSpecs, allTables, excludeTables var tables []string var externalTopo *topo.Server var err error if externalCluster != "" { // when the source is an external mysql cluster mounted using the Mount command externalTopo, err = wr.ts.OpenExternalVitessClusterServer(ctx, externalCluster) if err != nil { return err } wr.sourceTs = externalTopo log.Infof("Successfully opened external topo: %+v", externalTopo) } var vschema *vschemapb.Keyspace vschema, err = wr.ts.GetVSchema(ctx, targetKeyspace) if err != nil { return err } if vschema == nil { return fmt.Errorf("no vschema found for target keyspace %s", targetKeyspace) } if strings.HasPrefix(tableSpecs, "{") { if vschema.Tables == nil { vschema.Tables = make(map[string]*vschemapb.Table) } wrap := fmt.Sprintf(`{"tables": %s}`, tableSpecs) ks := &vschemapb.Keyspace{} if err := json2.Unmarshal([]byte(wrap), ks); err != nil { return err } for table, vtab := range ks.Tables { vschema.Tables[table] = vtab tables = append(tables, table) } } else { if len(strings.TrimSpace(tableSpecs)) > 0 { tables = strings.Split(tableSpecs, ",") } ksTables, err := wr.getKeyspaceTables(ctx, sourceKeyspace, wr.sourceTs) if err != nil { return err } if len(tables) > 0 { err = wr.validateSourceTablesExist(ctx, sourceKeyspace, ksTables, tables) if err != nil { return err } } else { if allTables { tables = ksTables } else { return fmt.Errorf("no tables to move") } } var excludeTablesList []string excludeTables = strings.TrimSpace(excludeTables) if excludeTables != "" { excludeTablesList = strings.Split(excludeTables, ",") err = wr.validateSourceTablesExist(ctx, sourceKeyspace, ksTables, excludeTablesList) if err != nil { return err } } var tables2 []string for _, t := range tables { if shouldInclude(t, excludeTablesList) { tables2 = append(tables2, t) } } tables = tables2 if len(tables) == 0 { return fmt.Errorf("no tables to move") } log.Infof("Found tables to move: %s", strings.Join(tables, ",")) if !vschema.Sharded { if err := wr.addTablesToVSchema(ctx, sourceKeyspace, vschema, tables, externalTopo == nil); err != nil { return err } } } if externalTopo == nil { // Save routing rules before vschema. If we save vschema first, and routing rules // fails to save, we may generate duplicate table errors. rules, err := topotools.GetRoutingRules(ctx, wr.ts) if err != nil { return err } for _, table := range tables { toSource := []string{sourceKeyspace + "." 
+ table} rules[table] = toSource rules[table+"@replica"] = toSource rules[table+"@rdonly"] = toSource rules[targetKeyspace+"."+table] = toSource rules[targetKeyspace+"."+table+"@replica"] = toSource rules[targetKeyspace+"."+table+"@rdonly"] = toSource rules[sourceKeyspace+"."+table] = toSource rules[sourceKeyspace+"."+table+"@replica"] = toSource rules[sourceKeyspace+"."+table+"@rdonly"] = toSource } if err := topotools.SaveRoutingRules(ctx, wr.ts, rules); err != nil { return err } if vschema != nil { // We added to the vschema. if err := wr.ts.SaveVSchema(ctx, targetKeyspace, vschema); err != nil { return err } } } if err := wr.ts.RebuildSrvVSchema(ctx, nil); err != nil { return err } ms := &vtctldatapb.MaterializeSettings{ Workflow: workflow, MaterializationIntent: vtctldatapb.MaterializationIntent_MOVETABLES, SourceKeyspace: sourceKeyspace, TargetKeyspace: targetKeyspace, Cell: cell, TabletTypes: tabletTypes, StopAfterCopy: stopAfterCopy, ExternalCluster: externalCluster, } for _, table := range tables { buf := sqlparser.NewTrackedBuffer(nil) buf.Myprintf("select * from %v", sqlparser.NewTableIdent(table)) ms.TableSettings = append(ms.TableSettings, &vtctldatapb.TableMaterializeSettings{ TargetTable: table, SourceExpression: buf.String(), CreateDdl: createDDLAsCopy, }) } mz, err := wr.prepareMaterializerStreams(ctx, ms) if err != nil { return err } tabletShards, err := wr.collectTargetStreams(ctx, mz) if err != nil { return err } migrationID, err := getMigrationID(targetKeyspace, tabletShards) if err != nil { return err } if externalCluster == "" { exists, tablets, err := wr.checkIfPreviousJournalExists(ctx, mz, migrationID) if err != nil { return err } if exists { wr.Logger().Errorf("Found a previous journal entry for %d", migrationID) msg := fmt.Sprintf("found an entry from a previous run for migration id %d in _vt.resharding_journal of tablets %s,", migrationID, strings.Join(tablets, ",")) msg += fmt.Sprintf(" please review and delete it before proceeding and restart the workflow using the Workflow %s.%s start", workflow, targetKeyspace) return fmt.Errorf(msg) } } if autoStart { return mz.startStreams(ctx) } wr.Logger().Infof("Streams will not be started since -auto_start is set to false") return nil } func (wr *Wrangler) validateSourceTablesExist(ctx context.Context, sourceKeyspace string, ksTables, tables []string) error { // validate that tables provided are present in the source keyspace var missingTables []string for _, table := range tables { if schema.IsInternalOperationTableName(table) { continue } found := false for _, ksTable := range ksTables { if table == ksTable { found = true break } } if !found { missingTables = append(missingTables, table) } } if len(missingTables) > 0 { return fmt.Errorf("table(s) not found in source keyspace %s: %s", sourceKeyspace, strings.Join(missingTables, ",")) } return nil } func (wr *Wrangler) getKeyspaceTables(ctx context.Context, ks string, ts *topo.Server) ([]string, error) { shards, err := ts.GetServingShards(ctx, ks) if err != nil { return nil, err } if len(shards) == 0 { return nil, fmt.Errorf("keyspace %s has no shards", ks) } primary := shards[0].PrimaryAlias if primary == nil { return nil, fmt.Errorf("shard does not have a primary: %v", shards[0].ShardName()) } allTables := []string{"/.*/"} ti, err := ts.GetTablet(ctx, primary) if err != nil { return nil, err } schema, err := wr.tmc.GetSchema(ctx, ti.Tablet, allTables, nil, false) if err != nil { return nil, err } log.Infof("got table schemas from source primary %v.", primary) var 
sourceTables []string for _, td := range schema.TableDefinitions { sourceTables = append(sourceTables, td.Name) } return sourceTables, nil } func (wr *Wrangler) checkIfPreviousJournalExists(ctx context.Context, mz *materializer, migrationID int64) (bool, []string, error) { forAllSources := func(f func(*topo.ShardInfo) error) error { var wg sync.WaitGroup allErrors := &concurrency.AllErrorRecorder{} for _, sourceShard := range mz.sourceShards { wg.Add(1) go func(sourceShard *topo.ShardInfo) { defer wg.Done() if err := f(sourceShard); err != nil { allErrors.RecordError(err) } }(sourceShard) } wg.Wait() return allErrors.AggrError(vterrors.Aggregate) } var ( mu sync.Mutex exists bool tablets []string ws = workflow.NewServer(wr.ts, wr.tmc) ) err := forAllSources(func(si *topo.ShardInfo) error { tablet, err := wr.ts.GetTablet(ctx, si.PrimaryAlias) if err != nil { return err } if tablet == nil { return nil } _, exists, err = ws.CheckReshardingJournalExistsOnTablet(ctx, tablet.Tablet, migrationID) if err != nil { return err } if exists { mu.Lock() defer mu.Unlock() tablets = append(tablets, tablet.AliasString()) } return nil }) return exists, tablets, err } // CreateLookupVindex creates a lookup vindex and sets up the backfill. func (wr *Wrangler) CreateLookupVindex(ctx context.Context, keyspace string, specs *vschemapb.Keyspace, cell, tabletTypes string, continueAfterCopyWithOwner bool) error { ms, sourceVSchema, targetVSchema, err := wr.prepareCreateLookup(ctx, keyspace, specs, continueAfterCopyWithOwner) if err != nil { return err } if err := wr.ts.SaveVSchema(ctx, ms.TargetKeyspace, targetVSchema); err != nil { return err } ms.Cell = cell ms.TabletTypes = tabletTypes if err := wr.Materialize(ctx, ms); err != nil { return err } if err := wr.ts.SaveVSchema(ctx, keyspace, sourceVSchema); err != nil { return err } return wr.ts.RebuildSrvVSchema(ctx, nil) } // prepareCreateLookup performs the preparatory steps for creating a lookup vindex. func (wr *Wrangler) prepareCreateLookup(ctx context.Context, keyspace string, specs *vschemapb.Keyspace, continueAfterCopyWithOwner bool) (ms *vtctldatapb.MaterializeSettings, sourceVSchema, targetVSchema *vschemapb.Keyspace, err error) { // Important variables are pulled out here. 
var ( // lookup vindex info vindexName string vindex *vschemapb.Vindex targetKeyspace string targetTableName string vindexFromCols []string vindexToCol string // source table info sourceTableName string // sourceTable is the supplied table info sourceTable *vschemapb.Table // sourceVSchemaTable is the table info present in the vschema sourceVSchemaTable *vschemapb.Table // sourceVindexColumns are computed from the input sourceTable sourceVindexColumns []string // target table info createDDL string materializeQuery string ) // Validate input vindex if len(specs.Vindexes) != 1 { return nil, nil, nil, fmt.Errorf("only one vindex must be specified in the specs: %v", specs.Vindexes) } for name, vi := range specs.Vindexes { vindexName = name vindex = vi } if !strings.Contains(vindex.Type, "lookup") { return nil, nil, nil, fmt.Errorf("vindex %s is not a lookup type", vindex.Type) } strs := strings.Split(vindex.Params["table"], ".") if len(strs) != 2 { return nil, nil, nil, fmt.Errorf("vindex 'table' must be <keyspace>.<table>: %v", vindex) } targetKeyspace, targetTableName = strs[0], strs[1] vindexFromCols = strings.Split(vindex.Params["from"], ",") if strings.Contains(vindex.Type, "unique") { if len(vindexFromCols) != 1 { return nil, nil, nil, fmt.Errorf("unique vindex 'from' should have only one column: %v", vindex) } } else { if len(vindexFromCols) < 2 { return nil, nil, nil, fmt.Errorf("non-unique vindex 'from' should have more than one column: %v", vindex) } } vindexToCol = vindex.Params["to"] // Make the vindex write_only. If one exists already in the vschema, // it will need to match this vindex exactly, including the write_only setting. vindex.Params["write_only"] = "true" // See if we can create the vindex without errors. if _, err := vindexes.CreateVindex(vindex.Type, vindexName, vindex.Params); err != nil { return nil, nil, nil, err } // Validate input table if len(specs.Tables) != 1 { return nil, nil, nil, fmt.Errorf("exactly one table must be specified in the specs: %v", specs.Tables) } // Loop executes once. 
for k, ti := range specs.Tables { if len(ti.ColumnVindexes) != 1 { return nil, nil, nil, fmt.Errorf("exactly one ColumnVindex must be specified for the table: %v", specs.Tables) } sourceTableName = k sourceTable = ti } // Validate input table and vindex consistency if sourceTable.ColumnVindexes[0].Name != vindexName { return nil, nil, nil, fmt.Errorf("ColumnVindex name must match vindex name: %s vs %s", sourceTable.ColumnVindexes[0].Name, vindexName) } if vindex.Owner != "" && vindex.Owner != sourceTableName { return nil, nil, nil, fmt.Errorf("vindex owner must match table name: %v vs %v", vindex.Owner, sourceTableName) } if len(sourceTable.ColumnVindexes[0].Columns) != 0 { sourceVindexColumns = sourceTable.ColumnVindexes[0].Columns } else { if sourceTable.ColumnVindexes[0].Column == "" { return nil, nil, nil, fmt.Errorf("at least one column must be specified in ColumnVindexes: %v", sourceTable.ColumnVindexes) } sourceVindexColumns = []string{sourceTable.ColumnVindexes[0].Column} } if len(sourceVindexColumns) != len(vindexFromCols) { return nil, nil, nil, fmt.Errorf("length of table columns differs from length of vindex columns: %v vs %v", sourceVindexColumns, vindexFromCols) } // Validate against source vschema sourceVSchema, err = wr.ts.GetVSchema(ctx, keyspace) if err != nil { return nil, nil, nil, err } if sourceVSchema.Vindexes == nil { sourceVSchema.Vindexes = make(map[string]*vschemapb.Vindex) } // If source and target keyspaces are the same, make vschemas point to the same object. if keyspace == targetKeyspace { targetVSchema = sourceVSchema } else { targetVSchema, err = wr.ts.GetVSchema(ctx, targetKeyspace) if err != nil { return nil, nil, nil, err } } if targetVSchema.Vindexes == nil { targetVSchema.Vindexes = make(map[string]*vschemapb.Vindex) } if targetVSchema.Tables == nil { targetVSchema.Tables = make(map[string]*vschemapb.Table) } if existing, ok := sourceVSchema.Vindexes[vindexName]; ok { if !proto.Equal(existing, vindex) { return nil, nil, nil, fmt.Errorf("a conflicting vindex named %s already exists in the source vschema", vindexName) } } sourceVSchemaTable = sourceVSchema.Tables[sourceTableName] if sourceVSchemaTable == nil { if !schema.IsInternalOperationTableName(sourceTableName) { return nil, nil, nil, fmt.Errorf("source table %s not found in vschema", sourceTableName) } } for _, colVindex := range sourceVSchemaTable.ColumnVindexes { // For a conflict, the vindex name and column should match. 
if colVindex.Name != vindexName { continue } colName := colVindex.Column if len(colVindex.Columns) != 0 { colName = colVindex.Columns[0] } if colName == sourceVindexColumns[0] { return nil, nil, nil, fmt.Errorf("ColumnVindex for table %v already exists: %v, please remove it and try again", sourceTableName, colName) } } // Validate against source schema sourceShards, err := wr.ts.GetServingShards(ctx, keyspace) if err != nil { return nil, nil, nil, err } onesource := sourceShards[0] if onesource.PrimaryAlias == nil { return nil, nil, nil, fmt.Errorf("source shard has no primary: %v", onesource.ShardName()) } tableSchema, err := schematools.GetSchema(ctx, wr.ts, wr.tmc, onesource.PrimaryAlias, []string{sourceTableName}, nil, false) if err != nil { return nil, nil, nil, err } if len(tableSchema.TableDefinitions) != 1 { return nil, nil, nil, fmt.Errorf("unexpected number of tables returned from schema: %v", tableSchema.TableDefinitions) } // Generate "create table" statement lines := strings.Split(tableSchema.TableDefinitions[0].Schema, "\n") if len(lines) < 3 { // Unreachable return nil, nil, nil, fmt.Errorf("schema looks incorrect: %s, expecting at least four lines", tableSchema.TableDefinitions[0].Schema) } var modified []string modified = append(modified, strings.Replace(lines[0], sourceTableName, targetTableName, 1)) for i := range sourceVindexColumns { line, err := generateColDef(lines, sourceVindexColumns[i], vindexFromCols[i]) if err != nil { return nil, nil, nil, err } modified = append(modified, line) } if vindex.Params["data_type"] == "" || strings.EqualFold(vindex.Type, "consistent_lookup_unique") || strings.EqualFold(vindex.Type, "consistent_lookup") { modified = append(modified, fmt.Sprintf(" `%s` varbinary(128),", vindexToCol)) } else { modified = append(modified, fmt.Sprintf(" `%s` `%s`,", vindexToCol, vindex.Params["data_type"])) } buf := sqlparser.NewTrackedBuffer(nil) fmt.Fprintf(buf, " PRIMARY KEY (") prefix := "" for _, col := range vindexFromCols { fmt.Fprintf(buf, "%s`%s`", prefix, col) prefix = ", " } fmt.Fprintf(buf, ")") modified = append(modified, buf.String()) modified = append(modified, ")") createDDL = strings.Join(modified, "\n") // Generate vreplication query buf = sqlparser.NewTrackedBuffer(nil) buf.Myprintf("select ") for i := range vindexFromCols { buf.Myprintf("%v as %v, ", sqlparser.NewColIdent(sourceVindexColumns[i]), sqlparser.NewColIdent(vindexFromCols[i])) } if strings.EqualFold(vindexToCol, "keyspace_id") || strings.EqualFold(vindex.Type, "consistent_lookup_unique") || strings.EqualFold(vindex.Type, "consistent_lookup") { buf.Myprintf("keyspace_id() as %v ", sqlparser.NewColIdent(vindexToCol)) } else { buf.Myprintf("%v as %v ", sqlparser.NewColIdent(vindexToCol), sqlparser.NewColIdent(vindexToCol)) } buf.Myprintf("from %v", sqlparser.NewTableIdent(sourceTableName)) if vindex.Owner != "" { // Only backfill buf.Myprintf(" group by ") for i := range vindexFromCols { buf.Myprintf("%v, ", sqlparser.NewColIdent(vindexFromCols[i])) } buf.Myprintf("%v", sqlparser.NewColIdent(vindexToCol)) } materializeQuery = buf.String() // Update targetVSchema var targetTable *vschemapb.Table if targetVSchema.Sharded { // Choose a primary vindex type for target table based on source specs var targetVindexType string var targetVindex *vschemapb.Vindex for _, field := range tableSchema.TableDefinitions[0].Fields { if sourceVindexColumns[0] == field.Name { targetVindexType, err = vindexes.ChooseVindexForType(field.Type) if err != nil { return nil, nil, nil, err } targetVindex = 
&vschemapb.Vindex{ Type: targetVindexType, } break } } if targetVindex == nil { // Unreachable. We validated column names when generating the DDL. return nil, nil, nil, fmt.Errorf("column %s not found in schema %v", sourceVindexColumns[0], tableSchema.TableDefinitions[0]) } if existing, ok := targetVSchema.Vindexes[targetVindexType]; ok { if !proto.Equal(existing, targetVindex) { return nil, nil, nil, fmt.Errorf("a conflicting vindex named %v already exists in the target vschema", targetVindexType) } } else { targetVSchema.Vindexes[targetVindexType] = targetVindex } targetTable = &vschemapb.Table{ ColumnVindexes: []*vschemapb.ColumnVindex{{ Column: vindexFromCols[0], Name: targetVindexType, }}, } } else { targetTable = &vschemapb.Table{} } if existing, ok := targetVSchema.Tables[targetTableName]; ok { if !proto.Equal(existing, targetTable) { return nil, nil, nil, fmt.Errorf("a conflicting table named %v already exists in the target vschema", targetTableName) }<|fim▁hole|> } else { targetVSchema.Tables[targetTableName] = targetTable } ms = &vtctldatapb.MaterializeSettings{ Workflow: targetTableName + "_vdx", MaterializationIntent: vtctldatapb.MaterializationIntent_CREATELOOKUPINDEX, SourceKeyspace: keyspace, TargetKeyspace: targetKeyspace, StopAfterCopy: vindex.Owner != "" && !continueAfterCopyWithOwner, TableSettings: []*vtctldatapb.TableMaterializeSettings{{ TargetTable: targetTableName, SourceExpression: materializeQuery, CreateDdl: createDDL, }}, } // Update sourceVSchema sourceVSchema.Vindexes[vindexName] = vindex sourceVSchemaTable.ColumnVindexes = append(sourceVSchemaTable.ColumnVindexes, sourceTable.ColumnVindexes[0]) return ms, sourceVSchema, targetVSchema, nil } func generateColDef(lines []string, sourceVindexCol, vindexFromCol string) (string, error) { source := fmt.Sprintf("`%s`", sourceVindexCol) target := fmt.Sprintf("`%s`", vindexFromCol) for _, line := range lines[1:] { if strings.Contains(line, source) { line = strings.Replace(line, source, target, 1) line = strings.Replace(line, " AUTO_INCREMENT", "", 1) line = strings.Replace(line, " DEFAULT NULL", "", 1) return line, nil } } return "", fmt.Errorf("column %s not found in schema %v", sourceVindexCol, lines) } // ExternalizeVindex externalizes a lookup vindex that's finished backfilling or has caught up. func (wr *Wrangler) ExternalizeVindex(ctx context.Context, qualifiedVindexName string) error { splits := strings.Split(qualifiedVindexName, ".") if len(splits) != 2 { return fmt.Errorf("vindex name should be of the form keyspace.vindex: %s", qualifiedVindexName) } sourceKeyspace, vindexName := splits[0], splits[1] sourceVSchema, err := wr.ts.GetVSchema(ctx, sourceKeyspace) if err != nil { return err } sourceVindex := sourceVSchema.Vindexes[vindexName] if sourceVindex == nil { return fmt.Errorf("vindex %s not found in vschema", qualifiedVindexName) } qualifiedTableName := sourceVindex.Params["table"] splits = strings.Split(qualifiedTableName, ".") if len(splits) != 2 { return fmt.Errorf("table name in vindex should be of the form keyspace.table: %s", qualifiedTableName) } targetKeyspace, targetTableName := splits[0], splits[1] workflow := targetTableName + "_vdx" targetShards, err := wr.ts.GetServingShards(ctx, targetKeyspace) if err != nil { return err } // Create a parallelizer function. 
	forAllTargets := func(f func(*topo.ShardInfo) error) error {
		var wg sync.WaitGroup
		allErrors := &concurrency.AllErrorRecorder{}
		for _, targetShard := range targetShards {
			wg.Add(1)
			go func(targetShard *topo.ShardInfo) {
				defer wg.Done()
				if err := f(targetShard); err != nil {
					allErrors.RecordError(err)
				}
			}(targetShard)
		}
		wg.Wait()
		return allErrors.AggrError(vterrors.Aggregate)
	}

	err = forAllTargets(func(targetShard *topo.ShardInfo) error {
		targetPrimary, err := wr.ts.GetTablet(ctx, targetShard.PrimaryAlias)
		if err != nil {
			return err
		}
		p3qr, err := wr.tmc.VReplicationExec(ctx, targetPrimary.Tablet, fmt.Sprintf("select id, state, message, source from _vt.vreplication where workflow=%s and db_name=%s", encodeString(workflow), encodeString(targetPrimary.DbName())))
		if err != nil {
			return err
		}
		qr := sqltypes.Proto3ToResult(p3qr)
		for _, row := range qr.Rows {
			id, err := evalengine.ToInt64(row[0])
			if err != nil {
				return err
			}
			state := row[1].ToString()
			message := row[2].ToString()
			var bls binlogdatapb.BinlogSource
			sourceBytes, err := row[3].ToBytes()
			if err != nil {
				return err
			}
			if err := prototext.Unmarshal(sourceBytes, &bls); err != nil {
				return err
			}
			if sourceVindex.Owner == "" || !bls.StopAfterCopy {
				// If there's no owner or we've requested that the workflow NOT be stopped
				// after the copy phase completes, then all streams need to be running.
				if state != binlogplayer.BlpRunning {
					return fmt.Errorf("stream %d for %v.%v is not in Running state: %v", id, targetShard.Keyspace(), targetShard.ShardName(), state)
				}
			} else {
				// If there is an owner, all streams need to be stopped after copy.
				if state != binlogplayer.BlpStopped || !strings.Contains(message, "Stopped after copy") {
					return fmt.Errorf("stream %d for %v.%v is not in Stopped after copy state: %v, %v", id, targetShard.Keyspace(), targetShard.ShardName(), state, message)
				}
			}
		}
		return nil
	})
	if err != nil {
		return err
	}

	if sourceVindex.Owner != "" {
		// If there is an owner, we have to delete the streams.
		err := forAllTargets(func(targetShard *topo.ShardInfo) error {
			targetPrimary, err := wr.ts.GetTablet(ctx, targetShard.PrimaryAlias)
			if err != nil {
				return err
			}
			query := fmt.Sprintf("delete from _vt.vreplication where db_name=%s and workflow=%s", encodeString(targetPrimary.DbName()), encodeString(workflow))
			_, err = wr.tmc.VReplicationExec(ctx, targetPrimary.Tablet, query)
			if err != nil {
				return err
			}
			return nil
		})
		if err != nil {
			return err
		}
	}

	// Remove the write_only param and save the source vschema.
	delete(sourceVindex.Params, "write_only")
	if err := wr.ts.SaveVSchema(ctx, sourceKeyspace, sourceVSchema); err != nil {
		return err
	}
	return wr.ts.RebuildSrvVSchema(ctx, nil)
}

// collectTargetStreams returns the workflow's stream identifiers, one "shard:id" string per target stream.
func (wr *Wrangler) collectTargetStreams(ctx context.Context, mz *materializer) ([]string, error) {
	var shardTablets []string
	var mu sync.Mutex
	err := mz.forAllTargets(func(target *topo.ShardInfo) error {
		var qrproto *querypb.QueryResult
		var id int64
		var err error
		targetPrimary, err := mz.wr.ts.GetTablet(ctx, target.PrimaryAlias)
		if err != nil {
			return vterrors.Wrapf(err, "GetTablet(%v) failed", target.PrimaryAlias)
		}
		query := fmt.Sprintf("select id from _vt.vreplication where db_name=%s and workflow=%s", encodeString(targetPrimary.DbName()), encodeString(mz.ms.Workflow))
		if qrproto, err = mz.wr.tmc.VReplicationExec(ctx, targetPrimary.Tablet, query); err != nil {
			return vterrors.Wrapf(err, "VReplicationExec(%v, %s)", targetPrimary.Tablet, query)
		}
		qr := sqltypes.Proto3ToResult(qrproto)
		for i := 0; i < len(qr.Rows); i++ {
			id, err = evalengine.ToInt64(qr.Rows[i][0])
			if err != nil {
				return err
			}
			mu.Lock()
			shardTablets = append(shardTablets, fmt.Sprintf("%s:%d", target.ShardName(), id))
			mu.Unlock()
		}
		return nil
	})
	if err != nil {
		return nil, err
	}
	return shardTablets, nil
}

// getMigrationID produces a reproducible hash based on the input parameters.
func getMigrationID(targetKeyspace string, shardTablets []string) (int64, error) {
	sort.Strings(shardTablets)
	hasher := fnv.New64()
	hasher.Write([]byte(targetKeyspace))
	for _, str := range shardTablets {
		hasher.Write([]byte(str))
	}
	// Convert to int64 after dropping the highest bit.
	return int64(hasher.Sum64() & math.MaxInt64), nil
}

func (wr *Wrangler) prepareMaterializerStreams(ctx context.Context, ms *vtctldatapb.MaterializeSettings) (*materializer, error) {
	if err := wr.validateNewWorkflow(ctx, ms.TargetKeyspace, ms.Workflow); err != nil {
		return nil, err
	}
	mz, err := wr.buildMaterializer(ctx, ms)
	if err != nil {
		return nil, err
	}
	if err := mz.deploySchema(ctx); err != nil {
		return nil, err
	}
	insertMap := make(map[string]string, len(mz.targetShards))
	for _, targetShard := range mz.targetShards {
		inserts, err := mz.generateInserts(ctx, targetShard)
		if err != nil {
			return nil, err
		}
		insertMap[targetShard.ShardName()] = inserts
	}
	if err := mz.createStreams(ctx, insertMap); err != nil {
		return nil, err
	}
	return mz, nil
}

// Materialize performs the steps needed to materialize a list of tables based on the materialization specs.
func (wr *Wrangler) Materialize(ctx context.Context, ms *vtctldatapb.MaterializeSettings) error {
	mz, err := wr.prepareMaterializerStreams(ctx, ms)
	if err != nil {
		return err
	}
	return mz.startStreams(ctx)
}

func (wr *Wrangler) buildMaterializer(ctx context.Context, ms *vtctldatapb.MaterializeSettings) (*materializer, error) {
	vschema, err := wr.ts.GetVSchema(ctx, ms.TargetKeyspace)
	if err != nil {
		return nil, err
	}
	targetVSchema, err := vindexes.BuildKeyspaceSchema(vschema, ms.TargetKeyspace)
	if err != nil {
		return nil, err
	}
	if targetVSchema.Keyspace.Sharded {
		for _, ts := range ms.TableSettings {
			if targetVSchema.Tables[ts.TargetTable] == nil {
				return nil, fmt.Errorf("table %s not found in vschema for keyspace %s", ts.TargetTable, ms.TargetKeyspace)
			}
		}
	}
	sourceShards, err := wr.sourceTs.GetServingShards(ctx, ms.SourceKeyspace)
	if err != nil {
		return nil, err
	}
	targetShards, err := wr.ts.GetServingShards(ctx, ms.TargetKeyspace)
	if err != nil {
		return nil, err
	}
	return &materializer{
		wr:            wr,
		ms:            ms,
		targetVSchema: targetVSchema,
		sourceShards:  sourceShards,
		targetShards:  targetShards,
	}, nil
}

func (mz *materializer) getSourceTableDDLs(ctx context.Context) (map[string]string, error) {
	sourceDDLs := make(map[string]string)
	allTables := []string{"/.*/"}

	sourcePrimary := mz.sourceShards[0].PrimaryAlias
	if sourcePrimary == nil {
		return nil, fmt.Errorf("source shard must have a primary for copying schema: %v", mz.sourceShards[0].ShardName())
	}

	ti, err := mz.wr.sourceTs.GetTablet(ctx, sourcePrimary)
	if err != nil {
		return nil, err
	}
	sourceSchema, err := mz.wr.tmc.GetSchema(ctx, ti.Tablet, allTables, nil, false)
	if err != nil {
		return nil, err
	}

	for _, td := range sourceSchema.TableDefinitions {
		sourceDDLs[td.Name] = td.Schema
	}
	return sourceDDLs, nil
}

func (mz *materializer) deploySchema(ctx context.Context) error {
	var sourceDDLs map[string]string
	var mu sync.Mutex

	return mz.forAllTargets(func(target *topo.ShardInfo) error {
		allTables := []string{"/.*/"}
		hasTargetTable := map[string]bool{}

		targetSchema, err := schematools.GetSchema(ctx, mz.wr.ts, mz.wr.tmc, target.PrimaryAlias, allTables, nil, false)
		if err != nil {
			return err
		}

		for _, td := range targetSchema.TableDefinitions {
			hasTargetTable[td.Name] = true
		}

		targetTablet, err := mz.wr.ts.GetTablet(ctx, target.PrimaryAlias)
		if err != nil {
			return err
		}

		var applyDDLs []string
		for _, ts := range mz.ms.TableSettings {
			if hasTargetTable[ts.TargetTable] {
				// Table already exists.
				continue
			}
			if ts.CreateDdl == "" {
				return fmt.Errorf("target table %v does not exist and there is no create ddl defined", ts.TargetTable)
			}

			var err error
			mu.Lock()
			if len(sourceDDLs) == 0 {
				// Fetch the source DDLs only once, and lazily: they are needed only
				// when we have to copy the schema from source to target. We copy
				// schemas from primaries on the source keyspace, and we have seen
				// use cases where the user has only a replica (no primary) in the
				// source keyspace.
				sourceDDLs, err = mz.getSourceTableDDLs(ctx)
			}
			mu.Unlock()
			if err != nil {
				log.Errorf("Error getting DDLs of source tables: %s", err.Error())
				return err
			}

			createDDL := ts.CreateDdl
			if createDDL == createDDLAsCopy || createDDL == createDDLAsCopyDropConstraint {
				if ts.SourceExpression != "" {
					// Check for table if non-empty SourceExpression.
					sourceTableName, err := sqlparser.TableFromStatement(ts.SourceExpression)
					if err != nil {
						return err
					}
					if sourceTableName.Name.String() != ts.TargetTable {
						return fmt.Errorf("source and target table names must match for copying schema: %v vs %v", sqlparser.String(sourceTableName), ts.TargetTable)
					}
				}

				ddl, ok := sourceDDLs[ts.TargetTable]
				if !ok {
					return fmt.Errorf("source table %v does not exist", ts.TargetTable)
				}

				if createDDL == createDDLAsCopyDropConstraint {
					strippedDDL, err := stripTableConstraints(ddl)
					if err != nil {
						return err
					}
					ddl = strippedDDL
				}
				createDDL = ddl
			}

			applyDDLs = append(applyDDLs, createDDL)
		}

		if len(applyDDLs) > 0 {
			sql := strings.Join(applyDDLs, ";\n")

			_, err = mz.wr.tmc.ApplySchema(ctx, targetTablet.Tablet, &tmutils.SchemaChange{
				SQL:              sql,
				Force:            false,
				AllowReplication: true,
				SQLMode:          vreplication.SQLMode,
			})
			if err != nil {
				return err
			}
		}

		return nil
	})
}

func stripTableConstraints(ddl string) (string, error) {
	ast, err := sqlparser.ParseStrictDDL(ddl)
	if err != nil {
		return "", err
	}

	stripConstraints := func(cursor *sqlparser.Cursor) bool {
		switch node := cursor.Node().(type) {
		case sqlparser.DDLStatement:
			if node.GetTableSpec() != nil {
				node.GetTableSpec().Constraints = nil
			}
		}
		return true
	}

	noConstraintAST := sqlparser.Rewrite(ast, stripConstraints, nil)
	newDDL := sqlparser.String(noConstraintAST)

	return newDDL, nil
}

func (mz *materializer) generateInserts(ctx context.Context, targetShard *topo.ShardInfo) (string, error) {
	ig := vreplication.NewInsertGenerator(binlogplayer.BlpStopped, "{{.dbname}}")

	for _, sourceShard := range mz.sourceShards {
		// Don't create streams from sources which won't contain data for the target shard.
		// We only do this for MoveTables for now, since it doesn't hold for materialize
		// flows, where the target's sharding key might differ from that of the source.
		if mz.ms.MaterializationIntent == vtctldatapb.MaterializationIntent_MOVETABLES &&
			!key.KeyRangesIntersect(sourceShard.KeyRange, targetShard.KeyRange) {
			continue
		}
		bls := &binlogdatapb.BinlogSource{
			Keyspace:        mz.ms.SourceKeyspace,
			Shard:           sourceShard.ShardName(),
			Filter:          &binlogdatapb.Filter{},
			StopAfterCopy:   mz.ms.StopAfterCopy,
			ExternalCluster: mz.ms.ExternalCluster,
		}
		for _, ts := range mz.ms.TableSettings {
			rule := &binlogdatapb.Rule{
				Match: ts.TargetTable,
			}

			if ts.SourceExpression == "" {
				bls.Filter.Rules = append(bls.Filter.Rules, rule)
				continue
			}

			// Validate non-empty query.
			stmt, err := sqlparser.Parse(ts.SourceExpression)
			if err != nil {
				return "", err
			}
			sel, ok := stmt.(*sqlparser.Select)
			if !ok {
				return "", fmt.Errorf("unrecognized statement: %s", ts.SourceExpression)
			}
			filter := ts.SourceExpression
			if mz.targetVSchema.Keyspace.Sharded && mz.targetVSchema.Tables[ts.TargetTable].Type != vindexes.TypeReference {
				cv, err := vindexes.FindBestColVindex(mz.targetVSchema.Tables[ts.TargetTable])
				if err != nil {
					return "", err
				}
				mappedCols := make([]*sqlparser.ColName, 0, len(cv.Columns))
				for _, col := range cv.Columns {
					colName, err := matchColInSelect(col, sel)
					if err != nil {
						return "", err
					}
					mappedCols = append(mappedCols, colName)
				}
				subExprs := make(sqlparser.SelectExprs, 0, len(mappedCols)+2)
				for _, mappedCol := range mappedCols {
					subExprs = append(subExprs, &sqlparser.AliasedExpr{Expr: mappedCol})
				}
				vindexName := fmt.Sprintf("%s.%s", mz.ms.TargetKeyspace, cv.Name)
				subExprs = append(subExprs, &sqlparser.AliasedExpr{Expr: sqlparser.NewStrLiteral(vindexName)})
				subExprs = append(subExprs, &sqlparser.AliasedExpr{Expr: sqlparser.NewStrLiteral("{{.keyrange}}")})
				inKeyRange := &sqlparser.FuncExpr{
					Name:  sqlparser.NewColIdent("in_keyrange"),
					Exprs: subExprs,
				}
				if sel.Where != nil {
					sel.Where = &sqlparser.Where{
						Type: sqlparser.WhereClause,
						Expr: &sqlparser.AndExpr{
							Left:  inKeyRange,
							Right: sel.Where.Expr,
						},
					}
				} else {
					sel.Where = &sqlparser.Where{
						Type: sqlparser.WhereClause,
						Expr: inKeyRange,
					}
				}

				filter = sqlparser.String(sel)
			}

			rule.Filter = filter
			bls.Filter.Rules = append(bls.Filter.Rules, rule)
		}
		ig.AddRow(mz.ms.Workflow, bls, "", mz.ms.Cell, mz.ms.TabletTypes)
	}
	return ig.String(), nil
}

func matchColInSelect(col sqlparser.ColIdent, sel *sqlparser.Select) (*sqlparser.ColName, error) {
	for _, selExpr := range sel.SelectExprs {
		switch selExpr := selExpr.(type) {
		case *sqlparser.StarExpr:
			return &sqlparser.ColName{Name: col}, nil
		case *sqlparser.AliasedExpr:
			match := selExpr.As
			if match.IsEmpty() {
				if colExpr, ok := selExpr.Expr.(*sqlparser.ColName); ok {
					match = colExpr.Name
				} else {
					// Cannot match against a complex expression.
					continue
				}
			}
			if match.Equal(col) {
				colExpr, ok := selExpr.Expr.(*sqlparser.ColName)
				if !ok {
					return nil, fmt.Errorf("vindex column cannot be a complex expression: %v", sqlparser.String(selExpr))
				}
				return colExpr, nil
			}
		default:
			return nil, fmt.Errorf("unsupported select expression: %v", sqlparser.String(selExpr))
		}
	}
	return nil, fmt.Errorf("could not find vindex column %v", sqlparser.String(col))
}

func (mz *materializer) createStreams(ctx context.Context, insertsMap map[string]string) error {
	return mz.forAllTargets(func(target *topo.ShardInfo) error {
		inserts := insertsMap[target.ShardName()]
		targetPrimary, err := mz.wr.ts.GetTablet(ctx, target.PrimaryAlias)
		if err != nil {
			return vterrors.Wrapf(err, "GetTablet(%v) failed", target.PrimaryAlias)
		}
		buf := &strings.Builder{}
		t := template.Must(template.New("").Parse(inserts))
		input := map[string]string{
			"keyrange": key.KeyRangeString(target.KeyRange),
			"dbname":   targetPrimary.DbName(),
		}
		if err := t.Execute(buf, input); err != nil {
			return err
		}
		if _, err := mz.wr.TabletManagerClient().VReplicationExec(ctx, targetPrimary.Tablet, buf.String()); err != nil {
			return err
		}
		return nil
	})
}

func (mz *materializer) startStreams(ctx context.Context) error {
	return mz.forAllTargets(func(target *topo.ShardInfo) error {
		targetPrimary, err := mz.wr.ts.GetTablet(ctx, target.PrimaryAlias)
		if err != nil {
			return vterrors.Wrapf(err, "GetTablet(%v) failed", target.PrimaryAlias)
		}
		query := fmt.Sprintf("update _vt.vreplication set state='Running' where db_name=%s and workflow=%s", encodeString(targetPrimary.DbName()), encodeString(mz.ms.Workflow))
		if _, err := mz.wr.tmc.VReplicationExec(ctx, targetPrimary.Tablet, query); err != nil {
			return vterrors.Wrapf(err, "VReplicationExec(%v, %s)", targetPrimary.Tablet, query)
		}
		return nil
	})
}

func (mz *materializer) forAllTargets(f func(*topo.ShardInfo) error) error {
	var wg sync.WaitGroup
	allErrors := &concurrency.AllErrorRecorder{}
	for _, target := range mz.targetShards {
		wg.Add(1)
		go func(target *topo.ShardInfo) {
			defer wg.Done()
			if err := f(target); err != nil {
				allErrors.RecordError(err)
			}
		}(target)
	}
	wg.Wait()
	return allErrors.AggrError(vterrors.Aggregate)
}<|fim▁end|>
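The fan-out shape used by forAllTargets and by the local parallelizer in ExternalizeVindex (one goroutine per target shard, a goroutine-safe error recorder, aggregation after wg.Wait()) is worth seeing in isolation. Below is a minimal, self-contained sketch; every name in it is illustrative rather than taken from the Vitess codebase, and errors.Join stands in for Vitess's concurrency.AllErrorRecorder.

package main

import (
	"errors"
	"fmt"
	"sync"
)

// forAll runs f once per item in parallel and aggregates any failures.
// A simplified stand-in for the forAllTargets helpers above.
func forAll[T any](items []T, f func(T) error) error {
	var (
		wg   sync.WaitGroup
		mu   sync.Mutex
		errs []error
	)
	for _, item := range items {
		wg.Add(1)
		go func(it T) {
			defer wg.Done()
			if err := f(it); err != nil {
				mu.Lock() // the shared error slice needs a goroutine-safe recorder
				errs = append(errs, err)
				mu.Unlock()
			}
		}(item)
	}
	wg.Wait()
	return errors.Join(errs...) // nil when every call succeeded
}

func main() {
	shards := []string{"-80", "80-"}
	err := forAll(shards, func(shard string) error {
		if shard == "80-" {
			return fmt.Errorf("shard %s: primary not serving", shard)
		}
		return nil
	})
	fmt.Println(err) // prints: shard 80-: primary not serving
}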
<|file_name|>index.js<|end_file_name|><|fim▁begin|>const Koa = require('koa')
const screenshot = require('./screenshot')
const app = new Koa()

app.use(async ctx => {
  var url = ctx.query.url
  console.log('goto:', url)
  if (!/^https?:\/\/.+/.test(url)) {
    ctx.body = 'invalid url'
  } else {
    if (!isNaN(ctx.query.wait)) {
      ctx.query.wait = ~~ctx.query.wait
    }<|fim▁hole|>
      ctx.body = 'data:image/jpeg;base64,' + data
    } else {
      ctx.body = `<img src="data:image/jpeg;base64,${data}" />`
    }
  }
})

app.listen(8000)
console.log('server started at 8000')

// process.on('uncaughtException', function (err) {
//   console.log(err)
// })<|fim▁end|>
    let data = await screenshot(url, ctx.query.wait, ~~ctx.query.width)
    if (ctx.query.base64) {
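// A note on the ~~ idiom in the handler above (a general JavaScript
// observation, not behavior specific to this server): double bitwise NOT
// converts through ToInt32, truncating toward zero.
console.log(~~'800')      // 800
console.log(~~'3.9')      // 3  (truncates, never rounds)
console.log(~~undefined)  // 0  (a missing ?width query param becomes 0)
console.log(~~2147483648) // -2147483648 (wraps at 32 bits; Math.trunc(Number(x) || 0) avoids this)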
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main

import (
	"encoding/base32"
	"encoding/hex"
	"fmt"
	"io"
	"io/ioutil"
	"net/url"
	"os"
	"strings"
)

func escape(encoding string) {
	switch {
	case strings.HasPrefix("query", encoding):
		b, err := ioutil.ReadAll(os.Stdin)
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		os.Stdout.Write([]byte(url.QueryEscape(string(b))))
	case strings.HasPrefix("hex", encoding):
		b, err := ioutil.ReadAll(os.Stdin)
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		os.Stdout.Write([]byte(hex.EncodeToString(b)))
	default:
		fmt.Fprintf(os.Stderr, "unknown escape encoding: %q\n", encoding)
		os.Exit(2)
	}
}

func unescape(encoding string) {
	switch {
	case strings.HasPrefix("query", encoding):
		b, err := ioutil.ReadAll(os.Stdin)
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		s, err := url.QueryUnescape(string(b))
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		os.Stdout.Write([]byte(s))
	case strings.HasPrefix("b32", encoding):
		d := base32.NewDecoder(base32.StdEncoding, os.Stdin)
		io.Copy(os.Stdout, d)
	default:
		fmt.Fprintf(os.Stderr, "unknown unescape encoding: %q\n", encoding)
		os.Exit(2)
	}
}

func main() {
	if len(os.Args) != 3 {
		fmt.Fprintf(os.Stderr, "expected two arguments: <mode> <encoding>: got %d\n", len(os.Args)-1)
		os.Exit(2)
	}
	mode := os.Args[1]
	switch {
	case strings.HasPrefix("escape", mode):
		escape(os.Args[2])
	case strings.HasPrefix("unescape", mode) || strings.HasPrefix("decode", mode):
		unescape(os.Args[2])
	default:<|fim▁hole|>
	}
}<|fim▁end|>
		fmt.Fprintf(os.Stderr, "unknown mode: %q\n", mode)
		os.Exit(2)
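The reversed-argument strings.HasPrefix("query", encoding) calls above implement abbreviation matching: any prefix of the full word selects that mode, so q, qu, and que all mean query. One subtlety, easy to verify: the empty string is a prefix of everything, so an empty <encoding> argument silently falls into the first listed case; a len(encoding) > 0 guard before the switch would close that gap. A tiny standalone demonstration:

package main

import (
	"fmt"
	"strings"
)

func main() {
	for _, arg := range []string{"q", "que", "hex", "h", ""} {
		// Reversed HasPrefix: does the full word start with the user's abbreviation?
		fmt.Printf("%q matches query: %v\n", arg, strings.HasPrefix("query", arg))
	}
	// Output:
	// "q" matches query: true
	// "que" matches query: true
	// "hex" matches query: false
	// "h" matches query: false
	// "" matches query: true   (empty abbreviation matches every case)
}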