prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>StopPaymentTask.java<|end_file_name|><|fim▁begin|>package com.citypark.api.task; import com.citypark.api.parser.CityParkStartPaymentParser; import android.content.Context; import android.os.AsyncTask; import android.text.format.Time; public class StopPaymentTask extends AsyncTask<Void, Void, Boolean> { private Context context; private String sessionId; private String paymentProviderName; private double latitude;<|fim▁hole|> private double longitude; private String operationStatus; public StopPaymentTask(Context context, String sessionId, String paymentProviderName, double latitude, double longitude, String operationStatus) { super(); this.context = context; this.sessionId = sessionId; this.paymentProviderName = paymentProviderName; this.latitude = latitude; this.longitude = longitude; this.operationStatus = operationStatus; } @Override protected Boolean doInBackground(Void... params) { //update citypark through API on success or failure CityParkStartPaymentParser parser = new CityParkStartPaymentParser(context, sessionId, paymentProviderName, latitude, longitude, operationStatus); parser.parse(); return true; } }<|fim▁end|>
<|file_name|>ConnectedNode.js<|end_file_name|><|fim▁begin|>goog.provide('crow.ConnectedNode'); goog.require('crow.Node'); /**<|fim▁hole|> crow.ConnectedNode = function(id){ crow.Node.apply(this, arguments); this.connections = []; this.connectionDistances = {}; }; crow.ConnectedNode.prototype = new crow.Node(); crow.ConnectedNode.prototype.connectTo = function(otherNode, distance, symmetric){ if(typeof distance == "undefined") distance = 1; this.connections.push(otherNode); this.connectionDistances[otherNode.id] = distance; if(typeof symmetric !== "false" && otherNode instanceof crow.ConnectedNode){ otherNode.connections.push(this); otherNode.connectionDistances[this.id] = distance; } }; crow.ConnectedNode.prototype.getNeighbors = function(){ return this.connections; }; crow.ConnectedNode.prototype.distanceToNeighbor = function(otherNode){ return this.connectionDistances[otherNode.id] || Infinity; }<|fim▁end|>
* ConnectedNodes are nodes that have to be explicitly "connected" to other nodes. * @class */
<|file_name|>apk.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2015, Kevin Brebanov <https://github.com/kbrebanov> # Based on pacman (Afterburn <http://github.com/afterburn>, Aaron Bull Schaefer <[email protected]>) # and apt (Matthew Williams <[email protected]>) modules. # # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['stableinterface'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: apk short_description: Manages apk packages description: - Manages I(apk) packages for Alpine Linux. author: "Kevin Brebanov (@kbrebanov)" version_added: "2.0" options: available: description: - During upgrade, reset versioned world dependencies and change logic to prefer replacing or downgrading packages (instead of holding them) if the currently installed package is no longer available from any repository. required: false default: no choices: [ "yes", "no" ] version_added: "2.4" name: description: - A package name, like C(foo), or multiple packages, like C(foo, bar). required: false default: null repository: description: - A package repository or multiple repositories. Unlike with the underlying apk command, this list will override the system repositories rather than supplement them. required: false default: null version_added: "2.4" state: description: - Indicates the desired package(s) state. - C(present) ensures the package(s) is/are present. - C(absent) ensures the package(s) is/are absent. - C(latest) ensures the package(s) is/are present and the latest version(s). required: false default: present choices: [ "present", "absent", "latest" ] update_cache: description: - Update repository indexes. Can be run with other steps or on it's own. 
required: false default: no choices: [ "yes", "no" ] upgrade: description: - Upgrade all installed packages to their latest version. required: false default: no choices: [ "yes", "no" ] notes: - '"name" and "upgrade" are mutually exclusive.' - When used with a `loop:` each package will be processed individually, it is much more efficient to pass the list directly to the `name` option. ''' EXAMPLES = ''' # Update repositories and install "foo" package - apk: name: foo update_cache: yes # Update repositories and install "foo" and "bar" packages - apk: name: foo,bar update_cache: yes # Remove "foo" package - apk: name: foo state: absent # Remove "foo" and "bar" packages - apk: name: foo,bar state: absent # Install the package "foo"<|fim▁hole|>- apk: name: foo state: present # Install the packages "foo" and "bar" - apk: name: foo,bar state: present # Update repositories and update package "foo" to latest version - apk: name: foo state: latest update_cache: yes # Update repositories and update packages "foo" and "bar" to latest versions - apk: name: foo,bar state: latest update_cache: yes # Update all installed packages to the latest versions - apk: upgrade: yes # Upgrade / replace / downgrade / uninstall all installed packages to the latest versions available - apk: available: yes upgrade: yes # Update repositories as a separate step - apk: update_cache: yes # Install package from a specific repository - apk: name: foo state: latest update_cache: yes repository: http://dl-3.alpinelinux.org/alpine/edge/main ''' RETURN = ''' packages: description: a list of packages that have been changed returned: when packages have changed type: list sample: ['package', 'other-package'] ''' import re # Import module snippets. 
from ansible.module_utils.basic import AnsibleModule def parse_for_packages(stdout): packages = [] data = stdout.split('\n') regex = re.compile(r'^\(\d+/\d+\)\s+\S+\s+(\S+)') for l in data: p = regex.search(l) if p: packages.append(p.group(1)) return packages def update_package_db(module, exit): cmd = "%s update" % (APK_PATH) rc, stdout, stderr = module.run_command(cmd, check_rc=False) if rc != 0: module.fail_json(msg="could not update package db", stdout=stdout, stderr=stderr) elif exit: module.exit_json(changed=True, msg='updated repository indexes', stdout=stdout, stderr=stderr) else: return True def query_toplevel(module, name): # /etc/apk/world contains a list of top-level packages separated by ' ' or \n # packages may contain repository (@) or version (=<>~) separator characters or start with negation ! regex = re.compile(r'^' + re.escape(name) + r'([@=<>~].+)?$') with open('/etc/apk/world') as f: content = f.read().split() for p in content: if regex.search(p): return True return False def query_package(module, name): cmd = "%s -v info --installed %s" % (APK_PATH, name) rc, stdout, stderr = module.run_command(cmd, check_rc=False) if rc == 0: return True else: return False def query_latest(module, name): cmd = "%s version %s" % (APK_PATH, name) rc, stdout, stderr = module.run_command(cmd, check_rc=False) search_pattern = r"(%s)-[\d\.\w]+-[\d\w]+\s+(.)\s+[\d\.\w]+-[\d\w]+\s+" % (re.escape(name)) match = re.search(search_pattern, stdout) if match and match.group(2) == "<": return False return True def query_virtual(module, name): cmd = "%s -v info --description %s" % (APK_PATH, name) rc, stdout, stderr = module.run_command(cmd, check_rc=False) search_pattern = r"^%s: virtual meta package" % (re.escape(name)) if re.search(search_pattern, stdout): return True return False def get_dependencies(module, name): cmd = "%s -v info --depends %s" % (APK_PATH, name) rc, stdout, stderr = module.run_command(cmd, check_rc=False) dependencies = stdout.split() if 
len(dependencies) > 1: return dependencies[1:] else: return [] def upgrade_packages(module, available): if module.check_mode: cmd = "%s upgrade --simulate" % (APK_PATH) else: cmd = "%s upgrade" % (APK_PATH) if available: cmd = "%s --available" % cmd rc, stdout, stderr = module.run_command(cmd, check_rc=False) packagelist = parse_for_packages(stdout) if rc != 0: module.fail_json(msg="failed to upgrade packages", stdout=stdout, stderr=stderr, packages=packagelist) if re.search(r'^OK', stdout): module.exit_json(changed=False, msg="packages already upgraded", stdout=stdout, stderr=stderr, packages=packagelist) module.exit_json(changed=True, msg="upgraded packages", stdout=stdout, stderr=stderr, packages=packagelist) def install_packages(module, names, state): upgrade = False to_install = [] to_upgrade = [] for name in names: # Check if virtual package if query_virtual(module, name): # Get virtual package dependencies dependencies = get_dependencies(module, name) for dependency in dependencies: if state == 'latest' and not query_latest(module, dependency): to_upgrade.append(dependency) else: if not query_toplevel(module, name): to_install.append(name) elif state == 'latest' and not query_latest(module, name): to_upgrade.append(name) if to_upgrade: upgrade = True if not to_install and not upgrade: module.exit_json(changed=False, msg="package(s) already installed") packages = " ".join(to_install + to_upgrade) if upgrade: if module.check_mode: cmd = "%s add --upgrade --simulate %s" % (APK_PATH, packages) else: cmd = "%s add --upgrade %s" % (APK_PATH, packages) else: if module.check_mode: cmd = "%s add --simulate %s" % (APK_PATH, packages) else: cmd = "%s add %s" % (APK_PATH, packages) rc, stdout, stderr = module.run_command(cmd, check_rc=False) packagelist = parse_for_packages(stdout) if rc != 0: module.fail_json(msg="failed to install %s" % (packages), stdout=stdout, stderr=stderr, packages=packagelist) module.exit_json(changed=True, msg="installed %s package(s)" % 
(packages), stdout=stdout, stderr=stderr, packages=packagelist) def remove_packages(module, names): installed = [] for name in names: if query_package(module, name): installed.append(name) if not installed: module.exit_json(changed=False, msg="package(s) already removed") names = " ".join(installed) if module.check_mode: cmd = "%s del --purge --simulate %s" % (APK_PATH, names) else: cmd = "%s del --purge %s" % (APK_PATH, names) rc, stdout, stderr = module.run_command(cmd, check_rc=False) packagelist = parse_for_packages(stdout) # Check to see if packages are still present because of dependencies for name in installed: if query_package(module, name): rc = 1 break if rc != 0: module.fail_json(msg="failed to remove %s package(s)" % (names), stdout=stdout, stderr=stderr, packages=packagelist) module.exit_json(changed=True, msg="removed %s package(s)" % (names), stdout=stdout, stderr=stderr, packages=packagelist) # ========================================== # Main control flow. def main(): module = AnsibleModule( argument_spec=dict( state=dict(default='present', choices=['present', 'installed', 'absent', 'removed', 'latest']), name=dict(type='list'), repository=dict(type='list'), update_cache=dict(default='no', type='bool'), upgrade=dict(default='no', type='bool'), available=dict(default='no', type='bool'), ), required_one_of=[['name', 'update_cache', 'upgrade']], mutually_exclusive=[['name', 'upgrade']], supports_check_mode=True ) # Set LANG env since we parse stdout module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C') global APK_PATH APK_PATH = module.get_bin_path('apk', required=True) p = module.params # add repositories to the APK_PATH if p['repository']: for r in p['repository']: APK_PATH = "%s --repository %s --repositories-file /dev/null" % (APK_PATH, r) # normalize the state parameter if p['state'] in ['present', 'installed']: p['state'] = 'present' if p['state'] in ['absent', 'removed']: p['state'] = 'absent' if 
p['update_cache']: update_package_db(module, not p['name'] and not p['upgrade']) if p['upgrade']: upgrade_packages(module, p['available']) if p['state'] in ['present', 'latest']: install_packages(module, p['name'], p['state']) elif p['state'] == 'absent': remove_packages(module, p['name']) if __name__ == '__main__': main()<|fim▁end|>
<|file_name|>source-map-consumer-compiled.js<|end_file_name|><|fim▁begin|>/* -*- Mode: js; js-indent-level: 2; -*- */ /* * Copyright 2011 Mozilla Foundation and contributors * Licensed under the New BSD license. See LICENSE or: * http://opensource.org/licenses/BSD-3-Clause */ 'use strict'; if (typeof define !== 'function') { var define = require('amdefine')(module, require); } define(function (require, exports, module) { var util = require('./util'); var binarySearch = require('./binary-search'); var ArraySet = require('./array-set').ArraySet; var base64VLQ = require('./base64-vlq'); var quickSort = require('./quick-sort').quickSort; function SourceMapConsumer(aSourceMap) { var sourceMap = aSourceMap; if (typeof aSourceMap === 'string') { sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); } return sourceMap.sections != null ? new IndexedSourceMapConsumer(sourceMap) : new BasicSourceMapConsumer(sourceMap); } SourceMapConsumer.fromSourceMap = function (aSourceMap) { return BasicSourceMapConsumer.fromSourceMap(aSourceMap); }; /** * The version of the source mapping spec that we are consuming. */ SourceMapConsumer.prototype._version = 3; // `__generatedMappings` and `__originalMappings` are arrays that hold the // parsed mapping coordinates from the source map's "mappings" attribute. They // are lazily instantiated, accessed via the `_generatedMappings` and // `_originalMappings` getters respectively, and we only parse the mappings // and create these arrays once queried for a source location. We jump through // these hoops because there can be many thousands of mappings, and parsing // them is expensive, so we only want to do it if we must. 
// // Each object in the arrays is of the form: // // { // generatedLine: The line number in the generated code, // generatedColumn: The column number in the generated code, // source: The path to the original source file that generated this // chunk of code, // originalLine: The line number in the original source that // corresponds to this chunk of generated code, // originalColumn: The column number in the original source that // corresponds to this chunk of generated code, // name: The name of the original symbol which generated this chunk of // code. // } // // All properties except for `generatedLine` and `generatedColumn` can be // `null`. // // `_generatedMappings` is ordered by the generated positions. // // `_originalMappings` is ordered by the original positions. SourceMapConsumer.prototype.__generatedMappings = null; Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { get: function get() { if (!this.__generatedMappings) { this._parseMappings(this._mappings, this.sourceRoot); } return this.__generatedMappings; } }); SourceMapConsumer.prototype.__originalMappings = null; Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { get: function get() { if (!this.__originalMappings) { this._parseMappings(this._mappings, this.sourceRoot); } return this.__originalMappings; } }); SourceMapConsumer.prototype._charIsMappingSeparator = function SourceMapConsumer_charIsMappingSeparator(aStr, index) { var c = aStr.charAt(index); return c === ";" || c === ","; }; /** * Parse the mappings in a string in to a data structure which we can easily * query (the ordered arrays in the `this.__generatedMappings` and * `this.__originalMappings` properties).<|fim▁hole|> throw new Error("Subclasses must implement _parseMappings"); }; SourceMapConsumer.GENERATED_ORDER = 1; SourceMapConsumer.ORIGINAL_ORDER = 2; SourceMapConsumer.GREATEST_LOWER_BOUND = 1; SourceMapConsumer.LEAST_UPPER_BOUND = 2; /** * Iterate over each mapping between an 
original source/line/column and a * generated line/column in this source map. * * @param Function aCallback * The function that is called with each mapping. * @param Object aContext * Optional. If specified, this object will be the value of `this` every * time that `aCallback` is called. * @param aOrder * Either `SourceMapConsumer.GENERATED_ORDER` or * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to * iterate over the mappings sorted by the generated file's line/column * order or the original's source/line/column order, respectively. Defaults to * `SourceMapConsumer.GENERATED_ORDER`. */ SourceMapConsumer.prototype.eachMapping = function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { var context = aContext || null; var order = aOrder || SourceMapConsumer.GENERATED_ORDER; var mappings; switch (order) { case SourceMapConsumer.GENERATED_ORDER: mappings = this._generatedMappings; break; case SourceMapConsumer.ORIGINAL_ORDER: mappings = this._originalMappings; break; default: throw new Error("Unknown order of iteration."); } var sourceRoot = this.sourceRoot; mappings.map(function (mapping) { var source = mapping.source === null ? null : this._sources.at(mapping.source); if (source != null && sourceRoot != null) { source = util.join(sourceRoot, source); } return { source: source, generatedLine: mapping.generatedLine, generatedColumn: mapping.generatedColumn, originalLine: mapping.originalLine, originalColumn: mapping.originalColumn, name: mapping.name === null ? null : this._names.at(mapping.name) }; }, this).forEach(aCallback, context); }; /** * Returns all generated line and column information for the original source, * line, and column provided. If no column is provided, returns all mappings * corresponding to a either the line we are searching for or the next * closest line that has any mappings. 
Otherwise, returns all mappings * corresponding to the given line and either the column we are searching for * or the next closest column that has any offsets. * * The only argument is an object with the following properties: * * - source: The filename of the original source. * - line: The line number in the original source. * - column: Optional. the column number in the original source. * * and an array of objects is returned, each with the following properties: * * - line: The line number in the generated source, or null. * - column: The column number in the generated source, or null. */ SourceMapConsumer.prototype.allGeneratedPositionsFor = function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { var line = util.getArg(aArgs, 'line'); // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping // returns the index of the closest mapping less than the needle. By // setting needle.originalColumn to 0, we thus find the last mapping for // the given line, provided such a mapping exists. var needle = { source: util.getArg(aArgs, 'source'), originalLine: line, originalColumn: util.getArg(aArgs, 'column', 0) }; if (this.sourceRoot != null) { needle.source = util.relative(this.sourceRoot, needle.source); } if (!this._sources.has(needle.source)) { return []; } needle.source = this._sources.indexOf(needle.source); var mappings = []; var index = this._findMapping(needle, this._originalMappings, "originalLine", "originalColumn", util.compareByOriginalPositions, binarySearch.LEAST_UPPER_BOUND); if (index >= 0) { var mapping = this._originalMappings[index]; if (aArgs.column === undefined) { var originalLine = mapping.originalLine; // Iterate until either we run out of mappings, or we run into // a mapping for a different line than the one we found. Since // mappings are sorted, this is guaranteed to find all mappings for // the line we found. 
while (mapping && mapping.originalLine === originalLine) { mappings.push({ line: util.getArg(mapping, 'generatedLine', null), column: util.getArg(mapping, 'generatedColumn', null), lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) }); mapping = this._originalMappings[++index]; } } else { var originalColumn = mapping.originalColumn; // Iterate until either we run out of mappings, or we run into // a mapping for a different line than the one we were searching for. // Since mappings are sorted, this is guaranteed to find all mappings for // the line we are searching for. while (mapping && mapping.originalLine === line && mapping.originalColumn == originalColumn) { mappings.push({ line: util.getArg(mapping, 'generatedLine', null), column: util.getArg(mapping, 'generatedColumn', null), lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) }); mapping = this._originalMappings[++index]; } } } return mappings; }; exports.SourceMapConsumer = SourceMapConsumer; /** * A BasicSourceMapConsumer instance represents a parsed source map which we can * query for information about the original file positions by giving it a file * position in the generated source. * * The only parameter is the raw source map (either as a JSON string, or * already parsed to an object). According to the spec, source maps have the * following attributes: * * - version: Which version of the source map spec this map is following. * - sources: An array of URLs to the original source files. * - names: An array of identifiers which can be referrenced by individual mappings. * - sourceRoot: Optional. The URL root from which all sources are relative. * - sourcesContent: Optional. An array of contents of the original source files. * - mappings: A string of base64 VLQs which contain the actual mappings. * - file: Optional. The generated file this source map is associated with. 
* * Here is an example source map, taken from the source map spec[0]: * * { * version : 3, * file: "out.js", * sourceRoot : "", * sources: ["foo.js", "bar.js"], * names: ["src", "maps", "are", "fun"], * mappings: "AA,AB;;ABCDE;" * } * * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# */ function BasicSourceMapConsumer(aSourceMap) { var sourceMap = aSourceMap; if (typeof aSourceMap === 'string') { sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); } var version = util.getArg(sourceMap, 'version'); var sources = util.getArg(sourceMap, 'sources'); // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which // requires the array) to play nice here. var names = util.getArg(sourceMap, 'names', []); var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); var mappings = util.getArg(sourceMap, 'mappings'); var file = util.getArg(sourceMap, 'file', null); // Once again, Sass deviates from the spec and supplies the version as a // string rather than a number, so we use loose equality checking here. if (version != this._version) { throw new Error('Unsupported version: ' + version); } // Some source maps produce relative source paths like "./foo.js" instead of // "foo.js". Normalize these first so that future comparisons will succeed. // See bugzil.la/1090768. sources = sources.map(util.normalize); // Pass `true` below to allow duplicate names and sources. While source maps // are intended to be compressed and deduplicated, the TypeScript compiler // sometimes generates source maps with duplicates in them. See Github issue // #72 and bugzil.la/889492. 
this._names = ArraySet.fromArray(names, true); this._sources = ArraySet.fromArray(sources, true); this.sourceRoot = sourceRoot; this.sourcesContent = sourcesContent; this._mappings = mappings; this.file = file; } BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; /** * Create a BasicSourceMapConsumer from a SourceMapGenerator. * * @param SourceMapGenerator aSourceMap * The source map that will be consumed. * @returns BasicSourceMapConsumer */ BasicSourceMapConsumer.fromSourceMap = function SourceMapConsumer_fromSourceMap(aSourceMap) { var smc = Object.create(BasicSourceMapConsumer.prototype); var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); smc.sourceRoot = aSourceMap._sourceRoot; smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), smc.sourceRoot); smc.file = aSourceMap._file; // Because we are modifying the entries (by converting string sources and // names to indices into the sources and names ArraySets), we have to make // a copy of the entry or else bad things happen. Shared mutable state // strikes again! See github issue #191. 
var generatedMappings = aSourceMap._mappings.toArray().slice(); var destGeneratedMappings = smc.__generatedMappings = []; var destOriginalMappings = smc.__originalMappings = []; for (var i = 0, length = generatedMappings.length; i < length; i++) { var srcMapping = generatedMappings[i]; var destMapping = new Mapping(); destMapping.generatedLine = srcMapping.generatedLine; destMapping.generatedColumn = srcMapping.generatedColumn; if (srcMapping.source) { destMapping.source = sources.indexOf(srcMapping.source); destMapping.originalLine = srcMapping.originalLine; destMapping.originalColumn = srcMapping.originalColumn; if (srcMapping.name) { destMapping.name = names.indexOf(srcMapping.name); } destOriginalMappings.push(destMapping); } destGeneratedMappings.push(destMapping); } quickSort(smc.__originalMappings, util.compareByOriginalPositions); return smc; }; /** * The version of the source mapping spec that we are consuming. */ BasicSourceMapConsumer.prototype._version = 3; /** * The list of original sources. */ Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { get: function get() { return this._sources.toArray().map(function (s) { return this.sourceRoot != null ? util.join(this.sourceRoot, s) : s; }, this); } }); /** * Provide the JIT with a nice shape / hidden class. */ function Mapping() { this.generatedLine = 0; this.generatedColumn = 0; this.source = null; this.originalLine = null; this.originalColumn = null; this.name = null; } /** * Parse the mappings in a string in to a data structure which we can easily * query (the ordered arrays in the `this.__generatedMappings` and * `this.__originalMappings` properties). 
*/ BasicSourceMapConsumer.prototype._parseMappings = function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { var generatedLine = 1; var previousGeneratedColumn = 0; var previousOriginalLine = 0; var previousOriginalColumn = 0; var previousSource = 0; var previousName = 0; var length = aStr.length; var index = 0; var cachedSegments = {}; var temp = {}; var originalMappings = []; var generatedMappings = []; var mapping, str, segment, end, value; while (index < length) { if (aStr.charAt(index) === ';') { generatedLine++; index++; previousGeneratedColumn = 0; } else if (aStr.charAt(index) === ',') { index++; } else { mapping = new Mapping(); mapping.generatedLine = generatedLine; // Because each offset is encoded relative to the previous one, // many segments often have the same encoding. We can exploit this // fact by caching the parsed variable length fields of each segment, // allowing us to avoid a second parse if we encounter the same // segment again. for (end = index; end < length; end++) { if (this._charIsMappingSeparator(aStr, end)) { break; } } str = aStr.slice(index, end); segment = cachedSegments[str]; if (segment) { index += str.length; } else { segment = []; while (index < end) { base64VLQ.decode(aStr, index, temp); value = temp.value; index = temp.rest; segment.push(value); } if (segment.length === 2) { throw new Error('Found a source, but no line and column'); } if (segment.length === 3) { throw new Error('Found a source and line, but no column'); } cachedSegments[str] = segment; } // Generated column. mapping.generatedColumn = previousGeneratedColumn + segment[0]; previousGeneratedColumn = mapping.generatedColumn; if (segment.length > 1) { // Original source. mapping.source = previousSource + segment[1]; previousSource += segment[1]; // Original line. mapping.originalLine = previousOriginalLine + segment[2]; previousOriginalLine = mapping.originalLine; // Lines are stored 0-based mapping.originalLine += 1; // Original column. 
mapping.originalColumn = previousOriginalColumn + segment[3]; previousOriginalColumn = mapping.originalColumn; if (segment.length > 4) { // Original name. mapping.name = previousName + segment[4]; previousName += segment[4]; } } generatedMappings.push(mapping); if (typeof mapping.originalLine === 'number') { originalMappings.push(mapping); } } } quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated); this.__generatedMappings = generatedMappings; quickSort(originalMappings, util.compareByOriginalPositions); this.__originalMappings = originalMappings; }; /** * Find the mapping that best matches the hypothetical "needle" mapping that * we are searching for in the given "haystack" of mappings. */ BasicSourceMapConsumer.prototype._findMapping = function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, aColumnName, aComparator, aBias) { // To return the position we are searching for, we must first find the // mapping for the given position and then return the opposite position it // points to. Because the mappings are sorted, we can use binary search to // find the best mapping. if (aNeedle[aLineName] <= 0) { throw new TypeError('Line must be greater than or equal to 1, got ' + aNeedle[aLineName]); } if (aNeedle[aColumnName] < 0) { throw new TypeError('Column must be greater than or equal to 0, got ' + aNeedle[aColumnName]); } return binarySearch.search(aNeedle, aMappings, aComparator, aBias); }; /** * Compute the last column for each generated mapping. The last column is * inclusive. */ BasicSourceMapConsumer.prototype.computeColumnSpans = function SourceMapConsumer_computeColumnSpans() { for (var index = 0; index < this._generatedMappings.length; ++index) { var mapping = this._generatedMappings[index]; // Mappings do not contain a field for the last generated columnt. We // can come up with an optimistic estimate, however, by assuming that // mappings are contiguous (i.e. 
given two consecutive mappings, the // first mapping ends where the second one starts). if (index + 1 < this._generatedMappings.length) { var nextMapping = this._generatedMappings[index + 1]; if (mapping.generatedLine === nextMapping.generatedLine) { mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; continue; } } // The last mapping for each line spans the entire line. mapping.lastGeneratedColumn = Infinity; } }; /** * Returns the original source, line, and column information for the generated * source's line and column positions provided. The only argument is an object * with the following properties: * * - line: The line number in the generated source. * - column: The column number in the generated source. * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the * closest element that is smaller than or greater than the one we are * searching for, respectively, if the exact element cannot be found. * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. * * and an object is returned with the following properties: * * - source: The original source file, or null. * - line: The line number in the original source, or null. * - column: The column number in the original source, or null. * - name: The original identifier, or null. 
*/ BasicSourceMapConsumer.prototype.originalPositionFor = function SourceMapConsumer_originalPositionFor(aArgs) { var needle = { generatedLine: util.getArg(aArgs, 'line'), generatedColumn: util.getArg(aArgs, 'column') }; var index = this._findMapping(needle, this._generatedMappings, "generatedLine", "generatedColumn", util.compareByGeneratedPositionsDeflated, util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)); if (index >= 0) { var mapping = this._generatedMappings[index]; if (mapping.generatedLine === needle.generatedLine) { var source = util.getArg(mapping, 'source', null); if (source !== null) { source = this._sources.at(source); if (this.sourceRoot != null) { source = util.join(this.sourceRoot, source); } } var name = util.getArg(mapping, 'name', null); if (name !== null) { name = this._names.at(name); } return { source: source, line: util.getArg(mapping, 'originalLine', null), column: util.getArg(mapping, 'originalColumn', null), name: name }; } } return { source: null, line: null, column: null, name: null }; }; /** * Return true if we have the source content for every source in the source * map, false otherwise. */ BasicSourceMapConsumer.prototype.hasContentsOfAllSources = function BasicSourceMapConsumer_hasContentsOfAllSources() { if (!this.sourcesContent) { return false; } return this.sourcesContent.length >= this._sources.size() && !this.sourcesContent.some(function (sc) { return sc == null; }); }; /** * Returns the original source content. The only argument is the url of the * original source file. Returns null if no original source content is * availible. 
*/ BasicSourceMapConsumer.prototype.sourceContentFor = function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { if (!this.sourcesContent) { return null; } if (this.sourceRoot != null) { aSource = util.relative(this.sourceRoot, aSource); } if (this._sources.has(aSource)) { return this.sourcesContent[this._sources.indexOf(aSource)]; } var url; if (this.sourceRoot != null && (url = util.urlParse(this.sourceRoot))) { // XXX: file:// URIs and absolute paths lead to unexpected behavior for // many users. We can help them out when they expect file:// URIs to // behave like it would if they were running a local HTTP server. See // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. var fileUriAbsPath = aSource.replace(/^file:\/\//, ""); if (url.scheme == "file" && this._sources.has(fileUriAbsPath)) { return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]; } if ((!url.path || url.path == "/") && this._sources.has("/" + aSource)) { return this.sourcesContent[this._sources.indexOf("/" + aSource)]; } } // This function is used recursively from // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we // don't want to throw if we can't find the source - we just want to // return null, so we provide a flag to exit gracefully. if (nullOnMissing) { return null; } else { throw new Error('"' + aSource + '" is not in the SourceMap.'); } }; /** * Returns the generated line and column information for the original source, * line, and column positions provided. The only argument is an object with * the following properties: * * - source: The filename of the original source. * - line: The line number in the original source. * - column: The column number in the original source. * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the * closest element that is smaller than or greater than the one we are * searching for, respectively, if the exact element cannot be found. 
* Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. * * and an object is returned with the following properties: * * - line: The line number in the generated source, or null. * - column: The column number in the generated source, or null. */ BasicSourceMapConsumer.prototype.generatedPositionFor = function SourceMapConsumer_generatedPositionFor(aArgs) { var source = util.getArg(aArgs, 'source'); if (this.sourceRoot != null) { source = util.relative(this.sourceRoot, source); } if (!this._sources.has(source)) { return { line: null, column: null, lastColumn: null }; } source = this._sources.indexOf(source); var needle = { source: source, originalLine: util.getArg(aArgs, 'line'), originalColumn: util.getArg(aArgs, 'column') }; var index = this._findMapping(needle, this._originalMappings, "originalLine", "originalColumn", util.compareByOriginalPositions, util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)); if (index >= 0) { var mapping = this._originalMappings[index]; if (mapping.source === needle.source) { return { line: util.getArg(mapping, 'generatedLine', null), column: util.getArg(mapping, 'generatedColumn', null), lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) }; } } return { line: null, column: null, lastColumn: null }; }; exports.BasicSourceMapConsumer = BasicSourceMapConsumer; /** * An IndexedSourceMapConsumer instance represents a parsed source map which * we can query for information. It differs from BasicSourceMapConsumer in * that it takes "indexed" source maps (i.e. ones with a "sections" field) as * input. * * The only parameter is a raw source map (either as a JSON string, or already * parsed to an object). According to the spec for indexed source maps, they * have the following attributes: * * - version: Which version of the source map spec this map is following. * - file: Optional. The generated file this source map is associated with. * - sections: A list of section definitions. 
* * Each value under the "sections" field has two fields: * - offset: The offset into the original specified at which this section * begins to apply, defined as an object with a "line" and "column" * field. * - map: A source map definition. This source map could also be indexed, * but doesn't have to be. * * Instead of the "map" field, it's also possible to have a "url" field * specifying a URL to retrieve a source map from, but that's currently * unsupported. * * Here's an example source map, taken from the source map spec[0], but * modified to omit a section which uses the "url" field. * * { * version : 3, * file: "app.js", * sections: [{ * offset: {line:100, column:10}, * map: { * version : 3, * file: "section.js", * sources: ["foo.js", "bar.js"], * names: ["src", "maps", "are", "fun"], * mappings: "AAAA,E;;ABCDE;" * } * }], * } * * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt */ function IndexedSourceMapConsumer(aSourceMap) { var sourceMap = aSourceMap; if (typeof aSourceMap === 'string') { sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); } var version = util.getArg(sourceMap, 'version'); var sections = util.getArg(sourceMap, 'sections'); if (version != this._version) { throw new Error('Unsupported version: ' + version); } this._sources = new ArraySet(); this._names = new ArraySet(); var lastOffset = { line: -1, column: 0 }; this._sections = sections.map(function (s) { if (s.url) { // The url field will require support for asynchronicity. 
// See https://github.com/mozilla/source-map/issues/16 throw new Error('Support for url field in sections not implemented.'); } var offset = util.getArg(s, 'offset'); var offsetLine = util.getArg(offset, 'line'); var offsetColumn = util.getArg(offset, 'column'); if (offsetLine < lastOffset.line || offsetLine === lastOffset.line && offsetColumn < lastOffset.column) { throw new Error('Section offsets must be ordered and non-overlapping.'); } lastOffset = offset; return { generatedOffset: { // The offset fields are 0-based, but we use 1-based indices when // encoding/decoding from VLQ. generatedLine: offsetLine + 1, generatedColumn: offsetColumn + 1 }, consumer: new SourceMapConsumer(util.getArg(s, 'map')) }; }); } IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; /** * The version of the source mapping spec that we are consuming. */ IndexedSourceMapConsumer.prototype._version = 3; /** * The list of original sources. */ Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { get: function get() { var sources = []; for (var i = 0; i < this._sections.length; i++) { for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { sources.push(this._sections[i].consumer.sources[j]); } }; return sources; } }); /** * Returns the original source, line, and column information for the generated * source's line and column positions provided. The only argument is an object * with the following properties: * * - line: The line number in the generated source. * - column: The column number in the generated source. * * and an object is returned with the following properties: * * - source: The original source file, or null. * - line: The line number in the original source, or null. * - column: The column number in the original source, or null. * - name: The original identifier, or null. 
*/ IndexedSourceMapConsumer.prototype.originalPositionFor = function IndexedSourceMapConsumer_originalPositionFor(aArgs) { var needle = { generatedLine: util.getArg(aArgs, 'line'), generatedColumn: util.getArg(aArgs, 'column') }; // Find the section containing the generated position we're trying to map // to an original position. var sectionIndex = binarySearch.search(needle, this._sections, function (needle, section) { var cmp = needle.generatedLine - section.generatedOffset.generatedLine; if (cmp) { return cmp; } return needle.generatedColumn - section.generatedOffset.generatedColumn; }); var section = this._sections[sectionIndex]; if (!section) { return { source: null, line: null, column: null, name: null }; } return section.consumer.originalPositionFor({ line: needle.generatedLine - (section.generatedOffset.generatedLine - 1), column: needle.generatedColumn - (section.generatedOffset.generatedLine === needle.generatedLine ? section.generatedOffset.generatedColumn - 1 : 0), bias: aArgs.bias }); }; /** * Return true if we have the source content for every source in the source * map, false otherwise. */ IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = function IndexedSourceMapConsumer_hasContentsOfAllSources() { return this._sections.every(function (s) { return s.consumer.hasContentsOfAllSources(); }); }; /** * Returns the original source content. The only argument is the url of the * original source file. Returns null if no original source content is * available. 
*/ IndexedSourceMapConsumer.prototype.sourceContentFor = function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { for (var i = 0; i < this._sections.length; i++) { var section = this._sections[i]; var content = section.consumer.sourceContentFor(aSource, true); if (content) { return content; } } if (nullOnMissing) { return null; } else { throw new Error('"' + aSource + '" is not in the SourceMap.'); } }; /** * Returns the generated line and column information for the original source, * line, and column positions provided. The only argument is an object with * the following properties: * * - source: The filename of the original source. * - line: The line number in the original source. * - column: The column number in the original source. * * and an object is returned with the following properties: * * - line: The line number in the generated source, or null. * - column: The column number in the generated source, or null. */ IndexedSourceMapConsumer.prototype.generatedPositionFor = function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { for (var i = 0; i < this._sections.length; i++) { var section = this._sections[i]; // Only consider this section if the requested source is in the list of // sources of the consumer. if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) { continue; } var generatedPosition = section.consumer.generatedPositionFor(aArgs); if (generatedPosition) { var ret = { line: generatedPosition.line + (section.generatedOffset.generatedLine - 1), column: generatedPosition.column + (section.generatedOffset.generatedLine === generatedPosition.line ? section.generatedOffset.generatedColumn - 1 : 0) }; return ret; } } return { line: null, column: null }; }; /** * Parse the mappings in a string in to a data structure which we can easily * query (the ordered arrays in the `this.__generatedMappings` and * `this.__originalMappings` properties). 
*/ IndexedSourceMapConsumer.prototype._parseMappings = function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { this.__generatedMappings = []; this.__originalMappings = []; for (var i = 0; i < this._sections.length; i++) { var section = this._sections[i]; var sectionMappings = section.consumer._generatedMappings; for (var j = 0; j < sectionMappings.length; j++) { var mapping = sectionMappings[i]; var source = section.consumer._sources.at(mapping.source); if (section.consumer.sourceRoot !== null) { source = util.join(section.consumer.sourceRoot, source); } this._sources.add(source); source = this._sources.indexOf(source); var name = section.consumer._names.at(mapping.name); this._names.add(name); name = this._names.indexOf(name); // The mappings coming from the consumer for the section have // generated positions relative to the start of the section, so we // need to offset them to be relative to the start of the concatenated // generated file. var adjustedMapping = { source: source, generatedLine: mapping.generatedLine + (section.generatedOffset.generatedLine - 1), generatedColumn: mapping.column + (section.generatedOffset.generatedLine === mapping.generatedLine) ? section.generatedOffset.generatedColumn - 1 : 0, originalLine: mapping.originalLine, originalColumn: mapping.originalColumn, name: name }; this.__generatedMappings.push(adjustedMapping); if (typeof adjustedMapping.originalLine === 'number') { this.__originalMappings.push(adjustedMapping); } }; }; quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); quickSort(this.__originalMappings, util.compareByOriginalPositions); }; exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; }); //# sourceMappingURL=source-map-consumer-compiled.js.map<|fim▁end|>
*/ SourceMapConsumer.prototype._parseMappings = function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {
<|file_name|>powGenerator.py<|end_file_name|><|fim▁begin|>from findSentence import sentenceGrab from phoneticWords import findPhonetics from phoneticIndex import findPhoneticIndex from random import randint from math import floor import sys def main(): library = sys.argv[1] subject = sys.argv[2] dictionary = "/usr/share/dict/words" phonetics = findPhonetics(subject, dictionary) if len(phonetics) == 0: print("Could not find any phonetic words.") return nearPhoneticNum = floor((phonetics[0][1] + phonetics[len(phonetics)-1][1]) / 2) phonetics = [i for i in phonetics if i[1] <= nearPhoneticNum] sentences = [] tries = 10 index = 0 while len(sentences) == 0 and index <= tries: if len(phonetics) == 0: print("No more phonetic words. Ending") return index += 1 punWord = phonetics[randint(0, floor(len(phonetics)/2))][0] print(punWord) sentences = sentenceGrab(punWord, library, True) if len(sentences) == 0: phonetics = [i for i in phonetics if i[0] != punWord] print("Could not find sentence... Trying again") if index >= tries: print("Reached maximum tries. Ending") return punSentence = sentences[randint(0, len(sentences) - 1)]<|fim▁hole|> punIndex = findPhoneticIndex(subject, punWord) punSentence = punSentence[0:sentenceIndex + punIndex] + subject + punSentence[sentenceIndex + punIndex + len(subject):len(punSentence)] print(punSentence) if __name__ == "__main__": main()<|fim▁end|>
sentenceIndex = punSentence.find(punWord)
<|file_name|>newtype-struct-drop-run.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(managed_boxes)] // Make sure the destructor is run for newtype structs. <|fim▁hole|> #[unsafe_destructor] impl Drop for Foo { fn drop(&mut self) { let Foo(i) = *self; i.set(23); } } pub fn main() { let y = @Cell::new(32); { let _x = Foo(y); } assert_eq!(y.get(), 23); }<|fim▁end|>
use std::cell::Cell; struct Foo(@Cell<int>);
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>""" Initialization script for restapi for the application. """ from flask import Blueprint from app.common.logging import setup_logging <|fim▁hole|># Setup logger # api_log = setup_logging(__name__, 'logs/api.log', maxFilesize=1000000, # backup_count=5) from . import views, errors<|fim▁end|>
api = Blueprint('api', __name__)
<|file_name|>gen_zpaql.rs<|end_file_name|><|fim▁begin|>use zpaql::{ZPAQLOp, Loc, Reg, OtherReg, SwapLoc}; use ir::{IR, IRVar, IROp, IRUOp}; use options; use std::collections::HashMap; /// keeps track of variable copies in registers or memory locations pub struct Cache { pub last_hold: HashMap<Loc, IRVar>, } impl Cache { /// a modification of a register also needs to remove the cache entry for the location in points to in memory pub fn remove_reg(&mut self, loc: &Loc) { match loc { &Loc::Reg(Reg::OtherReg(OtherReg::D)) => { self.last_hold.remove(&Loc::Reg(Reg::OtherReg(OtherReg::D))); self.last_hold.remove(&Loc::HD); }, &Loc::Reg(Reg::OtherReg(OtherReg::C)) => { self.last_hold.remove(&Loc::Reg(Reg::OtherReg(OtherReg::C))); self.last_hold.remove(&Loc::MC); }, &Loc::Reg(Reg::OtherReg(OtherReg::B)) => { self.last_hold.remove(&Loc::Reg(Reg::OtherReg(OtherReg::B))); self.last_hold.remove(&Loc::MB); }, x => { self.last_hold.remove(x); }, } } /// invalidate all cache entries for R-variable copies because of a new value pub fn delete_references(&mut self, varid: u8) { let mut new = HashMap::<Loc, IRVar>::new(); for (k, v) in self.last_hold.iter() { match v { &IRVar::H{index_varid, orig_name: _} if index_varid == varid => {}, &IRVar::M{index_varid} if index_varid == varid => {}, &IRVar::Ht{stack_offset: _, local, orig_name: _} if (local && varid == 0) || (!local && varid == 252) => {}, _ => { new.insert(k.clone(), v.clone()); }, } } self.last_hold = new; } /// delete cache entries for this variable pub fn delete(&mut self, irvar: &IRVar) { let mut new = HashMap::<Loc, IRVar>::new(); for (k, v) in self.last_hold.iter() { if v != &(irvar.tovar()) { new.insert(k.clone(), v.clone()); } } self.last_hold = new; } /// delete cache entries for this variable but skip the mentioned location pub fn delete_not(&mut self, irvar: &IRVar, loc: &Loc) { let mut new = HashMap::<Loc, IRVar>::new(); for (k, v) in self.last_hold.iter() { if v != &(irvar.tovar()) || k == loc { 
new.insert(k.clone(), v.clone()); } } self.last_hold = new; } /// test if the location holds a copy of the variable pub fn is_loc(&self, loc: &Loc, irvar: &IRVar) -> bool { match self.last_hold.get(loc) { Some(&IRVar::Ht{stack_offset: so, local: lo, ref orig_name}) => { match irvar { &IRVar::Ht{stack_offset, local, orig_name: _} => stack_offset == so && local == lo, // needed to ignore orig_name vv => vv == &IRVar::Ht{stack_offset: so, local: lo, orig_name: orig_name.clone()}, } }, Some(&IRVar::H{index_varid: iv, ref orig_name}) => { match irvar { &IRVar::H{index_varid, orig_name: _} => index_varid == iv, // needed to ignore orig_name vv => vv == &IRVar::H{index_varid: iv, orig_name: orig_name.clone()}, } }, Some(v) => irvar == v, _ => false, } } } /// compile IR code (which works on H, M and R) to ZPAQL code by using the registers A-D pub fn emit_zpaql(irc: &[IR], ch: &mut Cache, optioncfg: &options::Options) -> Vec<ZPAQLOp> { let mut code = vec![]; for op in irc { match op.convert() { // write original IR statement as comment IR::Block{stmts: _} => {}, IR::Comment{comment: _} => {}, other => { if optioncfg.comments { code.push(ZPAQLOp::Comment{comment: format!(" {}", other)}); } }, } match op.convert() { IR::Label{ref label} => { ch.last_hold.clear(); // label is jump destination, can't use any cache from before code.push(ZPAQLOp::Label{label: label.clone(), position: 0}); }, // position will be set afterwards IR::GoTo{ref label} => { code.push(ZPAQLOp::GoTo{label: label.clone()}); }, IR::Error => { code.push(ZPAQLOp::Error); }, IR::Halt => { code.push(ZPAQLOp::Halt); }, IR::Comment{ref comment} => { code.push(ZPAQLOp::Comment{comment: comment.clone()}); }, IR::Out{ref var} => { code.extend_from_slice(&assign_var_to_loc(var, &Loc::Reg(Reg::A), ch)); code.push(ZPAQLOp::Out); }, IR::If{ref cond_var, ref goto_label} => { code.extend_from_slice(&assign_var_to_loc(cond_var, &Loc::Reg(Reg::A), ch)); code.push(ZPAQLOp::AeqN{n: 0}); code.push(ZPAQLOp::JT{n: 3}); // cond 
is false, so jump over the jump, i.e. incr. PC by 3 more than normal given that GoTo will be a LJ code.push(ZPAQLOp::GoTo{label: goto_label.clone()}); }, IR::IfN{ref cond_var, ref goto_label} => { code.extend_from_slice(&assign_var_to_loc(cond_var, &Loc::Reg(Reg::A), ch)); code.push(ZPAQLOp::AeqN{n: 0}); code.push(ZPAQLOp::JF{n: 3}); // cond is true, so jump over the jump, i.e. incr. PC by 3 more than normal given that GoTo will be a LJ code.push(ZPAQLOp::GoTo{label: goto_label.clone()}); }, IR::IfEq{ref val1, ref val2, ref goto_label} => { code.extend_from_slice(&assign_var_to_loc(val1, &Loc::Reg(Reg::OtherReg(OtherReg::C)), ch)); code.extend_from_slice(&assign_var_to_loc(val2, &Loc::Reg(Reg::A), ch)); code.push(ZPAQLOp::Aeq(Loc::Reg(Reg::OtherReg(OtherReg::C)))); code.push(ZPAQLOp::JF{n: 3}); // cond is true, so jump over the jump, i.e. incr. PC by 3 more than normal given that GoTo will be a LJ code.push(ZPAQLOp::GoTo{label: goto_label.clone()}); }, IR::IfNeq{ref val1, ref val2, ref goto_label} => { code.extend_from_slice(&assign_var_to_loc(val1, &Loc::Reg(Reg::OtherReg(OtherReg::C)), ch)); code.extend_from_slice(&assign_var_to_loc(val2, &Loc::Reg(Reg::A), ch)); code.push(ZPAQLOp::Aeq(Loc::Reg(Reg::OtherReg(OtherReg::C)))); code.push(ZPAQLOp::JT{n: 3}); // cond is true, so jump over the jump, i.e. incr. 
PC by 3 more than normal given that GoTo will be a LJ code.push(ZPAQLOp::GoTo{label: goto_label.clone()}); }, IR::Block{ref stmts} => { code.extend_from_slice(&emit_zpaql(stmts, ch, optioncfg)) }, // recursively IR::Assign{ref target, ref source} => { if target != source { match target.tovar() { IRVar::Var{varid: _} => { // assignments to R can only come from A code.extend_from_slice(&assign_var_to_loc(source, &Loc::Reg(Reg::A), ch)); code.extend_from_slice(&assign_loc_to_var(target, &Loc::Reg(Reg::A), ch )); }, IRVar::H{index_varid: _, orig_name: _} | IRVar::Ht{stack_offset: _, local: _, orig_name: _} | IRVar::Hx{addr: _} => { match source.tovar() { IRVar::Number{value: _} | IRVar::Var{varid: _} | IRVar::M{index_varid: _} | IRVar::Mx{addr: _} => { let (zc, loc) = gen_loc_for_var(target, ch); // first make target ready, so it can be efficiently zeroed or increased code.extend_from_slice(&zc); ch.delete(target); // because target will get a new value code.extend_from_slice(&assign_var_to_loc(source, &loc, ch)); // copy variable to target location ch.last_hold.insert(loc.clone(), target.clone()); // performs better then keeping loc->source mapping // otherwise one could also try something like: // let (zc, loc) = gen_loc_for_var(source); // code.extend_from_slice(&zc); // code.extend_from_slice(&assign_loc_to_var(target, &loc)); }, IRVar::Hx{addr: _} | IRVar::Ht{stack_offset: _, local: _, orig_name: _} | IRVar::H{index_varid: _, orig_name: _} => { // use C to hold the value because A could be needed during the calculation code.extend_from_slice(&assign_var_to_loc(source, &Loc::Reg(Reg::OtherReg(OtherReg::C)), ch)); code.extend_from_slice(&assign_loc_to_var(target, &Loc::Reg(Reg::OtherReg(OtherReg::C)), ch)); }, _ => unreachable!(), } }, IRVar::M{index_varid: _} | IRVar::Mx{addr: _}=> { let (zc, loc) = gen_loc_for_var(target, ch); // improvement over var->loc->var code.extend_from_slice(&zc); ch.delete(target); code.extend_from_slice(&assign_var_to_loc(source, &loc, 
ch)); ch.last_hold.insert(loc.clone(), target.clone()); // not measured yet if it makes a big difference or could be omitted }, x => { error!("can't assign to {}", x); panic!("error") }, } } }, IR::Assign1Op{ref target, uop, ref source} => { if target == source { match target.tovar() { IRVar::Var{varid: _} => { // can not be increased in place, needs A code.extend_from_slice(&assign_var_to_loc(source, &Loc::Reg(Reg::A), ch)); match uop { IRUOp::Not => { // (== 0) code.push(ZPAQLOp::Inc(Loc::Reg(Reg::A))); code.push(ZPAQLOp::AeqN{n: 1}); code.push(ZPAQLOp::JT{n: 1}); code.push(ZPAQLOp::Zero(Loc::Reg(Reg::A))); },<|fim▁hole|> code.extend_from_slice(&assign_loc_to_var(target, &Loc::Reg(Reg::A), ch)); }, _ => { // can be modified in place let (zc, loc) = gen_loc_for_var(target, ch); ch.delete_not(target, &loc); code.extend_from_slice(&zc); match uop { IRUOp::Not => { // (== 0) code.push(ZPAQLOp::Zero(Loc::Reg(Reg::A))); ch.last_hold.insert(Loc::Reg(Reg::A), IRVar::Number{value: 0}); code.push(ZPAQLOp::Aeq(loc.clone()) ); code.push(ZPAQLOp::Zero(loc.clone())); code.push(ZPAQLOp::JF{n: 1}); code.push(ZPAQLOp::Inc(loc.clone())); }, IRUOp::Invert => { code.push(ZPAQLOp::Not(loc.clone() )); }, // bitwise ~ IRUOp::USub => { code.push(ZPAQLOp::Not(loc.clone() )); code.push(ZPAQLOp::Inc(loc.clone()) ); }, // -x == ~x + 1 } ch.remove_reg(&loc); ch.last_hold.insert(loc.clone(), target.clone()); }, } } else { // first copied to A, then calculated and then assigned to target code.extend_from_slice(&assign_var_to_loc(source, &Loc::Reg(Reg::A), ch)); match uop { IRUOp::Not => { // (== 0) code.push(ZPAQLOp::Inc(Loc::Reg(Reg::A))); code.push(ZPAQLOp::AeqN{n: 1}); code.push(ZPAQLOp::JT{n: 1}); code.push(ZPAQLOp::Zero(Loc::Reg(Reg::A))); }, IRUOp::Invert => { code.push(ZPAQLOp::Not(Loc::Reg(Reg::A)) ); }, // bitwise ~ IRUOp::USub => { code.push(ZPAQLOp::Not(Loc::Reg(Reg::A)) ); code.push(ZPAQLOp::Inc(Loc::Reg(Reg::A))); }, // -x == ~x + 1 } ch.remove_reg(&Loc::Reg(Reg::A)); 
code.extend_from_slice(&assign_loc_to_var(target, &Loc::Reg(Reg::A), ch)); } }, IR::Assign2Op{ref target, ref val1, op, ref val2} => { if target == val1 && (op == IROp::Add || op == IROp::Sub) && (val2 == &IRVar::Number{value: 0} || val2 == &IRVar::Number{value: 1}) { match val2 { // val1 = val1 + 1 (or + 0) &IRVar::Number{value: 0} => {}, // nothing to do _ => { match target.tovar() { IRVar::Var{varid: _} => { // assignments to R must go though A code.extend_from_slice(&assign_var_to_loc(val1, &Loc::Reg(Reg::A), ch)); if op == IROp::Add { code.push(ZPAQLOp::Inc(Loc::Reg(Reg::A))); } else { code.push(ZPAQLOp::Dec(Loc::Reg(Reg::A))); } ch.remove_reg(&Loc::Reg(Reg::A)); code.extend_from_slice(&assign_loc_to_var(target, &Loc::Reg(Reg::A), ch)); }, _ => { // other locations of the target variable can be inc/decreased directly let (zc, loc) = gen_loc_for_var(target, ch); code.extend_from_slice(&zc); ch.delete_not(target, &loc); if op == IROp::Add { code.push(ZPAQLOp::Inc(loc.clone())); } else { code.push(ZPAQLOp::Dec(loc.clone())); } ch.remove_reg(&loc); ch.last_hold.insert(loc.clone(), target.clone()); }, } }, } } else { // save val2 in C and val1 in A code.extend_from_slice(&assign_var_to_loc(val2, &Loc::Reg(Reg::OtherReg(OtherReg::C)), ch)); code.extend_from_slice(&assign_var_to_loc(val1, &Loc::Reg(Reg::A), ch)); // calculate A = A <op> C match op { IROp::Add => { code.push(ZPAQLOp::Aadd(Loc::Reg(Reg::OtherReg(OtherReg::C)))); }, IROp::Sub => { code.push(ZPAQLOp::Asub(Loc::Reg(Reg::OtherReg(OtherReg::C)))); }, IROp::Mult => { code.push(ZPAQLOp::Amult(Loc::Reg(Reg::OtherReg(OtherReg::C)))); }, IROp::Div => { code.push(ZPAQLOp::Adiv(Loc::Reg(Reg::OtherReg(OtherReg::C)))); }, IROp::Pow => { code.push(ZPAQLOp::Set{target: Loc::Reg(Reg::OtherReg(OtherReg::B)), source: Loc::Reg(Reg::A)}); code.push(ZPAQLOp::SetN{target: Loc::Reg(Reg::A), n: 1}); // loop start code.push(ZPAQLOp::SwapA(SwapLoc::OtherReg(OtherReg::C))); code.push(ZPAQLOp::AgtN{n: 0}); code.push(ZPAQLOp::JF{n: 
5}); // PCnextInstr += ((N+128) mod 256) - 128 code.push(ZPAQLOp::SwapA(SwapLoc::OtherReg(OtherReg::C))); code.push(ZPAQLOp::Amult(Loc::Reg(Reg::OtherReg(OtherReg::B)))); code.push(ZPAQLOp::Dec(Loc::Reg(Reg::OtherReg(OtherReg::C)))); code.push(ZPAQLOp::JMP{n: -10}); // jump finish code.push(ZPAQLOp::SwapA(SwapLoc::OtherReg(OtherReg::C))); }, IROp::LShift => { code.push(ZPAQLOp::Alshift(Loc::Reg(Reg::OtherReg(OtherReg::C))) ); }, IROp::RShift => { code.push(ZPAQLOp::Arshift(Loc::Reg(Reg::OtherReg(OtherReg::C))) ); }, IROp::Mod => { code.push(ZPAQLOp::Amod(Loc::Reg(Reg::OtherReg(OtherReg::C))) ); }, IROp::BitOr => { code.push(ZPAQLOp::Aor(Loc::Reg(Reg::OtherReg(OtherReg::C))) ); }, IROp::BitXor => { code.push(ZPAQLOp::Axor(Loc::Reg(Reg::OtherReg(OtherReg::C))) ); }, IROp::BitAnd => { code.push(ZPAQLOp::Aand(Loc::Reg(Reg::OtherReg(OtherReg::C))) ); }, IROp::Or => { code.push(ZPAQLOp::AeqN{n: 0}); code.push(ZPAQLOp::JF{n: 1}); code.push(ZPAQLOp::Set{target: Loc::Reg(Reg::A), source: Loc::Reg(Reg::OtherReg(OtherReg::C))} ); }, IROp::And => { code.push(ZPAQLOp::AeqN{n: 0}); code.push(ZPAQLOp::JT{n: 1}); code.push(ZPAQLOp::Set{target: Loc::Reg(Reg::A), source: Loc::Reg(Reg::OtherReg(OtherReg::C))} ); }, IROp::Eq => { code.push(ZPAQLOp::Aeq(Loc::Reg(Reg::OtherReg(OtherReg::C))) ); code.push(ZPAQLOp::Zero(Loc::Reg(Reg::A)) ); code.push(ZPAQLOp::JF{n: 1}); code.push(ZPAQLOp::Inc(Loc::Reg(Reg::A)) ); }, IROp::NotEq => { code.push(ZPAQLOp::Aeq(Loc::Reg(Reg::OtherReg(OtherReg::C)))); code.push(ZPAQLOp::Zero(Loc::Reg(Reg::A))); code.push(ZPAQLOp::JT{n: 1}); code.push(ZPAQLOp::Inc(Loc::Reg(Reg::A)) ); }, IROp::Lt => { code.push(ZPAQLOp::Alt(Loc::Reg(Reg::OtherReg(OtherReg::C))) ); code.push(ZPAQLOp::Zero(Loc::Reg(Reg::A) )); code.push(ZPAQLOp::JF{n: 1}); code.push(ZPAQLOp::Inc(Loc::Reg(Reg::A) )); }, IROp::LtE => { code.push(ZPAQLOp::Aeq(Loc::Reg(Reg::OtherReg(OtherReg::C)))); code.push(ZPAQLOp::JT{n: 4}); code.push(ZPAQLOp::Alt(Loc::Reg(Reg::OtherReg(OtherReg::C)))); 
code.push(ZPAQLOp::Zero(Loc::Reg(Reg::A))); code.push(ZPAQLOp::JF{n: 2}); code.push(ZPAQLOp::SetN{target: Loc::Reg(Reg::A), n: 1}); }, IROp::Gt => { code.push(ZPAQLOp::Agt(Loc::Reg(Reg::OtherReg(OtherReg::C)))); code.push(ZPAQLOp::Zero(Loc::Reg(Reg::A))); code.push(ZPAQLOp::JF{n: 1}); code.push(ZPAQLOp::Inc(Loc::Reg(Reg::A))); }, IROp::GtE => { code.push(ZPAQLOp::Aeq(Loc::Reg(Reg::OtherReg(OtherReg::C)))); code.push(ZPAQLOp::JT{n: 4}); code.push(ZPAQLOp::Agt(Loc::Reg(Reg::OtherReg(OtherReg::C)))); code.push(ZPAQLOp::Zero(Loc::Reg(Reg::A))); code.push(ZPAQLOp::JF{n: 2}); code.push(ZPAQLOp::SetN{target: Loc::Reg(Reg::A), n: 1}); }, } ch.remove_reg(&Loc::Reg(Reg::A)); ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::C))); // assign A to target code.extend_from_slice(&assign_loc_to_var(target, &Loc::Reg(Reg::A), ch)); } }, IR::MarkTempVarStart | IR::MarkTempVarEnd => {}, x => { error!("can not emit zpaql for (non-converted?) IR: {}", x); panic!("error"); } } } code } /// assign value to location, keeps track in the cache and overwrites A if value>255 fn calc_number(value: u32, loc: &Loc, ch: &mut Cache) -> Vec<ZPAQLOp> { match ch.last_hold.get(&loc) { Some(&IRVar::Number{value: v}) if v == value => { return vec![]; }, Some(&IRVar::Number{value: v}) if value > 0 && v == value - 1 => { ch.remove_reg(&loc); ch.last_hold.insert(loc.clone(), IRVar::Number{value: value}); return vec![ZPAQLOp::Inc(loc.clone())]; }, Some(&IRVar::Number{value: v}) if value < 4294967295 && v == value + 1 => { ch.remove_reg(&loc); ch.last_hold.insert(loc.clone(), IRVar::Number{value: value}); return vec![ZPAQLOp::Dec(loc.clone())]; }, _ => {}, } let vecc = if value == 4294967295 { vec![ZPAQLOp::Zero(loc.clone()), ZPAQLOp::Dec(loc.clone())] } else if value == 4294967294 { vec![ZPAQLOp::Zero(loc.clone()), ZPAQLOp::Dec(loc.clone()), ZPAQLOp::Dec(loc.clone())] } else if value == 2147483648 { ch.remove_reg(&Loc::Reg(Reg::A)); match loc { &Loc::Reg(Reg::A) => vec![ZPAQLOp::SetN{target: Loc::Reg(Reg::A), 
n: 1}, ZPAQLOp::AlshiftN{n: 31}], _ => vec![ZPAQLOp::SetN{target: Loc::Reg(Reg::A), n: 1}, ZPAQLOp::AlshiftN{n: 31}, ZPAQLOp::Set{target: loc.clone(), source: Loc::Reg(Reg::A)} ], } } else if value == 2147483647 { ch.remove_reg(&Loc::Reg(Reg::A)); match loc { &Loc::Reg(Reg::A) => vec![ZPAQLOp::Zero(Loc::Reg(Reg::A)), ZPAQLOp::Dec(Loc::Reg(Reg::A)), ZPAQLOp::ArshiftN{n: 1}], _ => vec![ZPAQLOp::Zero(Loc::Reg(Reg::A)), ZPAQLOp::Dec(Loc::Reg(Reg::A)), ZPAQLOp::ArshiftN{n: 1}, ZPAQLOp::Set{target: loc.clone(), source: Loc::Reg(Reg::A)}], } } else if value == 0 { vec![ZPAQLOp::Zero(loc.clone())] } else if value < 256 { vec![ZPAQLOp::SetN{target: loc.clone(), n: value as u8}] } else if value < 65536 { ch.remove_reg(&Loc::Reg(Reg::A)); match loc { &Loc::Reg(Reg::A) => vec![ZPAQLOp::SetN{target: Loc::Reg(Reg::A), n: (value/256u32) as u8}, ZPAQLOp::AlshiftN{n: 8}, ZPAQLOp::AaddN{n: (value%256u32) as u8}], _ => vec![ZPAQLOp::SetN{target: Loc::Reg(Reg::A), n: (value/256u32) as u8}, ZPAQLOp::AlshiftN{n: 8}, ZPAQLOp::AaddN{n: (value%256u32) as u8}, ZPAQLOp::Set{target: loc.clone(), source: Loc::Reg(Reg::A)}], } } else if value < 16777216 { ch.remove_reg(&Loc::Reg(Reg::A)); match loc { &Loc::Reg(Reg::A) => vec![ZPAQLOp::SetN{target: Loc::Reg(Reg::A), n: (value/65536u32) as u8}, ZPAQLOp::AlshiftN{n: 8}, ZPAQLOp::AaddN{n: ((value%65536u32)/256u32) as u8}, ZPAQLOp::AlshiftN{n: 8}, ZPAQLOp::AaddN{n: (value%256u32) as u8}], _ => vec![ZPAQLOp::SetN{target: Loc::Reg(Reg::A), n: (value/65536u32) as u8}, ZPAQLOp::AlshiftN{n: 8}, ZPAQLOp::AaddN{n: ((value%65536u32)/256u32) as u8}, ZPAQLOp::AlshiftN{n: 8}, ZPAQLOp::AaddN{n: (value%256u32) as u8}, ZPAQLOp::Set{target: loc.clone(), source: Loc::Reg(Reg::A)}], } } else { ch.remove_reg(&Loc::Reg(Reg::A)); match loc { &Loc::Reg(Reg::A) => vec![ZPAQLOp::SetN{target: Loc::Reg(Reg::A), n: (value/16777216u32) as u8}, ZPAQLOp::AlshiftN{n: 8}, ZPAQLOp::AaddN{n: ((value%16777216u32)/65536u32) as u8}, ZPAQLOp::AlshiftN{n: 8}, ZPAQLOp::AaddN{n: 
((value%65536u32)/256u32) as u8}, ZPAQLOp::AlshiftN{n: 8}, ZPAQLOp::AaddN{n: (value%256u32) as u8}], _ => vec![ZPAQLOp::SetN{target: Loc::Reg(Reg::A), n: (value/16777216u32) as u8}, ZPAQLOp::AlshiftN{n: 8}, ZPAQLOp::AaddN{n: ((value%16777216u32)/65536u32) as u8}, ZPAQLOp::AlshiftN{n: 8}, ZPAQLOp::AaddN{n: ((value%65536u32)/256u32) as u8}, ZPAQLOp::AlshiftN{n: 8}, ZPAQLOp::AaddN{n: (value%256u32) as u8}, ZPAQLOp::Set{target: loc.clone(), source: Loc::Reg(Reg::A)}], } }; ch.remove_reg(&loc); ch.last_hold.insert(loc.clone(), IRVar::Number{value: value}); vecc.into_iter().filter(|t| match t { &ZPAQLOp::AaddN{n: 0} => false, _ => true, } ).collect() } /// returns the location of a variable and needed calculations, can overwrite A, C and D, keeps track in the cache fn gen_loc_for_var(var: &IRVar, ch: &mut Cache) -> (Vec<ZPAQLOp>, Loc) { match &(var.tovar()) { &IRVar::H{index_varid, orig_name: _} => { if ch.is_loc(&Loc::HD, &(var.tovar())) || ch.is_loc(&Loc::Reg(Reg::OtherReg(OtherReg::D)), &IRVar::Var{varid: index_varid}) { (vec![], Loc::HD) } else { ch.last_hold.insert(Loc::Reg(Reg::OtherReg(OtherReg::D)), IRVar::Var{varid: index_varid}); ch.last_hold.insert(Loc::HD, var.tovar()); if ch.is_loc(&Loc::Reg(Reg::A), &IRVar::Var{varid: index_varid}){ (vec![ZPAQLOp::Set{target: Loc::Reg(Reg::OtherReg(OtherReg::D)), source: Loc::Reg(Reg::A)}], Loc::HD) } else { (vec![ZPAQLOp::SetR{target: Reg::OtherReg(OtherReg::D), r: index_varid}], Loc::HD) } } }, &IRVar::Ht{stack_offset, local, ref orig_name} => { if ch.is_loc(&Loc::HD, &(var.tovar())) { (vec![], Loc::HD) } else if stack_offset > 0 && ch.is_loc(&Loc::HD, &IRVar::Ht{stack_offset: stack_offset-1, local: local, orig_name: orig_name.clone()}) { ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::D))); ch.last_hold.insert(Loc::HD, var.tovar()); (vec![ZPAQLOp::Inc(Loc::Reg(Reg::OtherReg(OtherReg::D)))], Loc::HD) } else if ch.is_loc(&Loc::HD, &IRVar::Ht{stack_offset: stack_offset+1, local: local, orig_name: orig_name.clone()}) { 
ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::D))); ch.last_hold.insert(Loc::HD, var.tovar()); (vec![ZPAQLOp::Dec(Loc::Reg(Reg::OtherReg(OtherReg::D)))], Loc::HD) } else { let mut v = vec![]; if !ch.is_loc(&Loc::Reg(Reg::A), &IRVar::Var{varid: if local { 0 } else { 252 }}) { v.push(ZPAQLOp::SetR{target: Reg::A, r: if local { 0 } else { 252 } }); } if stack_offset == 1 { v.push(ZPAQLOp::Inc(Loc::Reg(Reg::A))); } else if stack_offset < 256 { v.push(ZPAQLOp::AaddN{n: stack_offset as u8}); } else { // @TODO: use calc_number(offset) to add on r0 panic!("not implemented") } v.push(ZPAQLOp::Set{target: Loc::Reg(Reg::OtherReg(OtherReg::D)), source: Loc::Reg(Reg::A)}); ch.remove_reg(&Loc::Reg(Reg::A)); ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::D))); ch.last_hold.insert(Loc::HD, var.tovar()); (v, Loc::HD) } }, &IRVar::Hx{addr} => { if ch.is_loc(&Loc::Reg(Reg::OtherReg(OtherReg::D)), &IRVar::Number{value: addr}) | ch.is_loc(&Loc::HD, &(var.tovar())) { (vec![], Loc::HD) } else { let v = calc_number(addr, &Loc::Reg(Reg::OtherReg(OtherReg::D)), ch); ch.last_hold.insert(Loc::HD, var.tovar()); (v, Loc::HD) } }, &IRVar::Mx{addr} => { if ch.is_loc(&Loc::Reg(Reg::OtherReg(OtherReg::C)), &IRVar::Number{value: addr}) | ch.is_loc(&Loc::MC, &(var.tovar())) { (vec![], Loc::MC) } else { let v = calc_number(addr, &Loc::Reg(Reg::OtherReg(OtherReg::C)), ch); ch.last_hold.insert(Loc::MC, var.tovar()); (v, Loc::MC) } }, &IRVar::M{index_varid} => { if ch.is_loc(&Loc::MC, &(var.tovar())) || ch.is_loc(&Loc::Reg(Reg::OtherReg(OtherReg::C)), &IRVar::Var{varid: index_varid}) { (vec![], Loc::MC) } else { ch.last_hold.insert(Loc::Reg(Reg::OtherReg(OtherReg::C)), IRVar::Var{varid: index_varid}); ch.last_hold.insert(Loc::MC, var.tovar()); if ch.is_loc(&Loc::Reg(Reg::A), &IRVar::Var{varid: index_varid}){ (vec![ZPAQLOp::Set{target: Loc::Reg(Reg::OtherReg(OtherReg::C)), source: Loc::Reg(Reg::A)}], Loc::MC) } else { (vec![ZPAQLOp::SetR{target: Reg::OtherReg(OtherReg::C), r: index_varid}], Loc::MC) } 
} }, _ => { error!("no clear location for {}", var); panic!("error") }, } } /// copy value of variable to the location, keeps track in the cache and /// can overwrite D, B and A on the way, so if variable is on H, loc can't be HD and if variable is on M, loc can't be MB fn assign_var_to_loc(var: &IRVar, loc: &Loc, ch: &mut Cache) -> Vec<ZPAQLOp> { if ch.is_loc(loc, &(var.tovar())) { vec![] } else if ch.is_loc(&Loc::Reg(Reg::A), &(var.tovar())) { ch.remove_reg(loc); ch.last_hold.insert(loc.clone(), var.tovar()); vec![ZPAQLOp::Set{target: loc.clone(), source: Loc::Reg(Reg::A)} ] } else { let v = match &(var.tovar()) { &IRVar::Number{value} => { // Big numbers need to be computed calc_number(value, loc, ch) }, &IRVar::Var{varid} => { match loc { &Loc::Reg(ref reg) => { if ch.is_loc(&Loc::Reg(Reg::A), &(var.tovar())) { ch.last_hold.insert(Loc::Reg(reg.clone()), var.tovar()); vec![ZPAQLOp::Set{target: loc.clone(), source: Loc::Reg(Reg::A)} ] } else { ch.last_hold.insert(Loc::Reg(reg.clone()), var.tovar()); vec![ZPAQLOp::SetR{target: reg.clone(), r: varid}] } }, _ => { if ch.is_loc(&Loc::Reg(Reg::A), &(var.tovar())) { vec![ZPAQLOp::Set{target: loc.clone(), source: Loc::Reg(Reg::A)} ] } else { ch.last_hold.insert(Loc::Reg(Reg::A), var.tovar()); vec![ZPAQLOp::SetR{target: Reg::A, r: varid}, ZPAQLOp::Set{target: loc.clone(), source: Loc::Reg(Reg::A)} ] } }, } }, &IRVar::H{index_varid, orig_name: _} => { match loc { &Loc::HD => { error!("Value of D would be overwritten before setting HD") ; panic!("error") }, _ => {}, } let mut m = vec![]; if !ch.is_loc(&Loc::Reg(Reg::OtherReg(OtherReg::D)), &IRVar::Var{varid: index_varid}) && !ch.is_loc(&Loc::HD, &(var.tovar())) { if ch.is_loc(&Loc::Reg(Reg::A), &IRVar::Var{varid: index_varid}){ m.push(ZPAQLOp::Set{target: Loc::Reg(Reg::OtherReg(OtherReg::D)), source: Loc::Reg(Reg::A)}); } else { m.push(ZPAQLOp::SetR{target: Reg::OtherReg(OtherReg::D), r: index_varid}); } ch.last_hold.insert(Loc::Reg(Reg::OtherReg(OtherReg::D)), 
IRVar::Var{varid: index_varid} ); ch.last_hold.insert(Loc::HD, var.tovar()); } m.push(ZPAQLOp::Set{target: loc.clone(), source: Loc::HD}); m }, &IRVar::Ht{stack_offset, local, ref orig_name} => { let mut v = vec![]; match loc { &Loc::HD => { error!("Value of D would be overwritten before setting HD") ; panic!("error") }, _ => {}, } if stack_offset > 0 && ch.is_loc(&Loc::HD, &IRVar::Ht{stack_offset: stack_offset-1, local: local, orig_name: orig_name.clone()}) { ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::D))); v.push(ZPAQLOp::Inc(Loc::Reg(Reg::OtherReg(OtherReg::D)))); ch.last_hold.insert(Loc::HD, var.tovar()); } else if ch.is_loc(&Loc::HD, &IRVar::Ht{stack_offset: stack_offset+1, local: local, orig_name: orig_name.clone()}) { ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::D))); v.push(ZPAQLOp::Dec(Loc::Reg(Reg::OtherReg(OtherReg::D)))); ch.last_hold.insert(Loc::HD, var.tovar()); } else if !ch.is_loc(&Loc::HD, &(var.tovar())) { if !ch.is_loc(&Loc::Reg(Reg::A), &IRVar::Var{varid: if local { 0 } else { 252 } }) { v.push(ZPAQLOp::SetR{target: Reg::A, r: if local { 0 } else { 252 } }); } if stack_offset == 1 { v.push(ZPAQLOp::Inc(Loc::Reg(Reg::A))); } else if stack_offset < 256 { v.push(ZPAQLOp::AaddN{n: stack_offset as u8}); } else { // @TODO: use calc_number(offset) to add on r0 panic!("not implemented") } ch.remove_reg(&Loc::Reg(Reg::A)); v.push(ZPAQLOp::Set{target: Loc::Reg(Reg::OtherReg(OtherReg::D)), source: Loc::Reg(Reg::A)}); ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::D))); ch.last_hold.insert(Loc::HD, var.tovar()); } v.push(ZPAQLOp::Set{target: loc.clone(), source: Loc::HD}); v }, &IRVar::Hx{addr} => { let mut v = if ch.is_loc(&Loc::HD, &(var.tovar())) || ch.is_loc(&Loc::Reg(Reg::OtherReg(OtherReg::D)), &IRVar::Number{value: addr}) { vec![] } else { calc_number(addr, &Loc::Reg(Reg::OtherReg(OtherReg::D)), ch) }; ch.last_hold.insert(Loc::HD, var.tovar()); match loc { &Loc::HD => { error!("Value of D would be overwritten before setting HD") ; 
panic!("error") }, _ => {}, } v.push(ZPAQLOp::Set{target: loc.clone(), source: Loc::HD}); v }, &IRVar::Mx{addr} => { let mut v = if ch.is_loc(&Loc::MB, &(var.tovar())) || ch.is_loc(&Loc::Reg(Reg::OtherReg(OtherReg::B)), &IRVar::Number{value: addr}) { vec![] } else { calc_number(addr, &Loc::Reg(Reg::OtherReg(OtherReg::B)), ch) }; ch.last_hold.insert(Loc::MB, var.tovar()); match loc { &Loc::MB => { error!("Value of B would be overwritten before setting MB") ; panic!("error") }, _ => {}, } v.push(ZPAQLOp::Set{target: loc.clone(), source: Loc::MB}); v }, &IRVar::M{index_varid} => { match loc { &Loc::MB => { error!("Value of B would be overwritten before setting MB") ; panic!("error") }, _ => {}, } let mut m = vec![]; if !ch.is_loc(&Loc::Reg(Reg::OtherReg(OtherReg::B)), &IRVar::Var{varid: index_varid}) && !ch.is_loc(&Loc::MB, &(var.tovar())) { if ch.is_loc(&Loc::Reg(Reg::A), &IRVar::Var{varid: index_varid}){ m.push(ZPAQLOp::Set{target: Loc::Reg(Reg::OtherReg(OtherReg::B)), source: Loc::Reg(Reg::A)}); } else { m.push(ZPAQLOp::SetR{target: Reg::OtherReg(OtherReg::B), r: index_varid}); } ch.last_hold.insert(Loc::Reg(Reg::OtherReg(OtherReg::B)), IRVar::Var{varid: index_varid} ); ch.last_hold.insert(Loc::MB, var.tovar()); } m.push(ZPAQLOp::Set{target: loc.clone(), source: Loc::MB}); m }, _ => unreachable!(), }; ch.remove_reg(loc); ch.last_hold.insert(loc.clone(), var.tovar()); v } } // @TODO: maybe sometimes swap (<>) can be used if chache entries exist to to preserve them by swapping back afterwards /// copy value of location into the location of the variable, keeps track in the cache /// and can overwrite D, C and A, so if variable is on H, loc can't be HD or D and if variable is on M, loc can't be C or MC fn assign_loc_to_var(var: &IRVar, loc: &Loc, ch: &mut Cache) -> Vec<ZPAQLOp> { if ch.last_hold.get(loc).is_some() && ch.last_hold.get(loc).unwrap() == var { return vec![]; // if optimisations are to be turned off, also this case would have to be skipped } match 
&(var.tovar()) { &IRVar::Number{value: _} => { error!("impossible to assign a value to a number"); panic!("error") }, &IRVar::Var{varid} => { let mut v = vec![]; if loc != &Loc::Reg(Reg::A) && !(ch.last_hold.get(&Loc::Reg(Reg::A)).is_some() && ch.last_hold.get(&Loc::Reg(Reg::A)) == ch.last_hold.get(loc)) { v.push(ZPAQLOp::Set{target: Loc::Reg(Reg::A), source: loc.clone()}); } v.push(ZPAQLOp::RsetA{n: varid}); ch.delete(var); ch.delete_references(varid); ch.last_hold.insert(loc.clone(), var.tovar()); ch.last_hold.insert(Loc::Reg(Reg::A), var.tovar()); v }, &IRVar::H{index_varid, orig_name: _} => { if loc == &Loc::Reg(Reg::OtherReg(OtherReg::D)) || loc == &Loc::HD { error!("would overwrite source"); panic!("error") } let mut v = vec![]; if !ch.is_loc(&Loc::Reg(Reg::OtherReg(OtherReg::D)), &IRVar::Var{varid: index_varid}) { v.push(ZPAQLOp::SetR{target: Reg::OtherReg(OtherReg::D), r: index_varid}); ch.last_hold.insert(Loc::Reg(Reg::OtherReg(OtherReg::D)), IRVar::Var{varid: index_varid}); } v.push(ZPAQLOp::Set{target: Loc::HD, source: loc.clone()}); ch.delete(var); ch.last_hold.insert(loc.clone(), var.tovar()); ch.last_hold.insert(Loc::HD, var.tovar()); v }, &IRVar::Ht{stack_offset, local, ref orig_name} => { let mut v = vec![]; if loc == &Loc::Reg(Reg::A) { v.push(ZPAQLOp::Set{target: Loc::Reg(Reg::OtherReg(OtherReg::C)), source: Loc::Reg(Reg::A)}); ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::C))); } else if loc == &Loc::Reg(Reg::OtherReg(OtherReg::D)) || loc == &Loc::HD { error!("would overwrite source"); panic!("error") } if stack_offset > 0 && ch.is_loc(&Loc::HD, &IRVar::Ht{stack_offset: stack_offset-1, local: local, orig_name: orig_name.clone()}) { ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::D))); v.push(ZPAQLOp::Inc(Loc::Reg(Reg::OtherReg(OtherReg::D)))); } else if ch.is_loc(&Loc::HD, &IRVar::Ht{stack_offset: stack_offset+1, local: local, orig_name: orig_name.clone()}) { ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::D))); 
v.push(ZPAQLOp::Dec(Loc::Reg(Reg::OtherReg(OtherReg::D)))); } else if !ch.is_loc(&Loc::HD, &(var.tovar())) { if !ch.is_loc(&Loc::Reg(Reg::A), &IRVar::Var{varid: if local {0} else {252} }) { v.push(ZPAQLOp::SetR{target: Reg::A, r: if local {0} else {252} }); } if stack_offset == 1 { v.push(ZPAQLOp::Inc(Loc::Reg(Reg::A))); } else if stack_offset < 256 { v.push(ZPAQLOp::AaddN{n: stack_offset as u8}); } else { // @TODO: use calc_number(offset) to add on r0 panic!("not implemented") } ch.remove_reg(&Loc::Reg(Reg::A)); ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::D))); v.push(ZPAQLOp::Set{target: Loc::Reg(Reg::OtherReg(OtherReg::D)), source: Loc::Reg(Reg::A)}); } ch.delete(var); ch.last_hold.insert(if loc == &Loc::Reg(Reg::A) { Loc::Reg(Reg::OtherReg(OtherReg::C)) } else {loc.clone()}, var.tovar()); ch.last_hold.insert(Loc::HD, var.tovar()); v.push(ZPAQLOp::Set{target: Loc::HD, source: if loc == &Loc::Reg(Reg::A) { Loc::Reg(Reg::OtherReg(OtherReg::C)) } else {loc.clone()} }); v }, &IRVar::Hx{addr} => { let mut v = vec![]; if loc == &Loc::Reg(Reg::A) { v.push(ZPAQLOp::Set{target: Loc::Reg(Reg::OtherReg(OtherReg::C)), source: Loc::Reg(Reg::A)}); ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::C))); } else if loc == &Loc::Reg(Reg::OtherReg(OtherReg::D)) || loc == &Loc::HD { error!("would overwrite source"); panic!("error") } if !ch.is_loc(&Loc::HD, &(var.tovar())) && !ch.is_loc(&Loc::Reg(Reg::OtherReg(OtherReg::D)), &IRVar::Number{value: addr}) { v.extend_from_slice(&calc_number(addr, &Loc::Reg(Reg::OtherReg(OtherReg::D)), ch)); } ch.delete(var); ch.last_hold.insert(if loc == &Loc::Reg(Reg::A) { Loc::Reg(Reg::OtherReg(OtherReg::C)) } else {loc.clone()}, var.tovar()); ch.last_hold.insert(Loc::HD, var.tovar()); v.push(ZPAQLOp::Set{target: Loc::HD, source: if loc == &Loc::Reg(Reg::A) { Loc::Reg(Reg::OtherReg(OtherReg::C)) } else {loc.clone()} } ); v }, &IRVar::Mx{addr} => { let mut v = vec![]; if loc == &Loc::Reg(Reg::A) { v.push(ZPAQLOp::Set{target: 
Loc::Reg(Reg::OtherReg(OtherReg::D)), source: Loc::Reg(Reg::A)}); ch.remove_reg(&Loc::Reg(Reg::OtherReg(OtherReg::D))); } else if loc == &Loc::Reg(Reg::OtherReg(OtherReg::C)) || loc == &Loc::MC { error!("would overwrite source"); panic!("error") } if !ch.is_loc(&Loc::MC, &(var.tovar())) && !ch.is_loc(&Loc::Reg(Reg::OtherReg(OtherReg::C)), &IRVar::Number{value: addr}) { v.extend_from_slice(&calc_number(addr, &Loc::Reg(Reg::OtherReg(OtherReg::C)), ch)); } ch.delete(var); ch.last_hold.insert(if loc == &Loc::Reg(Reg::A) { Loc::Reg(Reg::OtherReg(OtherReg::D)) } else {loc.clone()}, var.tovar()); ch.last_hold.insert(Loc::MC, var.tovar()); v.push(ZPAQLOp::Set{target: Loc::MC, source: if loc == &Loc::Reg(Reg::A) { Loc::Reg(Reg::OtherReg(OtherReg::D)) } else {loc.clone()} }); v }, &IRVar::M{index_varid} => { if loc == &Loc::Reg(Reg::OtherReg(OtherReg::C)) || loc == &Loc::MC { error!("would overwrite source"); panic!("error") } let mut v = vec![]; if !ch.is_loc(&Loc::Reg(Reg::OtherReg(OtherReg::C)), &IRVar::Var{varid: index_varid}) { v.push(ZPAQLOp::SetR{target: Reg::OtherReg(OtherReg::C), r: index_varid}); ch.last_hold.insert(Loc::Reg(Reg::OtherReg(OtherReg::C)), IRVar::Var{varid: index_varid}); } v.push(ZPAQLOp::Set{target: Loc::MC, source: loc.clone()}); ch.delete(var); ch.last_hold.insert(loc.clone(), var.tovar()); ch.last_hold.insert(Loc::MC, var.tovar()); v }, _ => unreachable!(), } }<|fim▁end|>
IRUOp::Invert => { code.push(ZPAQLOp::Not(Loc::Reg(Reg::A)) ); }, // bitwise ~ IRUOp::USub => { code.push(ZPAQLOp::Not(Loc::Reg(Reg::A)) ); code.push(ZPAQLOp::Inc(Loc::Reg(Reg::A))); }, // -x == ~x + 1 } ch.remove_reg(&Loc::Reg(Reg::A));
<|file_name|>dft.py<|end_file_name|><|fim▁begin|>import numpy as np from pyquante2.dft.functionals import xs,cvwn5 # Maybe move these to the functionals module and import from there? xname = dict(lda=xs,xs=xs,svwn=xs) cname = dict(lda=cvwn5,svwn=cvwn5,xs=None) def get_xc(grid,D,**kwargs): xcname = kwargs.get('xcname','lda') # Does not work on either gradient corrected functionals or spin-polarized functionals yet.<|fim▁hole|> xfunc = xname[xcname] cfunc = cname[xcname] rho = grid.getdens(D) fx,dfxa = xfunc(rho) if cfunc: fc,dfca,dfcb = cfunc(rho,rho) else: fc=dfca=dfcb=0 w = grid.points[:,3] Vxc = np.einsum('g,g,gI,gJ->IJ',w,dfxa+dfca,grid.bfamps,grid.bfamps) # The fx comes from either the up or the down spin, whereas the fc comes from # both (which is why x is called with either one, and c is called with both Exc = np.dot(w,2*fx+fc) return Exc,Vxc<|fim▁end|>
<|file_name|>orthographic.rs<|end_file_name|><|fim▁begin|>#[cfg(feature = "arbitrary")] use quickcheck::{Arbitrary, Gen}; use rand::distributions::{Distribution, Standard}; use rand::Rng; #[cfg(feature = "serde-serialize")] use serde::{Deserialize, Deserializer, Serialize, Serializer}; use std::fmt; use std::mem; use simba::scalar::RealField; use crate::base::dimension::U3; use crate::base::helper; use crate::base::storage::Storage; use crate::base::{Matrix4, Vector, Vector3}; use crate::geometry::{Point3, Projective3}; /// A 3D orthographic projection stored as a homogeneous 4x4 matrix. pub struct Orthographic3<N: RealField> { matrix: Matrix4<N>, } impl<N: RealField> Copy for Orthographic3<N> {} impl<N: RealField> Clone for Orthographic3<N> { #[inline] fn clone(&self) -> Self { Self::from_matrix_unchecked(self.matrix.clone()) } } impl<N: RealField> fmt::Debug for Orthographic3<N> { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { self.matrix.fmt(f) } } impl<N: RealField> PartialEq for Orthographic3<N> { #[inline] fn eq(&self, right: &Self) -> bool { self.matrix == right.matrix } } #[cfg(feature = "serde-serialize")] impl<N: RealField + Serialize> Serialize for Orthographic3<N> { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.matrix.serialize(serializer) } } #[cfg(feature = "serde-serialize")] impl<'a, N: RealField + Deserialize<'a>> Deserialize<'a> for Orthographic3<N> { fn deserialize<Des>(deserializer: Des) -> Result<Self, Des::Error> where Des: Deserializer<'a>, { let matrix = Matrix4::<N>::deserialize(deserializer)?; Ok(Self::from_matrix_unchecked(matrix)) } } impl<N: RealField> Orthographic3<N> { /// Creates a new orthographic projection matrix. /// /// This follows the OpenGL convention, so this will flip the `z` axis. 
/// /// # Example /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::{Orthographic3, Point3}; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// // Check this projection actually transforms the view cuboid into the double-unit cube. /// // See https://www.nalgebra.org/projections/#orthographic-projection for more details. /// let p1 = Point3::new(1.0, 2.0, -0.1); /// let p2 = Point3::new(1.0, 2.0, -1000.0); /// let p3 = Point3::new(1.0, 20.0, -0.1); /// let p4 = Point3::new(1.0, 20.0, -1000.0); /// let p5 = Point3::new(10.0, 2.0, -0.1); /// let p6 = Point3::new(10.0, 2.0, -1000.0); /// let p7 = Point3::new(10.0, 20.0, -0.1); /// let p8 = Point3::new(10.0, 20.0, -1000.0); /// /// assert_relative_eq!(proj.project_point(&p1), Point3::new(-1.0, -1.0, -1.0)); /// assert_relative_eq!(proj.project_point(&p2), Point3::new(-1.0, -1.0, 1.0)); /// assert_relative_eq!(proj.project_point(&p3), Point3::new(-1.0, 1.0, -1.0)); /// assert_relative_eq!(proj.project_point(&p4), Point3::new(-1.0, 1.0, 1.0)); /// assert_relative_eq!(proj.project_point(&p5), Point3::new( 1.0, -1.0, -1.0)); /// assert_relative_eq!(proj.project_point(&p6), Point3::new( 1.0, -1.0, 1.0)); /// assert_relative_eq!(proj.project_point(&p7), Point3::new( 1.0, 1.0, -1.0)); /// assert_relative_eq!(proj.project_point(&p8), Point3::new( 1.0, 1.0, 1.0)); /// /// // This also works with flipped axis. In other words, we allow that /// // `left > right`, `bottom > top`, and/or `znear > zfar`. 
/// let proj = Orthographic3::new(10.0, 1.0, 20.0, 2.0, 1000.0, 0.1); /// /// assert_relative_eq!(proj.project_point(&p1), Point3::new( 1.0, 1.0, 1.0)); /// assert_relative_eq!(proj.project_point(&p2), Point3::new( 1.0, 1.0, -1.0)); /// assert_relative_eq!(proj.project_point(&p3), Point3::new( 1.0, -1.0, 1.0)); /// assert_relative_eq!(proj.project_point(&p4), Point3::new( 1.0, -1.0, -1.0)); /// assert_relative_eq!(proj.project_point(&p5), Point3::new(-1.0, 1.0, 1.0)); /// assert_relative_eq!(proj.project_point(&p6), Point3::new(-1.0, 1.0, -1.0)); /// assert_relative_eq!(proj.project_point(&p7), Point3::new(-1.0, -1.0, 1.0)); /// assert_relative_eq!(proj.project_point(&p8), Point3::new(-1.0, -1.0, -1.0)); /// ``` #[inline] pub fn new(left: N, right: N, bottom: N, top: N, znear: N, zfar: N) -> Self { let matrix = Matrix4::<N>::identity(); let mut res = Self::from_matrix_unchecked(matrix); res.set_left_and_right(left, right); res.set_bottom_and_top(bottom, top); res.set_znear_and_zfar(znear, zfar); res } /// Wraps the given matrix to interpret it as a 3D orthographic matrix. /// /// It is not checked whether or not the given matrix actually represents an orthographic /// projection. /// /// # Example /// ``` /// # use nalgebra::{Orthographic3, Point3, Matrix4}; /// let mat = Matrix4::new( /// 2.0 / 9.0, 0.0, 0.0, -11.0 / 9.0, /// 0.0, 2.0 / 18.0, 0.0, -22.0 / 18.0, /// 0.0, 0.0, -2.0 / 999.9, -1000.1 / 999.9, /// 0.0, 0.0, 0.0, 1.0 /// ); /// let proj = Orthographic3::from_matrix_unchecked(mat); /// assert_eq!(proj, Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0)); /// ``` #[inline] pub fn from_matrix_unchecked(matrix: Matrix4<N>) -> Self { Self { matrix: matrix } } /// Creates a new orthographic projection matrix from an aspect ratio and the vertical field of view. #[inline] pub fn from_fov(aspect: N, vfov: N, znear: N, zfar: N) -> Self { assert!( znear != zfar, "The far plane must not be equal to the near plane." 
); assert!( !relative_eq!(aspect, N::zero()), "The apsect ratio must not be zero." ); let half: N = crate::convert(0.5); let width = zfar * (vfov * half).tan(); let height = width / aspect; Self::new( -width * half, width * half, -height * half, height * half, znear, zfar, ) } /// Retrieves the inverse of the underlying homogeneous matrix. /// /// # Example /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::{Orthographic3, Point3, Matrix4}; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// let inv = proj.inverse(); /// /// assert_relative_eq!(inv * proj.as_matrix(), Matrix4::identity()); /// assert_relative_eq!(proj.as_matrix() * inv, Matrix4::identity()); /// /// let proj = Orthographic3::new(10.0, 1.0, 20.0, 2.0, 1000.0, 0.1); /// let inv = proj.inverse(); /// assert_relative_eq!(inv * proj.as_matrix(), Matrix4::identity()); /// assert_relative_eq!(proj.as_matrix() * inv, Matrix4::identity()); /// ``` #[inline] pub fn inverse(&self) -> Matrix4<N> { let mut res = self.to_homogeneous(); let inv_m11 = N::one() / self.matrix[(0, 0)]; let inv_m22 = N::one() / self.matrix[(1, 1)]; let inv_m33 = N::one() / self.matrix[(2, 2)]; res[(0, 0)] = inv_m11; res[(1, 1)] = inv_m22; res[(2, 2)] = inv_m33; res[(0, 3)] = -self.matrix[(0, 3)] * inv_m11; res[(1, 3)] = -self.matrix[(1, 3)] * inv_m22; res[(2, 3)] = -self.matrix[(2, 3)] * inv_m33; res } /// Computes the corresponding homogeneous matrix. /// /// # Example /// ``` /// # use nalgebra::{Orthographic3, Point3, Matrix4}; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// let expected = Matrix4::new( /// 2.0 / 9.0, 0.0, 0.0, -11.0 / 9.0, /// 0.0, 2.0 / 18.0, 0.0, -22.0 / 18.0, /// 0.0, 0.0, -2.0 / 999.9, -1000.1 / 999.9, /// 0.0, 0.0, 0.0, 1.0 /// ); /// assert_eq!(proj.to_homogeneous(), expected); /// ``` #[inline] pub fn to_homogeneous(&self) -> Matrix4<N> { self.matrix } /// A reference to the underlying homogeneous transformation matrix. 
/// /// # Example /// ``` /// # use nalgebra::{Orthographic3, Point3, Matrix4}; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// let expected = Matrix4::new( /// 2.0 / 9.0, 0.0, 0.0, -11.0 / 9.0, /// 0.0, 2.0 / 18.0, 0.0, -22.0 / 18.0, /// 0.0, 0.0, -2.0 / 999.9, -1000.1 / 999.9, /// 0.0, 0.0, 0.0, 1.0 /// ); /// assert_eq!(*proj.as_matrix(), expected); /// ``` #[inline] pub fn as_matrix(&self) -> &Matrix4<N> { &self.matrix } /// A reference to this transformation seen as a `Projective3`. /// /// # Example /// ``` /// # use nalgebra::Orthographic3; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// assert_eq!(proj.as_projective().to_homogeneous(), proj.to_homogeneous()); /// ``` #[inline] pub fn as_projective(&self) -> &Projective3<N> { unsafe { mem::transmute(self) } } /// This transformation seen as a `Projective3`. /// /// # Example /// ``` /// # use nalgebra::Orthographic3; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// assert_eq!(proj.to_projective().to_homogeneous(), proj.to_homogeneous()); /// ``` #[inline] pub fn to_projective(&self) -> Projective3<N> { Projective3::from_matrix_unchecked(self.matrix) } /// Retrieves the underlying homogeneous matrix. /// /// # Example /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::{Orthographic3, Point3, Matrix4}; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// let expected = Matrix4::new( /// 2.0 / 9.0, 0.0, 0.0, -11.0 / 9.0, /// 0.0, 2.0 / 18.0, 0.0, -22.0 / 18.0, /// 0.0, 0.0, -2.0 / 999.9, -1000.1 / 999.9, /// 0.0, 0.0, 0.0, 1.0 /// ); /// assert_eq!(proj.into_inner(), expected); /// ``` #[inline] pub fn into_inner(self) -> Matrix4<N> { self.matrix } /// Retrieves the underlying homogeneous matrix. /// Deprecated: Use [Orthographic3::into_inner] instead. 
#[deprecated(note = "use `.into_inner()` instead")] #[inline] pub fn unwrap(self) -> Matrix4<N> { self.matrix } /// The left offset of the view cuboid. /// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// assert_relative_eq!(proj.left(), 1.0, epsilon = 1.0e-6); /// /// let proj = Orthographic3::new(10.0, 1.0, 20.0, 2.0, 1000.0, 0.1); /// assert_relative_eq!(proj.left(), 10.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn left(&self) -> N { (-N::one() - self.matrix[(0, 3)]) / self.matrix[(0, 0)] } /// The right offset of the view cuboid. /// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// assert_relative_eq!(proj.right(), 10.0, epsilon = 1.0e-6); /// /// let proj = Orthographic3::new(10.0, 1.0, 20.0, 2.0, 1000.0, 0.1); /// assert_relative_eq!(proj.right(), 1.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn right(&self) -> N { (N::one() - self.matrix[(0, 3)]) / self.matrix[(0, 0)] } /// The bottom offset of the view cuboid. /// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// assert_relative_eq!(proj.bottom(), 2.0, epsilon = 1.0e-6); /// /// let proj = Orthographic3::new(10.0, 1.0, 20.0, 2.0, 1000.0, 0.1); /// assert_relative_eq!(proj.bottom(), 20.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn bottom(&self) -> N { (-N::one() - self.matrix[(1, 3)]) / self.matrix[(1, 1)] } /// The top offset of the view cuboid. 
/// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// assert_relative_eq!(proj.top(), 20.0, epsilon = 1.0e-6); /// /// let proj = Orthographic3::new(10.0, 1.0, 20.0, 2.0, 1000.0, 0.1); /// assert_relative_eq!(proj.top(), 2.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn top(&self) -> N { (N::one() - self.matrix[(1, 3)]) / self.matrix[(1, 1)] } /// The near plane offset of the view cuboid. /// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// assert_relative_eq!(proj.znear(), 0.1, epsilon = 1.0e-6); /// /// let proj = Orthographic3::new(10.0, 1.0, 20.0, 2.0, 1000.0, 0.1); /// assert_relative_eq!(proj.znear(), 1000.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn znear(&self) -> N { (N::one() + self.matrix[(2, 3)]) / self.matrix[(2, 2)] } /// The far plane offset of the view cuboid. /// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// assert_relative_eq!(proj.zfar(), 1000.0, epsilon = 1.0e-6); /// /// let proj = Orthographic3::new(10.0, 1.0, 20.0, 2.0, 1000.0, 0.1); /// assert_relative_eq!(proj.zfar(), 0.1, epsilon = 1.0e-6); /// ``` #[inline] pub fn zfar(&self) -> N { (-N::one() + self.matrix[(2, 3)]) / self.matrix[(2, 2)] } // FIXME: when we get specialization, specialize the Mul impl instead. /// Projects a point. Faster than matrix multiplication. 
/// /// # Example /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::{Orthographic3, Point3}; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// /// let p1 = Point3::new(1.0, 2.0, -0.1); /// let p2 = Point3::new(1.0, 2.0, -1000.0); /// let p3 = Point3::new(1.0, 20.0, -0.1); /// let p4 = Point3::new(1.0, 20.0, -1000.0); /// let p5 = Point3::new(10.0, 2.0, -0.1); /// let p6 = Point3::new(10.0, 2.0, -1000.0); /// let p7 = Point3::new(10.0, 20.0, -0.1); /// let p8 = Point3::new(10.0, 20.0, -1000.0); ///<|fim▁hole|> /// assert_relative_eq!(proj.project_point(&p1), Point3::new(-1.0, -1.0, -1.0)); /// assert_relative_eq!(proj.project_point(&p2), Point3::new(-1.0, -1.0, 1.0)); /// assert_relative_eq!(proj.project_point(&p3), Point3::new(-1.0, 1.0, -1.0)); /// assert_relative_eq!(proj.project_point(&p4), Point3::new(-1.0, 1.0, 1.0)); /// assert_relative_eq!(proj.project_point(&p5), Point3::new( 1.0, -1.0, -1.0)); /// assert_relative_eq!(proj.project_point(&p6), Point3::new( 1.0, -1.0, 1.0)); /// assert_relative_eq!(proj.project_point(&p7), Point3::new( 1.0, 1.0, -1.0)); /// assert_relative_eq!(proj.project_point(&p8), Point3::new( 1.0, 1.0, 1.0)); /// ``` #[inline] pub fn project_point(&self, p: &Point3<N>) -> Point3<N> { Point3::new( self.matrix[(0, 0)] * p[0] + self.matrix[(0, 3)], self.matrix[(1, 1)] * p[1] + self.matrix[(1, 3)], self.matrix[(2, 2)] * p[2] + self.matrix[(2, 3)], ) } /// Un-projects a point. Faster than multiplication by the underlying matrix inverse. 
/// /// # Example /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::{Orthographic3, Point3}; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// /// let p1 = Point3::new(-1.0, -1.0, -1.0); /// let p2 = Point3::new(-1.0, -1.0, 1.0); /// let p3 = Point3::new(-1.0, 1.0, -1.0); /// let p4 = Point3::new(-1.0, 1.0, 1.0); /// let p5 = Point3::new( 1.0, -1.0, -1.0); /// let p6 = Point3::new( 1.0, -1.0, 1.0); /// let p7 = Point3::new( 1.0, 1.0, -1.0); /// let p8 = Point3::new( 1.0, 1.0, 1.0); /// /// assert_relative_eq!(proj.unproject_point(&p1), Point3::new(1.0, 2.0, -0.1), epsilon = 1.0e-6); /// assert_relative_eq!(proj.unproject_point(&p2), Point3::new(1.0, 2.0, -1000.0), epsilon = 1.0e-6); /// assert_relative_eq!(proj.unproject_point(&p3), Point3::new(1.0, 20.0, -0.1), epsilon = 1.0e-6); /// assert_relative_eq!(proj.unproject_point(&p4), Point3::new(1.0, 20.0, -1000.0), epsilon = 1.0e-6); /// assert_relative_eq!(proj.unproject_point(&p5), Point3::new(10.0, 2.0, -0.1), epsilon = 1.0e-6); /// assert_relative_eq!(proj.unproject_point(&p6), Point3::new(10.0, 2.0, -1000.0), epsilon = 1.0e-6); /// assert_relative_eq!(proj.unproject_point(&p7), Point3::new(10.0, 20.0, -0.1), epsilon = 1.0e-6); /// assert_relative_eq!(proj.unproject_point(&p8), Point3::new(10.0, 20.0, -1000.0), epsilon = 1.0e-6); /// ``` #[inline] pub fn unproject_point(&self, p: &Point3<N>) -> Point3<N> { Point3::new( (p[0] - self.matrix[(0, 3)]) / self.matrix[(0, 0)], (p[1] - self.matrix[(1, 3)]) / self.matrix[(1, 1)], (p[2] - self.matrix[(2, 3)]) / self.matrix[(2, 2)], ) } // FIXME: when we get specialization, specialize the Mul impl instead. /// Projects a vector. Faster than matrix multiplication. /// /// Vectors are not affected by the translation part of the projection. 
/// /// # Example /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::{Orthographic3, Vector3}; /// let proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// /// let v1 = Vector3::x(); /// let v2 = Vector3::y(); /// let v3 = Vector3::z(); /// /// assert_relative_eq!(proj.project_vector(&v1), Vector3::x() * 2.0 / 9.0); /// assert_relative_eq!(proj.project_vector(&v2), Vector3::y() * 2.0 / 18.0); /// assert_relative_eq!(proj.project_vector(&v3), Vector3::z() * -2.0 / 999.9); /// ``` #[inline] pub fn project_vector<SB>(&self, p: &Vector<N, U3, SB>) -> Vector3<N> where SB: Storage<N, U3>, { Vector3::new( self.matrix[(0, 0)] * p[0], self.matrix[(1, 1)] * p[1], self.matrix[(2, 2)] * p[2], ) } /// Sets the left offset of the view cuboid. /// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let mut proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// proj.set_left(2.0); /// assert_relative_eq!(proj.left(), 2.0, epsilon = 1.0e-6); /// /// // It is OK to set a left offset greater than the current right offset. /// proj.set_left(20.0); /// assert_relative_eq!(proj.left(), 20.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn set_left(&mut self, left: N) { let right = self.right(); self.set_left_and_right(left, right); } /// Sets the right offset of the view cuboid. /// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let mut proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// proj.set_right(15.0); /// assert_relative_eq!(proj.right(), 15.0, epsilon = 1.0e-6); /// /// // It is OK to set a right offset smaller than the current left offset. /// proj.set_right(-3.0); /// assert_relative_eq!(proj.right(), -3.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn set_right(&mut self, right: N) { let left = self.left(); self.set_left_and_right(left, right); } /// Sets the bottom offset of the view cuboid. 
/// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let mut proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// proj.set_bottom(8.0); /// assert_relative_eq!(proj.bottom(), 8.0, epsilon = 1.0e-6); /// /// // It is OK to set a bottom offset greater than the current top offset. /// proj.set_bottom(50.0); /// assert_relative_eq!(proj.bottom(), 50.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn set_bottom(&mut self, bottom: N) { let top = self.top(); self.set_bottom_and_top(bottom, top); } /// Sets the top offset of the view cuboid. /// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let mut proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// proj.set_top(15.0); /// assert_relative_eq!(proj.top(), 15.0, epsilon = 1.0e-6); /// /// // It is OK to set a top offset smaller than the current bottom offset. /// proj.set_top(-3.0); /// assert_relative_eq!(proj.top(), -3.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn set_top(&mut self, top: N) { let bottom = self.bottom(); self.set_bottom_and_top(bottom, top); } /// Sets the near plane offset of the view cuboid. /// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let mut proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// proj.set_znear(8.0); /// assert_relative_eq!(proj.znear(), 8.0, epsilon = 1.0e-6); /// /// // It is OK to set a znear greater than the current zfar. /// proj.set_znear(5000.0); /// assert_relative_eq!(proj.znear(), 5000.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn set_znear(&mut self, znear: N) { let zfar = self.zfar(); self.set_znear_and_zfar(znear, zfar); } /// Sets the far plane offset of the view cuboid. 
/// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let mut proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// proj.set_zfar(15.0); /// assert_relative_eq!(proj.zfar(), 15.0, epsilon = 1.0e-6); /// /// // It is OK to set a zfar smaller than the current znear. /// proj.set_zfar(-3.0); /// assert_relative_eq!(proj.zfar(), -3.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn set_zfar(&mut self, zfar: N) { let znear = self.znear(); self.set_znear_and_zfar(znear, zfar); } /// Sets the view cuboid offsets along the `x` axis. /// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let mut proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// proj.set_left_and_right(7.0, 70.0); /// assert_relative_eq!(proj.left(), 7.0, epsilon = 1.0e-6); /// assert_relative_eq!(proj.right(), 70.0, epsilon = 1.0e-6); /// /// // It is also OK to have `left > right`. /// proj.set_left_and_right(70.0, 7.0); /// assert_relative_eq!(proj.left(), 70.0, epsilon = 1.0e-6); /// assert_relative_eq!(proj.right(), 7.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn set_left_and_right(&mut self, left: N, right: N) { assert!( left != right, "The left corner must not be equal to the right corner." ); self.matrix[(0, 0)] = crate::convert::<_, N>(2.0) / (right - left); self.matrix[(0, 3)] = -(right + left) / (right - left); } /// Sets the view cuboid offsets along the `y` axis. /// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let mut proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// proj.set_bottom_and_top(7.0, 70.0); /// assert_relative_eq!(proj.bottom(), 7.0, epsilon = 1.0e-6); /// assert_relative_eq!(proj.top(), 70.0, epsilon = 1.0e-6); /// /// // It is also OK to have `bottom > top`. 
/// proj.set_bottom_and_top(70.0, 7.0); /// assert_relative_eq!(proj.bottom(), 70.0, epsilon = 1.0e-6); /// assert_relative_eq!(proj.top(), 7.0, epsilon = 1.0e-6); /// ``` #[inline] pub fn set_bottom_and_top(&mut self, bottom: N, top: N) { assert!( bottom != top, "The top corner must not be equal to the bottom corner." ); self.matrix[(1, 1)] = crate::convert::<_, N>(2.0) / (top - bottom); self.matrix[(1, 3)] = -(top + bottom) / (top - bottom); } /// Sets the near and far plane offsets of the view cuboid. /// /// ``` /// # #[macro_use] extern crate approx; /// # use nalgebra::Orthographic3; /// let mut proj = Orthographic3::new(1.0, 10.0, 2.0, 20.0, 0.1, 1000.0); /// proj.set_znear_and_zfar(50.0, 5000.0); /// assert_relative_eq!(proj.znear(), 50.0, epsilon = 1.0e-6); /// assert_relative_eq!(proj.zfar(), 5000.0, epsilon = 1.0e-6); /// /// // It is also OK to have `znear > zfar`. /// proj.set_znear_and_zfar(5000.0, 0.5); /// assert_relative_eq!(proj.znear(), 5000.0, epsilon = 1.0e-6); /// assert_relative_eq!(proj.zfar(), 0.5, epsilon = 1.0e-6); /// ``` #[inline] pub fn set_znear_and_zfar(&mut self, znear: N, zfar: N) { assert!( zfar != znear, "The near-plane and far-plane must not be superimposed." 
); self.matrix[(2, 2)] = -crate::convert::<_, N>(2.0) / (zfar - znear); self.matrix[(2, 3)] = -(zfar + znear) / (zfar - znear); } } impl<N: RealField> Distribution<Orthographic3<N>> for Standard where Standard: Distribution<N>, { fn sample<R: Rng + ?Sized>(&self, r: &mut R) -> Orthographic3<N> { let left = r.gen(); let right = helper::reject_rand(r, |x: &N| *x > left); let bottom = r.gen(); let top = helper::reject_rand(r, |x: &N| *x > bottom); let znear = r.gen(); let zfar = helper::reject_rand(r, |x: &N| *x > znear); Orthographic3::new(left, right, bottom, top, znear, zfar) } } #[cfg(feature = "arbitrary")] impl<N: RealField + Arbitrary> Arbitrary for Orthographic3<N> where Matrix4<N>: Send, { fn arbitrary<G: Gen>(g: &mut G) -> Self { let left = Arbitrary::arbitrary(g); let right = helper::reject(g, |x: &N| *x > left); let bottom = Arbitrary::arbitrary(g); let top = helper::reject(g, |x: &N| *x > bottom); let znear = Arbitrary::arbitrary(g); let zfar = helper::reject(g, |x: &N| *x > znear); Self::new(left, right, bottom, top, znear, zfar) } } impl<N: RealField> From<Orthographic3<N>> for Matrix4<N> { #[inline] fn from(orth: Orthographic3<N>) -> Self { orth.into_inner() } }<|fim▁end|>
<|file_name|>user_module.cc<|end_file_name|><|fim▁begin|>/** ****************************************************************************** * Xenia : Xbox 360 Emulator Research Project * ****************************************************************************** * Copyright 2013 Ben Vanik. All rights reserved. * * Released under the BSD license - see LICENSE in the root for more details. * ****************************************************************************** */ #include "xenia/kernel/user_module.h" #include <vector> #include "xenia/base/byte_stream.h" #include "xenia/base/logging.h" #include "xenia/cpu/elf_module.h" #include "xenia/cpu/processor.h" #include "xenia/cpu/xex_module.h" #include "xenia/emulator.h" #include "xenia/kernel/xfile.h" #include "xenia/kernel/xthread.h" namespace xe { namespace kernel { UserModule::UserModule(KernelState* kernel_state) : XModule(kernel_state, ModuleType::kUserModule) {} UserModule::~UserModule() { Unload(); } uint32_t UserModule::title_id() const { if (module_format_ != kModuleFormatXex) { return 0; } auto header = xex_header(); for (uint32_t i = 0; i < header->header_count; i++) { auto& opt_header = header->headers[i]; if (opt_header.key == XEX_HEADER_EXECUTION_INFO) { auto opt_header_ptr = reinterpret_cast<const uint8_t*>(header) + opt_header.offset; auto opt_exec_info = reinterpret_cast<const xex2_opt_execution_info*>(opt_header_ptr); return static_cast<uint32_t>(opt_exec_info->title_id); } } return 0; } X_STATUS UserModule::LoadFromFile(std::string path) { X_STATUS result = X_STATUS_UNSUCCESSFUL; // Resolve the file to open. // TODO(benvanik): make this code shared? auto fs_entry = kernel_state()->file_system()->ResolvePath(path); if (!fs_entry) { XELOGE("File not found: %s", path.c_str()); return X_STATUS_NO_SUCH_FILE; } path_ = fs_entry->absolute_path(); name_ = NameFromPath(path_); // If the FS supports mapping, map the file in and load from that. if (fs_entry->can_map()) { // Map. 
auto mmap = fs_entry->OpenMapped(MappedMemory::Mode::kRead); if (!mmap) { return result; } // Load the module. result = LoadFromMemory(mmap->data(), mmap->size()); } else { std::vector<uint8_t> buffer(fs_entry->size()); // Open file for reading. vfs::File* file = nullptr; result = fs_entry->Open(vfs::FileAccess::kGenericRead, &file); if (XFAILED(result)) { return result; } // Read entire file into memory. // Ugh. size_t bytes_read = 0; result = file->ReadSync(buffer.data(), buffer.size(), 0, &bytes_read); if (XFAILED(result)) { return result; } // Load the module. result = LoadFromMemory(buffer.data(), bytes_read); // Close the file. file->Destroy(); } return result; } X_STATUS UserModule::LoadFromMemory(const void* addr, const size_t length) { auto processor = kernel_state()->processor(); auto magic = xe::load_and_swap<uint32_t>(addr); if (magic == 'XEX2') { module_format_ = kModuleFormatXex; } else if (magic == 0x7F454C46 /* 0x7F 'ELF' */) { module_format_ = kModuleFormatElf; } else { XELOGE("Unknown module magic: %.8X", magic); return X_STATUS_NOT_IMPLEMENTED; } if (module_format_ == kModuleFormatXex) { // Prepare the module for execution. // Runtime takes ownership. auto xex_module = std::make_unique<cpu::XexModule>(processor, kernel_state()); if (!xex_module->Load(name_, path_, addr, length)) { return X_STATUS_UNSUCCESSFUL; } processor_module_ = xex_module.get(); if (!processor->AddModule(std::move(xex_module))) { return X_STATUS_UNSUCCESSFUL; } // Copy the xex2 header into guest memory. 
auto header = this->xex_module()->xex_header(); auto security_header = this->xex_module()->xex_security_info(); guest_xex_header_ = memory()->SystemHeapAlloc(header->header_size); uint8_t* xex_header_ptr = memory()->TranslateVirtual(guest_xex_header_); std::memcpy(xex_header_ptr, header, header->header_size); // Setup the loader data entry auto ldr_data = memory()->TranslateVirtual<X_LDR_DATA_TABLE_ENTRY*>(hmodule_ptr_); ldr_data->dll_base = 0; // GetProcAddress will read this. ldr_data->xex_header_base = guest_xex_header_; ldr_data->full_image_size = security_header->image_size; this->xex_module()->GetOptHeader(XEX_HEADER_ENTRY_POINT, &ldr_data->entry_point); xe::be<uint32_t>* image_base_ptr = nullptr; if (this->xex_module()->GetOptHeader(XEX_HEADER_IMAGE_BASE_ADDRESS, &image_base_ptr)) { ldr_data->image_base = *image_base_ptr; } // Cache some commonly used headers... this->xex_module()->GetOptHeader(XEX_HEADER_ENTRY_POINT, &entry_point_); this->xex_module()->GetOptHeader(XEX_HEADER_DEFAULT_STACK_SIZE, &stack_size_); is_dll_module_ = !!(header->module_flags & XEX_MODULE_DLL_MODULE); } else if (module_format_ == kModuleFormatElf) { auto elf_module = std::make_unique<cpu::ElfModule>(processor, kernel_state()); if (!elf_module->Load(name_, path_, addr, length)) { return X_STATUS_UNSUCCESSFUL; } entry_point_ = elf_module->entry_point(); stack_size_ = 1024 * 1024; // 1 MB is_dll_module_ = false; // Hardcoded not a DLL (for now) processor_module_ = elf_module.get(); if (!processor->AddModule(std::move(elf_module))) { return X_STATUS_UNSUCCESSFUL; } } OnLoad(); return X_STATUS_SUCCESS; } X_STATUS UserModule::Unload() { if (module_format_ == kModuleFormatXex && (!processor_module_ || !xex_module()->loaded())) { // Quick abort. 
return X_STATUS_SUCCESS; } if (module_format_ == kModuleFormatXex && processor_module_ && xex_module()->Unload()) { OnUnload(); return X_STATUS_SUCCESS; } return X_STATUS_UNSUCCESSFUL; } uint32_t UserModule::GetProcAddressByOrdinal(uint16_t ordinal) { return xex_module()->GetProcAddress(ordinal); } uint32_t UserModule::GetProcAddressByName(const char* name) { return xex_module()->GetProcAddress(name); } X_STATUS UserModule::GetSection(const char* name, uint32_t* out_section_data, uint32_t* out_section_size) { xex2_opt_resource_info* resource_header = nullptr; if (!cpu::XexModule::GetOptHeader(xex_header(), XEX_HEADER_RESOURCE_INFO, &resource_header)) { // No resources. return X_STATUS_NOT_FOUND; } uint32_t count = (resource_header->size - 4) / sizeof(xex2_resource); for (uint32_t i = 0; i < count; i++) { auto& res = resource_header->resources[i]; if (std::strncmp(name, res.name, 8) == 0) { // Found! *out_section_data = res.address; *out_section_size = res.size; return X_STATUS_SUCCESS; } } return X_STATUS_NOT_FOUND; } X_STATUS UserModule::GetOptHeader(xe_xex2_header_keys key, void** out_ptr) { assert_not_null(out_ptr); if (module_format_ == kModuleFormatElf) { // Quick die. return X_STATUS_UNSUCCESSFUL; } bool ret = xex_module()->GetOptHeader(key, out_ptr); if (!ret) { return X_STATUS_NOT_FOUND; } return X_STATUS_SUCCESS; } X_STATUS UserModule::GetOptHeader(xe_xex2_header_keys key, uint32_t* out_header_guest_ptr) { if (module_format_ == kModuleFormatElf) { // Quick die. 
return X_STATUS_UNSUCCESSFUL; } auto header = memory()->TranslateVirtual<const xex2_header*>(guest_xex_header_); if (!header) { return X_STATUS_UNSUCCESSFUL; } return GetOptHeader(memory()->virtual_membase(), header, key, out_header_guest_ptr); } X_STATUS UserModule::GetOptHeader(uint8_t* membase, const xex2_header* header, xe_xex2_header_keys key, uint32_t* out_header_guest_ptr) { assert_not_null(out_header_guest_ptr); uint32_t field_value = 0; bool field_found = false; for (uint32_t i = 0; i < header->header_count; i++) { auto& opt_header = header->headers[i]; if (opt_header.key != key) { continue; } field_found = true; switch (opt_header.key & 0xFF) { case 0x00: // Return data stored in header value. field_value = opt_header.value; break; case 0x01: // Return pointer to data stored in header value. field_value = static_cast<uint32_t>( reinterpret_cast<const uint8_t*>(&opt_header.value) - membase); break; default: // Data stored at offset to header. field_value = static_cast<uint32_t>( reinterpret_cast<const uint8_t*>(header) - membase) + opt_header.offset; break; } break; } *out_header_guest_ptr = field_value; if (!field_found) { return X_STATUS_NOT_FOUND; } return X_STATUS_SUCCESS; } object_ref<XThread> UserModule::Launch(uint32_t flags) { XELOGI("Launching module..."); // Create a thread to run in. // We start suspended so we can run the debugger prep. auto thread = object_ref<XThread>( new XThread(kernel_state(), stack_size_, 0, entry_point_, 0, X_CREATE_SUSPENDED, true, true)); // We know this is the 'main thread'. char thread_name[32]; std::snprintf(thread_name, xe::countof(thread_name), "Main XThread%08X", thread->handle()); thread->set_name(thread_name); X_STATUS result = thread->Create(); if (XFAILED(result)) { XELOGE("Could not create launch thread: %.8X", result); return nullptr; } // Waits for a debugger client, if desired. if (emulator()->debugger()) { emulator()->debugger()->PreLaunch(); } // Resume the thread now. 
// If the debugger has requested a suspend this will just decrement the // suspend count without resuming it until the debugger wants. thread->Resume(); return thread; } bool UserModule::Save(ByteStream* stream) { if (!XModule::Save(stream)) { return false; } // A lot of the information stored on this class can be reconstructed at // runtime. return true; } object_ref<UserModule> UserModule::Restore(KernelState* kernel_state, ByteStream* stream, std::string path) { auto module = new UserModule(kernel_state); // XModule::Save took care of this earlier... // TODO: Find a nicer way to represent that here. if (!module->RestoreObject(stream)) { return nullptr; } auto result = module->LoadFromFile(path); if (XFAILED(result)) { XELOGD("UserModule::Restore LoadFromFile(%s) FAILED - code %.8X", path.c_str(), result); return nullptr; } if (!kernel_state->RegisterUserModule(retain_object(module))) { // Already loaded? assert_always(); } return object_ref<UserModule>(module); } void UserModule::Dump() { if (module_format_ == kModuleFormatElf) { // Quick die. return; } StringBuffer sb; xe::cpu::ExportResolver* export_resolver = kernel_state_->emulator()->export_resolver(); auto header = xex_header(); // XEX header. 
sb.AppendFormat("Module %s:\n", path_.c_str()); sb.AppendFormat(" Module Flags: %.8X\n", (uint32_t)header->module_flags); // Security header auto security_info = xex_module()->xex_security_info(); sb.AppendFormat("Security Header:\n"); sb.AppendFormat(" Image Flags: %.8X\n", (uint32_t)security_info->image_flags); sb.AppendFormat(" Load Address: %.8X\n", (uint32_t)security_info->load_address); sb.AppendFormat(" Image Size: %.8X\n", (uint32_t)security_info->image_size); sb.AppendFormat(" Export Table: %.8X\n", (uint32_t)security_info->export_table); // Optional headers sb.AppendFormat("Optional Header Count: %d\n", (uint32_t)header->header_count); for (uint32_t i = 0; i < header->header_count; i++) { auto& opt_header = header->headers[i]; // Stash a pointer (although this isn't used in every case) auto opt_header_ptr = reinterpret_cast<const uint8_t*>(header) + opt_header.offset; switch (opt_header.key) { case XEX_HEADER_RESOURCE_INFO: { sb.AppendFormat(" XEX_HEADER_RESOURCE_INFO:\n"); auto opt_resource_info = reinterpret_cast<const xex2_opt_resource_info*>(opt_header_ptr); uint32_t count = (opt_resource_info->size - 4) / 16; for (uint32_t j = 0; j < count; j++) { auto& res = opt_resource_info->resources[j]; // Manually NULL-terminate the name. 
char name[9]; std::memcpy(name, res.name, sizeof(res.name)); name[8] = 0; sb.AppendFormat( " %-8s %.8X-%.8X, %db\n", name, (uint32_t)res.address, (uint32_t)res.address + (uint32_t)res.size, (uint32_t)res.size); } } break; case XEX_HEADER_FILE_FORMAT_INFO: { sb.AppendFormat(" XEX_HEADER_FILE_FORMAT_INFO (TODO):\n"); } break; case XEX_HEADER_DELTA_PATCH_DESCRIPTOR: { sb.AppendFormat(" XEX_HEADER_DELTA_PATCH_DESCRIPTOR (TODO):\n"); } break; case XEX_HEADER_BOUNDING_PATH: { auto opt_bound_path = reinterpret_cast<const xex2_opt_bound_path*>(opt_header_ptr); sb.AppendFormat(" XEX_HEADER_BOUNDING_PATH: %s\n", opt_bound_path->path); } break; case XEX_HEADER_ORIGINAL_BASE_ADDRESS: { sb.AppendFormat(" XEX_HEADER_ORIGINAL_BASE_ADDRESS: %.8X\n", (uint32_t)opt_header.value); } break; case XEX_HEADER_ENTRY_POINT: { sb.AppendFormat(" XEX_HEADER_ENTRY_POINT: %.8X\n", (uint32_t)opt_header.value); } break; case XEX_HEADER_IMAGE_BASE_ADDRESS: { sb.AppendFormat(" XEX_HEADER_IMAGE_BASE_ADDRESS: %.8X\n", (uint32_t)opt_header.value); } break; case XEX_HEADER_IMPORT_LIBRARIES: { sb.AppendFormat(" XEX_HEADER_IMPORT_LIBRARIES:\n"); auto opt_import_libraries = reinterpret_cast<const xex2_opt_import_libraries*>(opt_header_ptr); // FIXME: Don't know if 32 is the actual limit, but haven't seen more // than 2. 
const char* string_table[32]; std::memset(string_table, 0, sizeof(string_table)); // Parse the string table for (size_t l = 0, j = 0; l < opt_import_libraries->string_table_size; j++) { assert_true(j < xe::countof(string_table)); const char* str = opt_import_libraries->string_table + l; string_table[j] = str; l += std::strlen(str) + 1; // Padding if ((l % 4) != 0) { l += 4 - (l % 4); } } auto libraries = reinterpret_cast<const uint8_t*>(opt_import_libraries) + opt_import_libraries->string_table_size + 12; uint32_t library_offset = 0; uint32_t library_count = opt_import_libraries->library_count; for (uint32_t l = 0; l < library_count; l++) { auto library = reinterpret_cast<const xex2_import_library*>( libraries + library_offset); auto name = string_table[library->name_index & 0xFF]; sb.AppendFormat(" %s - %d imports\n", name, (uint16_t)library->count); // Manually byteswap these because of the bitfields. xex2_version version, version_min; version.value = xe::byte_swap<uint32_t>(library->version.value); version_min.value = xe::byte_swap<uint32_t>(library->version_min.value); sb.AppendFormat(" Version: %d.%d.%d.%d\n", version.major, version.minor, version.build, version.qfe); sb.AppendFormat(" Min Version: %d.%d.%d.%d\n", version_min.major, version_min.minor, version_min.build, version_min.qfe); library_offset += library->size; } } break; case XEX_HEADER_CHECKSUM_TIMESTAMP: { sb.AppendFormat(" XEX_HEADER_CHECKSUM_TIMESTAMP (TODO):\n"); } break; case XEX_HEADER_ORIGINAL_PE_NAME: { auto opt_pe_name = reinterpret_cast<const xex2_opt_original_pe_name*>(opt_header_ptr); sb.AppendFormat(" XEX_HEADER_ORIGINAL_PE_NAME: %s\n", opt_pe_name->name); } break; case XEX_HEADER_STATIC_LIBRARIES: { sb.AppendFormat(" XEX_HEADER_STATIC_LIBRARIES:\n"); auto opt_static_libraries = reinterpret_cast<const xex2_opt_static_libraries*>(opt_header_ptr); uint32_t count = (opt_static_libraries->size - 4) / 0x10; for (uint32_t l = 0; l < count; l++) { auto& library = 
opt_static_libraries->libraries[l]; sb.AppendFormat(" %-8s : %d.%d.%d.%d\n", library.name, static_cast<uint16_t>(library.version_major), static_cast<uint16_t>(library.version_minor), static_cast<uint16_t>(library.version_build), static_cast<uint16_t>(library.version_qfe)); } } break; case XEX_HEADER_TLS_INFO: { sb.AppendFormat(" XEX_HEADER_TLS_INFO:\n"); auto opt_tls_info = reinterpret_cast<const xex2_opt_tls_info*>(opt_header_ptr); sb.AppendFormat(" Slot Count: %d\n", static_cast<uint32_t>(opt_tls_info->slot_count)); sb.AppendFormat(" Raw Data Address: %.8X\n", static_cast<uint32_t>(opt_tls_info->raw_data_address)); sb.AppendFormat(" Data Size: %d\n", static_cast<uint32_t>(opt_tls_info->data_size)); sb.AppendFormat(" Raw Data Size: %d\n", static_cast<uint32_t>(opt_tls_info->raw_data_size)); } break; case XEX_HEADER_DEFAULT_STACK_SIZE: { sb.AppendFormat(" XEX_HEADER_DEFAULT_STACK_SIZE: %d\n", static_cast<uint32_t>(opt_header.value)); } break; case XEX_HEADER_DEFAULT_FILESYSTEM_CACHE_SIZE: { sb.AppendFormat(" XEX_HEADER_DEFAULT_FILESYSTEM_CACHE_SIZE: %d\n", static_cast<uint32_t>(opt_header.value)); } break; case XEX_HEADER_DEFAULT_HEAP_SIZE: { sb.AppendFormat(" XEX_HEADER_DEFAULT_HEAP_SIZE: %d\n", static_cast<uint32_t>(opt_header.value)); } break; case XEX_HEADER_PAGE_HEAP_SIZE_AND_FLAGS: { sb.AppendFormat(" XEX_HEADER_PAGE_HEAP_SIZE_AND_FLAGS (TODO):\n"); } break; case XEX_HEADER_SYSTEM_FLAGS: { sb.AppendFormat(" XEX_HEADER_SYSTEM_FLAGS: %.8X\n", static_cast<uint32_t>(opt_header.value)); } break; case XEX_HEADER_EXECUTION_INFO: { sb.AppendFormat(" XEX_HEADER_EXECUTION_INFO:\n"); auto opt_exec_info = reinterpret_cast<const xex2_opt_execution_info*>(opt_header_ptr); sb.AppendFormat(" Media ID: %.8X\n", static_cast<uint32_t>(opt_exec_info->media_id)); sb.AppendFormat(" Title ID: %.8X\n", static_cast<uint32_t>(opt_exec_info->title_id)); sb.AppendFormat(" Savegame ID: %.8X\n", static_cast<uint32_t>(opt_exec_info->title_id)); sb.AppendFormat(" Disc Number / Total: %d / 
%d\n", opt_exec_info->disc_number, opt_exec_info->disc_count); } break; case XEX_HEADER_TITLE_WORKSPACE_SIZE: { sb.AppendFormat(" XEX_HEADER_TITLE_WORKSPACE_SIZE: %d\n", uint32_t(opt_header.value)); } break; case XEX_HEADER_GAME_RATINGS: { sb.AppendFormat(" XEX_HEADER_GAME_RATINGS (TODO):\n"); } break; case XEX_HEADER_LAN_KEY: { sb.AppendFormat(" XEX_HEADER_LAN_KEY:"); auto opt_lan_key = reinterpret_cast<const xex2_opt_lan_key*>(opt_header_ptr); for (int l = 0; l < 16; l++) { sb.AppendFormat(" %.2X", opt_lan_key->key[l]); } sb.Append("\n"); } break; case XEX_HEADER_XBOX360_LOGO: { sb.AppendFormat(" XEX_HEADER_XBOX360_LOGO (TODO):\n"); } break; case XEX_HEADER_MULTIDISC_MEDIA_IDS: { sb.AppendFormat(" XEX_HEADER_MULTIDISC_MEDIA_IDS (TODO):\n"); } break; case XEX_HEADER_ALTERNATE_TITLE_IDS: { sb.AppendFormat(" XEX_HEADER_ALTERNATE_TITLE_IDS (TODO):\n"); } break; case XEX_HEADER_ADDITIONAL_TITLE_MEMORY: { sb.AppendFormat(" XEX_HEADER_ADDITIONAL_TITLE_MEMORY: %d\n", uint32_t(opt_header.value)); } break; case XEX_HEADER_EXPORTS_BY_NAME: { sb.AppendFormat(" XEX_HEADER_EXPORTS_BY_NAME:\n"); auto dir = reinterpret_cast<const xex2_opt_data_directory*>(opt_header_ptr); auto exe_address = xex_module()->xex_security_info()->load_address; auto e = memory()->TranslateVirtual<const X_IMAGE_EXPORT_DIRECTORY*>( exe_address + dir->offset); auto e_base = reinterpret_cast<uintptr_t>(e); // e->AddressOfX RVAs are relative to the IMAGE_EXPORT_DIRECTORY! auto function_table = reinterpret_cast<const uint32_t*>(e_base + e->AddressOfFunctions); // Names relative to directory. auto name_table = reinterpret_cast<const uint32_t*>(e_base + e->AddressOfNames); // Table of ordinals (by name). 
auto ordinal_table = reinterpret_cast<const uint16_t*>( e_base + e->AddressOfNameOrdinals); for (uint32_t n = 0; n < e->NumberOfNames; n++) { auto name = reinterpret_cast<const char*>(e_base + name_table[n]); uint16_t ordinal = ordinal_table[n]; uint32_t addr = exe_address + function_table[ordinal]; sb.AppendFormat(" %-28s - %.3X - %.8X\n", name, ordinal, addr); } } break; default: { sb.AppendFormat(" Unknown Header %.8X\n", (uint32_t)opt_header.key); } break; } } sb.AppendFormat("Sections:\n"); for (uint32_t i = 0, page = 0; i < security_info->page_descriptor_count; i++) { // Manually byteswap the bitfield data. xex2_page_descriptor page_descriptor; page_descriptor.value = xe::byte_swap(security_info->page_descriptors[i].value); const char* type = "UNKNOWN"; switch (page_descriptor.info) { case XEX_SECTION_CODE: type = "CODE "; break; case XEX_SECTION_DATA: type = "RWDATA "; break; case XEX_SECTION_READONLY_DATA: type = "RODATA "; break; } const uint32_t page_size = security_info->load_address < 0x90000000 ? 64 * 1024 : 4 * 1024; uint32_t start_address = security_info->load_address + (page * page_size); uint32_t end_address = start_address + (page_descriptor.size * page_size); sb.AppendFormat(" %3u %s %3u pages %.8X - %.8X (%d bytes)\n", page, type, page_descriptor.size, start_address, end_address, page_descriptor.size * page_size); page += page_descriptor.size; } // Print out imports. // TODO(benvanik): figure out a way to remove dependency on old xex header. 
auto old_header = xe_xex2_get_header(xex_module()->xex()); sb.AppendFormat("Imports:\n"); for (size_t n = 0; n < old_header->import_library_count; n++) { const xe_xex2_import_library_t* library = &old_header->import_libraries[n]; xe_xex2_import_info_t* import_infos; size_t import_info_count; if (!xe_xex2_get_import_infos(xex_module()->xex(), library, &import_infos, &import_info_count)) { sb.AppendFormat(" %s - %lld imports\n", library->name, import_info_count); sb.AppendFormat(" Version: %d.%d.%d.%d\n", library->version.major, library->version.minor, library->version.build, library->version.qfe); sb.AppendFormat(" Min Version: %d.%d.%d.%d\n", library->min_version.major, library->min_version.minor,<|fim▁hole|> int known_count = 0; int unknown_count = 0; int impl_count = 0; int unimpl_count = 0; for (size_t m = 0; m < import_info_count; m++) { const xe_xex2_import_info_t* info = &import_infos[m]; if (kernel_state_->IsKernelModule(library->name)) { auto kernel_export = export_resolver->GetExportByOrdinal(library->name, info->ordinal); if (kernel_export) { known_count++; if (kernel_export->is_implemented()) { impl_count++; } else { unimpl_count++; } } else { unknown_count++; unimpl_count++; } } else { auto module = kernel_state_->GetModule(library->name); if (module) { uint32_t export_addr = module->GetProcAddressByOrdinal(info->ordinal); if (export_addr) { impl_count++; known_count++; } else { unimpl_count++; unknown_count++; } } else { unimpl_count++; unknown_count++; } } } float total_count = static_cast<float>(import_info_count) / 100.0f; sb.AppendFormat(" Total: %4llu\n", import_info_count); sb.AppendFormat(" Known: %3d%% (%d known, %d unknown)\n", static_cast<int>(known_count / total_count), known_count, unknown_count); sb.AppendFormat( " Implemented: %3d%% (%d implemented, %d unimplemented)\n", static_cast<int>(impl_count / total_count), impl_count, unimpl_count); sb.AppendFormat("\n"); // Listing. 
for (size_t m = 0; m < import_info_count; m++) { const xe_xex2_import_info_t* info = &import_infos[m]; const char* name = "UNKNOWN"; bool implemented = false; cpu::Export* kernel_export = nullptr; if (kernel_state_->IsKernelModule(library->name)) { kernel_export = export_resolver->GetExportByOrdinal(library->name, info->ordinal); if (kernel_export) { name = kernel_export->name; implemented = kernel_export->is_implemented(); } } else { auto module = kernel_state_->GetModule(library->name); if (module && module->GetProcAddressByOrdinal(info->ordinal)) { // TODO(benvanik): name lookup. implemented = true; } } if (kernel_export && kernel_export->type == cpu::Export::Type::kVariable) { sb.AppendFormat(" V %.8X %.3X (%3d) %s %s\n", info->value_address, info->ordinal, info->ordinal, implemented ? " " : "!!", name); } else if (info->thunk_address) { sb.AppendFormat(" F %.8X %.8X %.3X (%3d) %s %s\n", info->value_address, info->thunk_address, info->ordinal, info->ordinal, implemented ? " " : "!!", name); } } } sb.AppendFormat("\n"); } xe::LogLine('i', sb.GetString()); } } // namespace kernel } // namespace xe<|fim▁end|>
library->min_version.build, library->min_version.qfe); sb.AppendFormat("\n"); // Counts.
<|file_name|>central_planner_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Copyright 2012-2015 Spotify AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import time from helpers import unittest import luigi.notifications from luigi.scheduler import DISABLED, DONE, FAILED, CentralPlannerScheduler luigi.notifications.DEBUG = True WORKER = 'myworker' class CentralPlannerTest(unittest.TestCase): def setUp(self): super(CentralPlannerTest, self).setUp() conf = self.get_scheduler_config() self.sch = CentralPlannerScheduler(**conf) self.time = time.time def get_scheduler_config(self): return { 'retry_delay': 100, 'remove_delay': 1000, 'worker_disconnect_delay': 10, 'disable_persist': 10, 'disable_window': 10, 'disable_failures': 3, } def tearDown(self): super(CentralPlannerTest, self).tearDown() if time.time != self.time: time.time = self.time def setTime(self, t): time.time = lambda: t def test_dep(self): self.sch.add_task(WORKER, 'B', deps=('A',)) self.sch.add_task(WORKER, 'A') self.assertEqual(self.sch.get_work(WORKER)['task_id'], 'A') self.sch.add_task(WORKER, 'A', status=DONE) self.assertEqual(self.sch.get_work(WORKER)['task_id'], 'B') self.sch.add_task(WORKER, 'B', status=DONE) self.assertEqual(self.sch.get_work(WORKER)['task_id'], None) def test_failed_dep(self): self.sch.add_task(WORKER, 'B', deps=('A',)) self.sch.add_task(WORKER, 'A') self.assertEqual(self.sch.get_work(WORKER)['task_id'], 'A') self.sch.add_task(WORKER, 'A', status=FAILED) 
self.assertEqual(self.sch.get_work(WORKER)['task_id'], None) # can still wait and retry: TODO: do we want this? self.sch.add_task(WORKER, 'A', DONE) self.assertEqual(self.sch.get_work(WORKER)['task_id'], 'B') self.sch.add_task(WORKER, 'B', DONE) self.assertEqual(self.sch.get_work(WORKER)['task_id'], None) def test_broken_dep(self): self.sch.add_task(WORKER, 'B', deps=('A',)) self.sch.add_task(WORKER, 'A', runnable=False) self.assertEqual(self.sch.get_work(WORKER)['task_id'], None) # can still wait and retry: TODO: do we want this? self.sch.add_task(WORKER, 'A', DONE) self.assertEqual(self.sch.get_work(WORKER)['task_id'], 'B') self.sch.add_task(WORKER, 'B', DONE) self.assertEqual(self.sch.get_work(WORKER)['task_id'], None) def test_two_workers(self): # Worker X wants to build A -> B # Worker Y wants to build A -> C self.sch.add_task(worker='X', task_id='A') self.sch.add_task(worker='Y', task_id='A') self.sch.add_task(task_id='B', deps=('A',), worker='X') self.sch.add_task(task_id='C', deps=('A',), worker='Y') self.assertEqual(self.sch.get_work(worker='X')['task_id'], 'A') self.assertEqual(self.sch.get_work(worker='Y')['task_id'], None) # Worker Y is pending on A to be done self.sch.add_task(worker='X', task_id='A', status=DONE) self.assertEqual(self.sch.get_work(worker='Y')['task_id'], 'C') self.assertEqual(self.sch.get_work(worker='X')['task_id'], 'B') def test_retry(self): # Try to build A but fails, will retry after 100s self.setTime(0) self.sch.add_task(WORKER, 'A') self.assertEqual(self.sch.get_work(WORKER)['task_id'], 'A') self.sch.add_task(WORKER, 'A', FAILED) for t in range(100): self.setTime(t) self.assertEqual(self.sch.get_work(WORKER)['task_id'], None) self.sch.ping(WORKER) if t % 10 == 0: self.sch.prune() self.setTime(101) self.sch.prune() self.assertEqual(self.sch.get_work(WORKER)['task_id'], 'A') def test_disconnect_running(self): # X and Y wants to run A. # X starts but does not report back. Y does. 
# After some timeout, Y will build it instead self.setTime(0) self.sch.add_task(task_id='A', worker='X') self.sch.add_task(task_id='A', worker='Y') self.assertEqual(self.sch.get_work(worker='X')['task_id'], 'A') for t in range(200): self.setTime(t) self.sch.ping(worker='Y') if t % 10 == 0: self.sch.prune() self.assertEqual(self.sch.get_work(worker='Y')['task_id'], 'A') def test_remove_dep(self): # X schedules A -> B, A is broken # Y schedules C -> B: this should remove A as a dep of B self.sch.add_task(task_id='A', worker='X', runnable=False) self.sch.add_task(task_id='B', deps=('A',), worker='X') # X can't build anything self.assertEqual(self.sch.get_work(worker='X')['task_id'], None) self.sch.add_task(task_id='B', deps=('C',), worker='Y') # should reset dependencies for A self.sch.add_task(task_id='C', worker='Y', status=DONE) self.assertEqual(self.sch.get_work(worker='Y')['task_id'], 'B') def test_timeout(self): # A bug that was earlier present when restarting the same flow self.setTime(0) self.sch.add_task(task_id='A', worker='X') self.assertEqual(self.sch.get_work(worker='X')['task_id'], 'A') self.setTime(10000) self.sch.add_task(task_id='A', worker='Y') # Will timeout X but not schedule A for removal for i in range(2000): self.setTime(10000 + i) self.sch.ping(worker='Y') self.sch.add_task(task_id='A', status=DONE, worker='Y') # This used to raise an exception since A was removed def test_disallowed_state_changes(self): # Test that we can not schedule an already running task t = 'A' self.sch.add_task(task_id=t, worker='X') self.assertEqual(self.sch.get_work(worker='X')['task_id'], t) self.sch.add_task(task_id=t, worker='Y') self.assertEqual(self.sch.get_work(worker='Y')['task_id'], None) def test_two_worker_info(self): # Make sure the scheduler returns info that some other worker is running task A self.sch.add_task(worker='X', task_id='A') self.sch.add_task(worker='Y', task_id='A') self.assertEqual(self.sch.get_work(worker='X')['task_id'], 'A') r = 
self.sch.get_work(worker='Y') self.assertEqual(r['task_id'], None) # Worker Y is pending on A to be done s = r['running_tasks'][0] self.assertEqual(s['task_id'], 'A') self.assertEqual(s['worker'], 'X') def test_assistant_get_work(self): self.sch.add_task(worker='X', task_id='A') self.sch.add_worker('Y', []) self.assertEqual(self.sch.get_work('Y', assistant=True)['task_id'], 'A') # check that the scheduler recognizes tasks as running running_tasks = self.sch.task_list('RUNNING', '') self.assertEqual(len(running_tasks), 1) self.assertEqual(list(running_tasks.keys()), ['A']) self.assertEqual(running_tasks['A']['worker_running'], 'Y') def test_assistant_get_work_external_task(self): self.sch.add_task('X', task_id='A', runnable=False) self.assertTrue(self.sch.get_work('Y', assistant=True)['task_id'] is None) def test_task_fails_when_assistant_dies(self): self.setTime(0) self.sch.add_task(worker='X', task_id='A') self.sch.add_worker('Y', []) self.assertEqual(self.sch.get_work('Y', assistant=True)['task_id'], 'A') self.assertEqual(list(self.sch.task_list('RUNNING', '').keys()), ['A']) # Y dies for 50 seconds, X stays alive self.setTime(50) self.sch.ping('X') self.assertEqual(list(self.sch.task_list('FAILED', '').keys()), ['A']) def test_prune_with_live_assistant(self): self.setTime(0) self.sch.add_task(worker='X', task_id='A') self.sch.get_work('Y', assistant=True) self.sch.add_task(worker='Y', task_id='A', status=DONE, assistant=True) # worker X stops communicating, A should be marked for removal self.setTime(600) self.sch.ping('Y') self.sch.prune() # A will now be pruned self.setTime(2000) self.sch.prune() self.assertFalse(list(self.sch.task_list('', ''))) def test_prune_done_tasks(self, expected=None): self.setTime(0) self.sch.add_task(WORKER, task_id='A', status=DONE) self.sch.add_task(WORKER, task_id='B', deps=['A'], status=DONE) self.sch.add_task(WORKER, task_id='C', deps=['B']) self.setTime(600) self.sch.ping('ASSISTANT') self.sch.prune() self.setTime(2000) 
self.sch.ping('ASSISTANT') self.sch.prune() self.assertEqual(set(expected or ()), set(self.sch.task_list('', '').keys())) def test_keep_tasks_for_assistant(self): self.sch.get_work('ASSISTANT', assistant=True) # tell the scheduler this is an assistant self.test_prune_done_tasks(['B', 'C']) def test_keep_scheduler_disabled_tasks_for_assistant(self): self.sch.get_work('ASSISTANT', assistant=True) # tell the scheduler this is an assistant # create a scheduler disabled task and a worker disabled task for i in range(10): self.sch.add_task(WORKER, 'D', status=FAILED) self.sch.add_task(WORKER, 'E', status=DISABLED) # scheduler prunes the worker disabled task self.assertEqual(set(['D', 'E']), set(self.sch.task_list(DISABLED, ''))) self.test_prune_done_tasks(['B', 'C', 'D']) def test_keep_failed_tasks_for_assistant(self): self.sch.get_work('ASSISTANT', assistant=True) # tell the scheduler this is an assistant self.sch.add_task(WORKER, 'D', status=FAILED, deps='A') self.test_prune_done_tasks(['A', 'B', 'C', 'D']) def test_scheduler_resources_none_allow_one(self): self.sch.add_task(worker='X', task_id='A', resources={'R1': 1}) self.assertEqual(self.sch.get_work(worker='X')['task_id'], 'A') def test_scheduler_resources_none_disallow_two(self): self.sch.add_task(worker='X', task_id='A', resources={'R1': 2}) self.assertFalse(self.sch.get_work(worker='X')['task_id'], 'A') def test_scheduler_with_insufficient_resources(self): self.sch.add_task(worker='X', task_id='A', resources={'R1': 3}) self.sch.update_resources(R1=2) self.assertFalse(self.sch.get_work(worker='X')['task_id']) def test_scheduler_with_sufficient_resources(self): self.sch.add_task(worker='X', task_id='A', resources={'R1': 3}) self.sch.update_resources(R1=3) self.assertEqual(self.sch.get_work(worker='X')['task_id'], 'A') def test_scheduler_with_resources_used(self): self.sch.add_task(worker='X', task_id='A', resources={'R1': 1}) self.assertEqual(self.sch.get_work(worker='X')['task_id'], 'A') 
self.sch.add_task(worker='Y', task_id='B', resources={'R1': 1}) self.sch.update_resources(R1=1) self.assertFalse(self.sch.get_work(worker='Y')['task_id']) def test_scheduler_overprovisioned_on_other_resource(self): self.sch.add_task(worker='X', task_id='A', resources={'R1': 2}) self.sch.update_resources(R1=2) self.assertEqual(self.sch.get_work(worker='X')['task_id'], 'A') self.sch.add_task(worker='Y', task_id='B', resources={'R2': 2}) self.sch.update_resources(R1=1, R2=2) self.assertEqual(self.sch.get_work(worker='Y')['task_id'], 'B') def test_scheduler_with_priority_and_competing_resources(self): self.sch.add_task(worker='X', task_id='A') self.assertEqual(self.sch.get_work(worker='X')['task_id'], 'A') self.sch.add_task(worker='X', task_id='B', resources={'R': 1}, priority=10) self.sch.add_task(worker='Y', task_id='C', resources={'R': 1}, priority=1) self.sch.update_resources(R=1) self.assertFalse(self.sch.get_work(worker='Y')['task_id']) self.sch.add_task(worker='Y', task_id='D', priority=0) self.assertEqual(self.sch.get_work(worker='Y')['task_id'], 'D') def test_do_not_lock_resources_when_not_ready(self): """ Test to make sure that resources won't go unused waiting on workers """ self.sch.add_task(worker='X', task_id='A', priority=10) self.sch.add_task(worker='X', task_id='B', resources={'R': 1}, priority=5) self.sch.add_task(worker='Y', task_id='C', resources={'R': 1}, priority=1) self.sch.update_resources(R=1) self.sch.add_worker('X', [('workers', 1)]) self.assertEqual('C', self.sch.get_work(worker='Y')['task_id']) def test_lock_resources_when_one_of_multiple_workers_is_ready(self): self.sch.add_task(worker='X', task_id='A', priority=10) self.sch.add_task(worker='X', task_id='B', resources={'R': 1}, priority=5) self.sch.add_task(worker='Y', task_id='C', resources={'R': 1}, priority=1) self.sch.update_resources(R=1) self.sch.add_worker('X', [('workers', 2)]) self.sch.add_worker('Y', []) self.assertFalse(self.sch.get_work('Y')['task_id']) def 
test_do_not_lock_resources_while_running_higher_priority(self): """ Test to make sure that resources won't go unused waiting on workers """ self.sch.add_task(worker='X', task_id='A', priority=10) self.sch.add_task(worker='X', task_id='B', resources={'R': 1}, priority=5) self.sch.add_task(worker='Y', task_id='C', resources={'R': 1}, priority=1) self.sch.update_resources(R=1) self.sch.add_worker('X', [('workers', 1)]) self.assertEqual('A', self.sch.get_work('X')['task_id']) self.assertEqual('C', self.sch.get_work('Y')['task_id']) def test_lock_resources_while_running_lower_priority(self): """ Make sure resources will be made available while working on lower priority tasks """ self.sch.add_task(worker='X', task_id='A', priority=4) self.assertEqual('A', self.sch.get_work('X')['task_id']) self.sch.add_task(worker='X', task_id='B', resources={'R': 1}, priority=5) self.sch.add_task(worker='Y', task_id='C', resources={'R': 1}, priority=1) self.sch.update_resources(R=1) self.sch.add_worker('X', [('workers', 1)]) self.assertFalse(self.sch.get_work('Y')['task_id']) def test_lock_resources_for_second_worker(self): self.sch.add_task(worker='X', task_id='A', resources={'R': 1}) self.sch.add_task(worker='X', task_id='B', resources={'R': 1}) self.sch.add_task(worker='Y', task_id='C', resources={'R': 1}, priority=10) self.sch.add_worker('X', {'workers': 2}) self.sch.add_worker('Y', {'workers': 1}) self.sch.update_resources(R=2) self.assertEqual('A', self.sch.get_work('X')['task_id']) self.assertFalse(self.sch.get_work('X')['task_id']) def test_can_work_on_lower_priority_while_waiting_for_resources(self): self.sch.add_task(worker='X', task_id='A', resources={'R': 1}, priority=0) self.assertEqual('A', self.sch.get_work('X')['task_id']) self.sch.add_task(worker='Y', task_id='B', resources={'R': 1}, priority=10) self.sch.add_task(worker='Y', task_id='C', priority=0) self.sch.update_resources(R=1) self.assertEqual('C', self.sch.get_work('Y')['task_id']) def 
test_priority_update_with_pruning(self): self.setTime(0) self.sch.add_task(task_id='A', worker='X') self.setTime(50) # after worker disconnects self.sch.prune() self.sch.add_task(task_id='B', deps=['A'], worker='X') self.setTime(2000) # after remove for task A self.sch.prune() # Here task A that B depends on is missing self.sch.add_task(WORKER, task_id='C', deps=['B'], priority=100) self.sch.add_task(WORKER, task_id='B', deps=['A']) self.sch.add_task(WORKER, task_id='A') self.sch.add_task(WORKER, task_id='D', priority=10) self.check_task_order('ABCD') def test_update_resources(self): self.sch.add_task(WORKER, task_id='A', deps=['B']) self.sch.add_task(WORKER, task_id='B', resources={'r': 2}) self.sch.update_resources(r=1) # B requires too many resources, we can't schedule self.check_task_order([]) self.sch.add_task(WORKER, task_id='B', resources={'r': 1}) # now we have enough resources self.check_task_order(['B', 'A']) def test_hendle_multiple_resources(self): self.sch.add_task(WORKER, task_id='A', resources={'r1': 1, 'r2': 1}) self.sch.add_task(WORKER, task_id='B', resources={'r1': 1, 'r2': 1}) self.sch.add_task(WORKER, task_id='C', resources={'r1': 1}) self.sch.update_resources(r1=2, r2=1) self.assertEqual('A', self.sch.get_work(WORKER)['task_id']) self.check_task_order('C') def test_single_resource_lock(self): self.sch.add_task('X', task_id='A', resources={'r': 1}) self.assertEqual('A', self.sch.get_work('X')['task_id']) self.sch.add_task(WORKER, task_id='B', resources={'r': 2}, priority=10) self.sch.add_task(WORKER, task_id='C', resources={'r': 1}) self.sch.update_resources(r=2) # Should wait for 2 units of r to be available for B before scheduling C self.check_task_order([]) def test_no_lock_if_too_many_resources_required(self): self.sch.add_task(WORKER, task_id='A', resources={'r': 2}, priority=10) self.sch.add_task(WORKER, task_id='B', resources={'r': 1}) self.sch.update_resources(r=1) self.check_task_order('B') def test_multiple_resources_lock(self): 
self.sch.add_task('X', task_id='A', resources={'r1': 1, 'r2': 1}, priority=10) self.sch.add_task(WORKER, task_id='B', resources={'r2': 1}) self.sch.add_task(WORKER, task_id='C', resources={'r1': 1}) self.sch.update_resources(r1=1, r2=1) # should preserve both resources for worker 'X' self.check_task_order([]) def test_multiple_resources_no_lock(self): self.sch.add_task(WORKER, task_id='A', resources={'r1': 1}, priority=10) self.sch.add_task(WORKER, task_id='B', resources={'r1': 1, 'r2': 1}, priority=10) self.sch.add_task(WORKER, task_id='C', resources={'r2': 1}) self.sch.update_resources(r1=1, r2=2) self.assertEqual('A', self.sch.get_work(WORKER)['task_id']) # C doesn't block B, so it can go first self.check_task_order('C') def check_task_order(self, order): for expected_id in order: self.assertEqual(self.sch.get_work(WORKER)['task_id'], expected_id) self.sch.add_task(WORKER, expected_id, status=DONE) self.assertEqual(self.sch.get_work(WORKER)['task_id'], None) def test_priorities(self): self.sch.add_task(WORKER, 'A', priority=10) self.sch.add_task(WORKER, 'B', priority=5) self.sch.add_task(WORKER, 'C', priority=15) self.sch.add_task(WORKER, 'D', priority=9) self.check_task_order(['C', 'A', 'D', 'B']) def test_priorities_default_and_negative(self): self.sch.add_task(WORKER, 'A', priority=10) self.sch.add_task(WORKER, 'B') self.sch.add_task(WORKER, 'C', priority=15) self.sch.add_task(WORKER, 'D', priority=-20) self.sch.add_task(WORKER, 'E', priority=1) self.check_task_order(['C', 'A', 'E', 'B', 'D']) def test_priorities_and_dependencies(self): self.sch.add_task(WORKER, 'A', deps=['Z'], priority=10) self.sch.add_task(WORKER, 'B', priority=5) self.sch.add_task(WORKER, 'C', deps=['Z'], priority=3) self.sch.add_task(WORKER, 'D', priority=2) self.sch.add_task(WORKER, 'Z', priority=1) self.check_task_order(['Z', 'A', 'B', 'C', 'D']) def test_priority_update_dependency_after_scheduling(self): self.sch.add_task(WORKER, 'A', priority=1) self.sch.add_task(WORKER, 'B', 
priority=5, deps=['A']) self.sch.add_task(WORKER, 'C', priority=10, deps=['B']) self.sch.add_task(WORKER, 'D', priority=6) self.check_task_order(['A', 'B', 'C', 'D']) def test_disable(self): self.sch.add_task(WORKER, 'A') self.sch.add_task(WORKER, 'A', status=FAILED) self.sch.add_task(WORKER, 'A', status=FAILED) self.sch.add_task(WORKER, 'A', status=FAILED) # should be disabled at this point self.assertEqual(len(self.sch.task_list('DISABLED', '')), 1) self.assertEqual(len(self.sch.task_list('FAILED', '')), 0) self.sch.add_task(WORKER, 'A') self.assertEqual(self.sch.get_work(WORKER)['task_id'], None) def test_disable_and_reenable(self): self.sch.add_task(WORKER, 'A') self.sch.add_task(WORKER, 'A', status=FAILED) self.sch.add_task(WORKER, 'A', status=FAILED) self.sch.add_task(WORKER, 'A', status=FAILED) # should be disabled at this point self.assertEqual(len(self.sch.task_list('DISABLED', '')), 1) self.assertEqual(len(self.sch.task_list('FAILED', '')), 0) self.sch.re_enable_task('A') # should be enabled at this point self.assertEqual(len(self.sch.task_list('DISABLED', '')), 0) self.assertEqual(len(self.sch.task_list('FAILED', '')), 1) self.sch.add_task(WORKER, 'A') self.assertEqual(self.sch.get_work(WORKER)['task_id'], 'A') def test_disable_and_reenable_and_disable_again(self): self.sch.add_task(WORKER, 'A') self.sch.add_task(WORKER, 'A', status=FAILED) self.sch.add_task(WORKER, 'A', status=FAILED) self.sch.add_task(WORKER, 'A', status=FAILED) # should be disabled at this point self.assertEqual(len(self.sch.task_list('DISABLED', '')), 1) self.assertEqual(len(self.sch.task_list('FAILED', '')), 0) self.sch.re_enable_task('A') # should be enabled at this point self.assertEqual(len(self.sch.task_list('DISABLED', '')), 0) self.assertEqual(len(self.sch.task_list('FAILED', '')), 1) self.sch.add_task(WORKER, 'A') self.assertEqual(self.sch.get_work(WORKER)['task_id'], 'A') self.sch.add_task(WORKER, 'A', status=FAILED) # should be still enabled 
self.assertEqual(len(self.sch.task_list('DISABLED', '')), 0) self.assertEqual(len(self.sch.task_list('FAILED', '')), 1) self.sch.add_task(WORKER, 'A') self.assertEqual(self.sch.get_work(WORKER)['task_id'], 'A') self.sch.add_task(WORKER, 'A', status=FAILED) self.sch.add_task(WORKER, 'A', status=FAILED) # should be disabled now self.assertEqual(len(self.sch.task_list('DISABLED', '')), 1) self.assertEqual(len(self.sch.task_list('FAILED', '')), 0) self.sch.add_task(WORKER, 'A') self.assertEqual(self.sch.get_work(WORKER)['task_id'], None) def test_disable_and_done(self): self.sch.add_task(WORKER, 'A') self.sch.add_task(WORKER, 'A', status=FAILED) self.sch.add_task(WORKER, 'A', status=FAILED) self.sch.add_task(WORKER, 'A', status=FAILED) # should be disabled at this point self.assertEqual(len(self.sch.task_list('DISABLED', '')), 1) self.assertEqual(len(self.sch.task_list('FAILED', '')), 0)<|fim▁hole|> self.sch.add_task(WORKER, 'A', status=DONE) # should be enabled at this point self.assertEqual(len(self.sch.task_list('DISABLED', '')), 0) self.assertEqual(len(self.sch.task_list('DONE', '')), 1) self.sch.add_task(WORKER, 'A') self.assertEqual(self.sch.get_work(WORKER)['task_id'], 'A') def test_disable_by_worker(self): self.sch.add_task(WORKER, 'A', status=DISABLED) self.assertEqual(len(self.sch.task_list('DISABLED', '')), 1) self.sch.add_task(WORKER, 'A') # should be enabled at this point self.assertEqual(len(self.sch.task_list('DISABLED', '')), 0) self.sch.add_task(WORKER, 'A') self.assertEqual(self.sch.get_work(WORKER)['task_id'], 'A') def test_task_list_beyond_limit(self): sch = CentralPlannerScheduler(max_shown_tasks=3) for c in 'ABCD': sch.add_task(WORKER, c) self.assertEqual(set('ABCD'), set(sch.task_list('PENDING', '', False).keys())) self.assertEqual({'num_tasks': 4}, sch.task_list('PENDING', '')) def test_task_list_within_limit(self): sch = CentralPlannerScheduler(max_shown_tasks=4) for c in 'ABCD': sch.add_task(WORKER, c) self.assertEqual(set('ABCD'), 
set(sch.task_list('PENDING', '').keys())) def test_task_lists_some_beyond_limit(self): sch = CentralPlannerScheduler(max_shown_tasks=3) for c in 'ABCD': sch.add_task(WORKER, c, 'DONE') for c in 'EFG': sch.add_task(WORKER, c) self.assertEqual(set('EFG'), set(sch.task_list('PENDING', '').keys())) self.assertEqual({'num_tasks': 4}, sch.task_list('DONE', '')) def test_priority_update_dependency_chain(self): self.sch.add_task(WORKER, 'A', priority=10, deps=['B']) self.sch.add_task(WORKER, 'B', priority=5, deps=['C']) self.sch.add_task(WORKER, 'C', priority=1) self.sch.add_task(WORKER, 'D', priority=6) self.check_task_order(['C', 'B', 'A', 'D']) def test_priority_no_decrease_with_multiple_updates(self): self.sch.add_task(WORKER, 'A', priority=1) self.sch.add_task(WORKER, 'B', priority=10, deps=['A']) self.sch.add_task(WORKER, 'C', priority=5, deps=['A']) self.sch.add_task(WORKER, 'D', priority=6) self.check_task_order(['A', 'B', 'D', 'C']) def test_unique_tasks(self): self.sch.add_task(WORKER, 'A') self.sch.add_task(WORKER, 'B') self.sch.add_task(WORKER, 'C') self.sch.add_task(WORKER + "_2", 'B') response = self.sch.get_work(WORKER) self.assertEqual(3, response['n_pending_tasks']) self.assertEqual(2, response['n_unique_pending']) def test_pending_downstream_disable(self): self.sch.add_task(WORKER, 'A', status=DISABLED) self.sch.add_task(WORKER, 'B', deps=('A',)) self.sch.add_task(WORKER, 'C', deps=('B',)) response = self.sch.get_work(WORKER) self.assertTrue(response['task_id'] is None) self.assertEqual(0, response['n_pending_tasks']) self.assertEqual(0, response['n_unique_pending']) def test_pending_downstream_failure(self): self.sch.add_task(WORKER, 'A', status=FAILED) self.sch.add_task(WORKER, 'B', deps=('A',)) self.sch.add_task(WORKER, 'C', deps=('B',)) response = self.sch.get_work(WORKER) self.assertTrue(response['task_id'] is None) self.assertEqual(2, response['n_pending_tasks']) self.assertEqual(2, response['n_unique_pending']) def 
test_prefer_more_dependents(self): self.sch.add_task(WORKER, 'A') self.sch.add_task(WORKER, 'B') self.sch.add_task(WORKER, 'C', deps=['B']) self.sch.add_task(WORKER, 'D', deps=['B']) self.sch.add_task(WORKER, 'E', deps=['A']) self.check_task_order('BACDE') def test_prefer_readier_dependents(self): self.sch.add_task(WORKER, 'A') self.sch.add_task(WORKER, 'B') self.sch.add_task(WORKER, 'C') self.sch.add_task(WORKER, 'D') self.sch.add_task(WORKER, 'F', deps=['A', 'B', 'C']) self.sch.add_task(WORKER, 'G', deps=['A', 'B', 'C']) self.sch.add_task(WORKER, 'E', deps=['D']) self.check_task_order('DABCFGE') def test_ignore_done_dependents(self): self.sch.add_task(WORKER, 'A') self.sch.add_task(WORKER, 'B') self.sch.add_task(WORKER, 'C') self.sch.add_task(WORKER, 'D', priority=1) self.sch.add_task(WORKER, 'E', deps=['C', 'D']) self.sch.add_task(WORKER, 'F', deps=['A', 'B']) self.check_task_order('DCABEF') def test_task_list_no_deps(self): self.sch.add_task(WORKER, 'B', deps=('A',)) self.sch.add_task(WORKER, 'A') task_list = self.sch.task_list('PENDING', '') self.assertFalse('deps' in task_list['A']) if __name__ == '__main__': unittest.main()<|fim▁end|>
<|file_name|>Event_Generator_Base.H<|end_file_name|><|fim▁begin|>#ifndef SHRIMPS_EVENT_GENERATOR_BASE_H #define SHRIMPS_EVENT_GENERATOR_BASE_H #include "SHRiMPS/Eikonals/Omega_ik.H" namespace SHRIMPS { class Event_Generator_Base { protected: Omega_ik * p_eikonal; double m_smin; public: Event_Generator_Base(): p_eikonal(NULL),m_smin(0.) {} ~Event_Generator_Base() {}; virtual Omega_ik * GetEikonal() const { return p_eikonal;} virtual double Smin() const { return m_smin;} virtual bool IsLastRescatter() const { return false; }<|fim▁hole|> virtual int NLadders() const { return 1; } }; } #endif<|fim▁end|>
virtual double TMax() const { return 0.; }
<|file_name|>RedBlackIntegerMap.java<|end_file_name|><|fim▁begin|>package org.katlas.JavaKh.rows; import org.katlas.JavaKh.utils.RedBlackIntegerTree; public class RedBlackIntegerMap<F> extends RedBlackIntegerTree<F> implements MatrixRow<F> { /** * */ private static final long serialVersionUID = 5885667469881867107L; public void compact() { } public void putLast(int key, F f) { put(key, f); } @Override public void put(int key, F value) { if(value == null) { remove(key); } else { super.put(key, value); } } <|fim▁hole|><|fim▁end|>
}
<|file_name|>Table1.cpp<|end_file_name|><|fim▁begin|>//include files #include "Table1.h" namespace hoge{ /* * This class is generated automatically. * Never change from your hand. */ /** * <pre> * schema name : "tiny_query_helper_test" * table name : "table1" * remarks : ""<|fim▁hole|> * </pre> */ //実体を定義 //カラム情報オブジェクトを定義 const std::string Table1::column::id::name_ ("id"); const std::string Table1::column::data1_int::name_ ("data1_int"); const std::string Table1::column::data2_string::name_ ("data2_string"); //カラム情報の実態を定義 const typename Table1::column::id Table1::column::id; const typename Table1::column::data1_int Table1::column::data1_int; const typename Table1::column::data2_string Table1::column::data2_string; }<|fim▁end|>
* * Persistable: * Available to use Persistor-methods, like DBManager\#insert(IPersistable),\#update(IPersistable),\#delete(IPersistable),else. *
<|file_name|>test.py<|end_file_name|><|fim▁begin|>from __future__ import print_function import sys sys.path.append('..') # help python find cyton.py relative to scripts folder from openbci import cyton as bci import logging import time def printData(sample): # os.system('clear') print("----------------") print("%f" % (sample.id)) print(sample.channel_data) print(sample.aux_data) print("----------------") if __name__ == '__main__': # port = '/dev/tty.OpenBCI-DN008VTF'<|fim▁hole|> baud = 115200 logging.basicConfig(filename="test.log", format='%(asctime)s - %(levelname)s : %(message)s', level=logging.DEBUG) logging.info('---------LOG START-------------') board = bci.OpenBCICyton(port=port, scaled_output=False, log=True) print("Board Instantiated") board.ser.write('v') time.sleep(10) board.start_streaming(printData) board.print_bytes_in()<|fim▁end|>
port = '/dev/tty.usbserial-DB00JAM0' # port = '/dev/tty.OpenBCI-DN0096XA'
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2011-2012 Domsense s.r.l. (<http://www.domsense.com>). # Copyright (C) 2012 Agile Business Group sagl (<http://www.agilebg.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. 
# ############################################################################## { "name": "VAT on payment", "version": "2.0", 'category': 'Generic Modules/Accounting',<|fim▁hole|> To activate the VAT on payment behaviour, this module adds a checkbox on invoice form: 'Vat on payment' Moreover, three things have to be configured: - On account object, Related account used for real registrations on a VAT on payment basis - On journal object, Related journal used for shadow registrations on a VAT on payment basis - On tax code object, Related tax code used for real registrations on a VAT on payment basis Requirements: https://docs.google.com/spreadsheet/ccc?key=0Aodwq17jxF4edDJaZ2dOQkVEN0hodEtfRmpVdlg2Vnc#gid=0 Howto: http://planet.domsense.com/en/2012/10/vat-on-payment-treatment-with-openerp/ """, 'website': 'http://www.agilebg.com', 'init_xml': [], 'update_xml': [ 'account_view.xml', 'company_view.xml', ], 'demo_xml': [], # TODO YAML tests 'installable': True, 'active': False, }<|fim▁end|>
"depends": ["account_voucher_cash_basis"], "author": "Agile Business Group", "description": """ See 'account_voucher_cash_basis' description.
<|file_name|>ScatterCanvas.java<|end_file_name|><|fim▁begin|>/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.jtex.plot; /** * * @author hios */ public interface ScatterCanvas {<|fim▁hole|> public void setScatterOptions(ScatterOptions options); }<|fim▁end|>
public ScatterOptions getScatterOptions();
<|file_name|>739_daily-temperatures.py<|end_file_name|><|fim▁begin|>class Solution: def dailyTemperatures(self, T): ans = [] m = [None]*101 for i in range(len(T)-1, -1, -1): x = T[i] m[x] = i ans.append(min([x for x in m[x+1:] if x is not None], default=i)-i) ans.reverse() return ans<|fim▁hole|><|fim▁end|>
print(Solution().dailyTemperatures([73, 74, 75, 71, 69, 72, 76, 73]))
<|file_name|>AssemblyConnector.java<|end_file_name|><|fim▁begin|>/** * <copyright> * </copyright> * * $Id$ */ package kieker.tools.slastic.metamodel.componentAssembly; import kieker.tools.slastic.metamodel.core.FQNamedEntity; import kieker.tools.slastic.metamodel.typeRepository.ConnectorType; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>Assembly Connector</b></em>'. * <!-- end-user-doc --> * * <p> * The following features are supported: * <ul> * <li>{@link kieker.tools.slastic.metamodel.componentAssembly.AssemblyConnector#getConnectorType <em>Connector Type</em>}</li> * </ul> * </p> * * @see kieker.tools.slastic.metamodel.componentAssembly.ComponentAssemblyPackage#getAssemblyConnector() * @model abstract="true" * @generated */ public interface AssemblyConnector extends FQNamedEntity { /** * Returns the value of the '<em><b>Connector Type</b></em>' reference. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Connector Type</em>' reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Connector Type</em>' reference. * @see #setConnectorType(ConnectorType) * @see kieker.tools.slastic.metamodel.componentAssembly.ComponentAssemblyPackage#getAssemblyConnector_ConnectorType()<|fim▁hole|> * @model required="true" ordered="false" * @generated */ ConnectorType getConnectorType(); /** * Sets the value of the '{@link kieker.tools.slastic.metamodel.componentAssembly.AssemblyConnector#getConnectorType <em>Connector Type</em>}' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Connector Type</em>' reference. * @see #getConnectorType() * @generated */ void setConnectorType(ConnectorType value); } // AssemblyConnector<|fim▁end|>
<|file_name|>strategy.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python # -*- coding: utf-8 -*- """ Created on Fri Jan 27 18:31:59 2017 @author: katsuya.ishiyama """ from numpy import random # Definition of module level constants SUCCESS_CODE = 1 FAILURE_CODE = 0 class Strategy(): def __init__(self, n): _success_probability = _generate_success_probability(n) _strategy = {i: p for i, p in enumerate(_success_probability, 1)} self._n = n self.strategy = _strategy self.stock_of_strategy = list(_strategy.keys()) self.tried_strategy = [] self.current_strategy = None self.previous_strategy = None self.count_same_strategy = 0 self._result_of_trial = None def choose_strategy(self): if not self.stock_of_strategy: raise ValueError('There is no strategy in stock.') _chosen_id = random.choice(self.stock_of_strategy, 1)[0] self.previous_strategy = self.current_strategy self.current_strategy = _chosen_id self.count_same_strategy = 0 self.stock_of_strategy.remove(_chosen_id) _chosen_strategy = { 'chosen_strategy': _chosen_id, 'success_probability': self._get_success_probability() } return _chosen_strategy def _get_success_probability(self): return self.strategy[self.current_strategy] def try_strategy(self): if not self.current_strategy: raise ValueError('No strategy is chosen.') <|fim▁hole|> self._result_of_trial = _get_trial_result( p=self._get_success_probability() ) if self.current_strategy == self.previous_strategy: self.count_same_strategy += 1 return self._result_of_trial def _get_trial_result(p): _trial_result = random.choice([FAILURE_CODE, SUCCESS_CODE], size=1, p=[1 - p, p]) return _trial_result[0] def _generate_success_probability(size): return random.sample(size)<|fim▁end|>
self.tried_strategy.append(self.current_strategy)
<|file_name|>UtilCellInfoFormatter.java<|end_file_name|><|fim▁begin|>package edu.towson.cis.cosc603.project2.monopoly.gui; import edu.towson.cis.cosc603.project2.monopoly.Cell; import edu.towson.cis.cosc603.project2.monopoly.Player; import edu.towson.cis.cosc603.project2.monopoly.UtilityCell; // TODO: Auto-generated Javadoc /** * The Class UtilCellInfoFormatter. */ public class UtilCellInfoFormatter extends OwnerName implements CellInfoFormatter { /* (non-Javadoc) * @see edu.towson.cis.cosc603.project2.monopoly.gui.CellInfoFormatter#format(edu.towson.cis.cosc603.project2.monopoly.Cell) */<|fim▁hole|> StringBuffer buf = new StringBuffer(); String ownerName = getOwnerName(cell); buf.append("<html><b><font color='olive'>") .append(cell.getName()) .append("</font></b><br>") .append("$").append(c.getPrice()) .append("<br>Owner: ").append(ownerName) .append("</html>"); return buf.toString(); } }<|fim▁end|>
public String format(Cell cell) { UtilityCell c = (UtilityCell)cell;
<|file_name|>bitcoin_sr.ts<|end_file_name|><|fim▁begin|><TS language="sr" version="2.1"> <context><|fim▁hole|> <name>AddressBookPage</name> <message> <source>Right-click to edit address or label</source> <translation>Десни клик за измену адресе или ознаке</translation> </message> <message> <source>Create a new address</source> <translation>Направи нову адресу</translation> </message> <message> <source>&amp;New</source> <translation>&amp;Ново</translation> </message> <message> <source>Copy the currently selected address to the system clipboard</source> <translation>Копирај тренутно одабрану адресу</translation> </message> <message> <source>&amp;Copy</source> <translation>&amp;Копирај</translation> </message> <message> <source>C&amp;lose</source> <translation>&amp;Затвори</translation> </message> <message> <source>Delete the currently selected address from the list</source> <translation>Обриши тренутно одабрану адресу са листе</translation> </message> <message> <source>Enter address or label to search</source> <translation>Унеси адресу или назив ознаке за претрагу</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Извези податке из одабране картице у датотеку</translation> </message> <message> <source>&amp;Export</source> <translation>&amp;Извези</translation> </message> <message> <source>&amp;Delete</source> <translation>&amp;Обриши</translation> </message> <message> <source>Choose the address to send coins to</source> <translation>Одабери адресу за слање</translation> </message> <message> <source>Choose the address to receive coins with</source> <translation>Одабери адресу за примање</translation> </message> <message> <source>C&amp;hoose</source> <translation>&amp;Одабери</translation> </message> <message> <source>Sending addresses</source> <translation>Адресе за слање</translation> </message> <message> <source>Receiving addresses</source> <translation>Адресе за примање</translation> </message> <message> 
<source>These are your Particl addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>Ово су твоје Биткоин адресе за слање уплата. Увек добро провери износ и адресу на коју шаљеш пре него што пошаљеш уплату.</translation> </message> <message> <source>These are your Particl addresses for receiving payments. Use the 'Create new receiving address' button in the receive tab to create new addresses. Signing is only possible with addresses of the type 'legacy'.</source> <translation>Ово су твоје Биткоин адресе за приманје уплата. Користи дугме „Направи нову адресу за примање” у картици за примање за креирање нових адреса. Потписивање је могуђе само за адресе типа 'legacy'.</translation> </message> <message> <source>&amp;Copy Address</source> <translation>&amp;Копирај Адресу</translation> </message> <message> <source>Copy &amp;Label</source> <translation>Копирај &amp; Обележи</translation> </message> <message> <source>&amp;Edit</source> <translation>&amp;Измени</translation> </message> <message> <source>Export Address List</source> <translation>Извези Листу Адреса</translation> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Зарезом одвојене вредности (*.csv)</translation> </message> <message> <source>Exporting Failed</source> <translation>Извоз Неуспешан</translation> </message> <message> <source>There was an error trying to save the address list to %1. Please try again.</source> <translation>Десила се грешка приликом покушаја да се листа адреса упамти на %1. 
Молимо покушајте поново.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <source>Label</source> <translation>Ознака</translation> </message> <message> <source>Address</source> <translation>Адреса</translation> </message> <message> <source>(no label)</source> <translation>(без ознаке)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <source>Passphrase Dialog</source> <translation>Прозор за унос лозинке</translation> </message> <message> <source>Enter passphrase</source> <translation>Унеси лозинку</translation> </message> <message> <source>New passphrase</source> <translation>Нова лозинка</translation> </message> <message> <source>Repeat new passphrase</source> <translation>Понови нову лозинку</translation> </message> <message> <source>Show passphrase</source> <translation>Прикажи лозинку</translation> </message> <message> <source>Encrypt wallet</source> <translation>Шифрирај новчаник</translation> </message> <message> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Ова операција захтева да унесеш лозинку новчаника како би се новчаник откључао.</translation> </message> <message> <source>Unlock wallet</source> <translation>Откључај новчаник</translation> </message> <message> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Ова операција захтева да унесеш лозинку новчаника како би новчаник био дешифрован.</translation> </message> <message> <source>Decrypt wallet</source> <translation>Дешифруј новчаник</translation> </message> <message> <source>Change passphrase</source> <translation>Измени лозинку</translation> </message> <message> <source>Confirm wallet encryption</source> <translation>Потврди шифрирање новчаника</translation> </message> <message> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR PARTICL&lt;/b&gt;!</source> 
<translation>Упозорење: Уколико шифрираш новчаник и изгубиш своју лозинку, &lt;b&gt;ИЗГУБИЋЕШ СВЕ СВОЈЕ БИТКОИНЕ&lt;/b&gt;!</translation> </message> <message> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Да ли сте сигурни да желите да шифрирате свој новчаник?</translation> </message> <message> <source>Wallet encrypted</source> <translation>Новчаник шифриран</translation> </message> <message> <source>Enter the new passphrase for the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;ten or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Унеси нову лозинку за новчаник&lt;br/&gt;Молимо користи лозинку од десет или више насумичних карактера&lt;b&gt;,или&lt;b&gt;осам или више речи&lt;/b&gt;.</translation> </message> <message> <source>Enter the old passphrase and new passphrase for the wallet.</source> <translation>Унеси стару лозинку и нову лозинку новчаника.</translation> </message> <message> <source>Remember that encrypting your wallet cannot fully protect your particl from being stolen by malware infecting your computer.</source> <translation>Упамти, шифрирање новчаника не може у потуности заштити твоје биткоине од крађе од стране малвера инфицира твој рачунар.</translation> </message> <message> <source>Wallet to be encrypted</source> <translation>Новчаник за шифрирање</translation> </message> <message> <source>Your wallet is about to be encrypted. </source> <translation>Твој новчаник биће шифриран.</translation> </message> <message> <source>Your wallet is now encrypted. </source> <translation>Твој новчаник сада је шифриран.</translation> </message> <message> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. 
For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>ВАЖНО: Свакa претходнa резерва новчаника коју сте имали треба да се замени новим, шифрираним фајлом новчаника. Из сигурносних разлога, свака претходна резерва нешифрираног фајла новчаника постаће сувишна, чим почнете да користите нови, шифрирани новчаник.</translation> </message> <message> <source>Wallet encryption failed</source> <translation>Шифрирање новчаника неуспешно.</translation> </message> <message> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Шифрирање новчаника није успело због интерне грешке. Ваш новчаник није шифриран.</translation> </message> <message> <source>The supplied passphrases do not match.</source> <translation>Лозинке које сте унели нису исте.</translation> </message> <message> <source>Wallet unlock failed</source> <translation>Отључавање новчаника није успело.</translation> </message> <message> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Лозинка коју сте унели за дешифровање новчаника је погрешна.</translation> </message> <message> <source>Wallet decryption failed</source> <translation>Дешифровање новчаника неуспешно.</translation> </message> <message> <source>Wallet passphrase was successfully changed.</source> <translation>Лозинка новчаника успешно је промењена.</translation> </message> <message> <source>Warning: The Caps Lock key is on!</source> <translation>Упозорање Caps Lock дугме укључено!</translation> </message> </context> <context> <name>BanTableModel</name> <message> <source>IP/Netmask</source> <translation>ИП/Нетмаск</translation> </message> <message> <source>Banned Until</source> <translation>Забрањен до</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <source>Sign &amp;message...</source> <translation>Потпиши 
&amp;поруку...</translation> </message> <message> <source>Synchronizing with network...</source> <translation>Синхронизација са мрежом у току...</translation> </message> <message> <source>&amp;Overview</source> <translation>&amp;Општи преглед</translation> </message> <message> <source>Show general overview of wallet</source> <translation>Погледајте општи преглед новчаника</translation> </message> <message> <source>&amp;Transactions</source> <translation>&amp;Трансакције</translation> </message> <message> <source>Browse transaction history</source> <translation>Претражите историјат трансакција</translation> </message> <message> <source>E&amp;xit</source> <translation>И&amp;злаз</translation> </message> <message> <source>Quit application</source> <translation>Напустите програм</translation> </message> <message> <source>&amp;About %1</source> <translation>&amp;О %1</translation> </message> <message> <source>Show information about %1</source> <translation>Прикажи информације о %1</translation> </message> <message> <source>About &amp;Qt</source> <translation>О &amp;Qt-у</translation> </message> <message> <source>Show information about Qt</source> <translation>Прегледај информације о Qt-у</translation> </message> <message> <source>&amp;Options...</source> <translation>П&amp;оставке...</translation> </message> <message> <source>Modify configuration options for %1</source> <translation>Измени конфигурацију поставки за %1</translation> </message> <message> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Шифровање новчаника...</translation> </message> <message> <source>&amp;Backup Wallet...</source> <translation>&amp;Резерна копија новчаника</translation> </message> <message> <source>&amp;Change Passphrase...</source> <translation>&amp; Промени лозинку...</translation> </message> <message> <source>Open &amp;URI...</source> <translation>Отвори &amp;URI...</translation> </message> <message> <source>Create Wallet...</source> <translation>Направи 
Новчаник...</translation> </message> <message> <source>Create a new wallet</source> <translation>Направи нови ночаник</translation> </message> <message> <source>Wallet:</source> <translation>Новчаник:</translation> </message> <message> <source>Click to disable network activity.</source> <translation>Кликни да искључиш активност на мрежи.</translation> </message> <message> <source>Network activity disabled.</source> <translation>Активност на мрежи искључена.</translation> </message> <message> <source>Click to enable network activity again.</source> <translation>Кликни да поново омогућиш активност на мрежи.</translation> </message> <message> <source>Syncing Headers (%1%)...</source> <translation>Синхронизовање Заглавља (%1%)...</translation> </message> <message> <source>Reindexing blocks on disk...</source> <translation>Поново идексирање блокова на диску...</translation> </message> <message> <source>Proxy is &lt;b&gt;enabled&lt;/b&gt;: %1</source> <translation>Прокси је &lt;b&gt;омогућен&lt;/b&gt;: %1</translation> </message> <message> <source>Send coins to a Particl address</source> <translation>Пошаљи новац на Биткоин адресу</translation> </message> <message> <source>Backup wallet to another location</source> <translation>Направи резервну копију новчаника на другој локацији</translation> </message> <message> <source>Change the passphrase used for wallet encryption</source> <translation>Мењање лозинке којом се шифрује новчаник</translation> </message> <message> <source>&amp;Verify message...</source> <translation>&amp;Верификовање поруке...</translation> </message> <message> <source>&amp;Send</source> <translation>&amp;Пошаљи</translation> </message> <message> <source>&amp;Receive</source> <translation>&amp;Прими</translation> </message> <message> <source>&amp;Show / Hide</source> <translation>&amp;Прикажи / Сакриј</translation> </message> <message> <source>Show or hide the main Window</source> <translation>Прикажи или сакрији главни прозор</translation> </message> 
<message> <source>Encrypt the private keys that belong to your wallet</source> <translation>Шифрирај приватни клуљ који припада новчанику.</translation> </message> <message> <source>Sign messages with your Particl addresses to prove you own them</source> <translation>Потписуј поруку са своје Биткоин адресе као доказ да си њихов власник</translation> </message> <message> <source>Verify messages to ensure they were signed with specified Particl addresses</source> <translation>Верификуј поруке и утврди да ли су потписане од стране спецификованих Биткоин адреса</translation> </message> <message> <source>&amp;File</source> <translation>&amp;Фајл</translation> </message> <message> <source>&amp;Settings</source> <translation>&amp;Подешавања</translation> </message> <message> <source>&amp;Help</source> <translation>&amp;Помоћ</translation> </message> <message> <source>Tabs toolbar</source> <translation>Трака са картицама</translation> </message> <message> <source>Request payments (generates QR codes and particl: URIs)</source> <translation>Затражи плаћање (генерише QR кодове и биткоин: URI-е)</translation> </message> <message> <source>Show the list of used sending addresses and labels</source> <translation>Прегледајте листу коришћених адреса и етикета за слање уплата</translation> </message> <message> <source>Show the list of used receiving addresses and labels</source> <translation>Прегледајте листу коришћених адреса и етикета за пријем уплата</translation> </message> <message> <source>&amp;Command-line options</source> <translation>&amp;Опције командне линије</translation> </message> <message numerus="yes"> <source>%n active connection(s) to Particl network</source> <translation><numerusform>%n aктивна веза са Биткоин мрежом</numerusform><numerusform>%n aктивних веза са Биткоин мрежом</numerusform><numerusform>%n aктивних веза са Биткоин мрежом</numerusform></translation> </message> <message> <source>Indexing blocks on disk...</source> <translation>Идексирање блокова на 
диску...</translation> </message> <message> <source>Processing blocks on disk...</source> <translation>Обрада блокова на диску...</translation> </message> <message numerus="yes"> <source>Processed %n block(s) of transaction history.</source> <translation><numerusform>Обрађенo %n блокова историјата трансакција.</numerusform><numerusform>Обрађенo %n блокова историјата трансакција.</numerusform><numerusform>Обрађенo је %n блокова историјата трансакција.</numerusform></translation> </message> <message> <source>%1 behind</source> <translation>%1 уназад</translation> </message> <message> <source>Last received block was generated %1 ago.</source> <translation>Последњи примљени блок је направљен пре %1.</translation> </message> <message> <source>Transactions after this will not yet be visible.</source> <translation>Трансакције након овога још неће бити видљиве.</translation> </message> <message> <source>Error</source> <translation>Грешка</translation> </message> <message> <source>Warning</source> <translation>Упозорење</translation> </message> <message> <source>Information</source> <translation>Информације</translation> </message> <message> <source>Up to date</source> <translation>Ажурирано</translation> </message> <message> <source>Node window</source> <translation>Ноде прозор</translation> </message> <message> <source>Open node debugging and diagnostic console</source> <translation>Отвори конзолу за ноде дебуг и дијагностику</translation> </message> <message> <source>&amp;Sending addresses</source> <translation>&amp;Адресе за слање</translation> </message> <message> <source>&amp;Receiving addresses</source> <translation>&amp;Адресе за примање</translation> </message> <message> <source>Open a particl: URI</source> <translation>Отвори биткоин: URI</translation> </message> <message> <source>Open Wallet</source> <translation>Отвори новчаник</translation> </message> <message> <source>Open a wallet</source> <translation>Отвори новчаник</translation> </message> <message> 
<source>Close Wallet...</source> <translation>Затвори новчаник...</translation> </message> <message> <source>Close wallet</source> <translation>Затвори новчаник</translation> </message> <message> <source>Close all wallets</source> <translation>Затвори све новчанике</translation> </message> <message> <source>Show the %1 help message to get a list with possible Particl command-line options</source> <translation>Прикажи поруку помоћи %1 за листу са могућим опцијама Биткоин командне линије</translation> </message> <message> <source>default wallet</source> <translation>подразумевани новчаник</translation> </message> <message> <source>No wallets available</source> <translation>Нема доступних новчаника</translation> </message> <message> <source>Minimize</source> <translation>Умањи</translation> </message> <message> <source>Zoom</source> <translation>Увећај</translation> </message> <message> <source>Main Window</source> <translation>Главни прозор</translation> </message> <message> <source>%1 client</source> <translation>%1 клијент</translation> </message> <message> <source>Connecting to peers...</source> <translation>Повезивање са клијентима...</translation> </message> <message> <source>Catching up...</source> <translation>Ажурирање у току...</translation> </message> <message> <source>Error: %1</source> <translation>Грешка: %1</translation> </message> <message> <source>Warning: %1</source> <translation>Упозорење: %1</translation> </message> <message> <source>Date: %1 </source> <translation>Датум: %1 </translation> </message> <message> <source>Amount: %1 </source> <translation>Износ: %1 </translation> </message> <message> <source>Wallet: %1 </source> <translation>Новчаник: %1 </translation> </message> <message> <source>Type: %1 </source> <translation>Тип: %1 </translation> </message> <message> <source>Label: %1 </source> <translation>Ознака: %1 </translation> </message> <message> <source>Address: %1 </source> <translation>Адреса: %1 </translation> </message> <message> 
<source>Sent transaction</source> <translation>Послата трансакција</translation> </message> <message> <source>Incoming transaction</source> <translation>Долазна трансакција</translation> </message> <message> <source>HD key generation is &lt;b&gt;enabled&lt;/b&gt;</source> <translation>Генерисање ХД кључа је &lt;b&gt;омогућено&lt;/b&gt;</translation> </message> <message> <source>HD key generation is &lt;b&gt;disabled&lt;/b&gt;</source> <translation>Генерисање ХД кључа је &lt;b&gt;онеомогућено&lt;/b&gt;</translation> </message> <message> <source>Private key &lt;b&gt;disabled&lt;/b&gt;</source> <translation>Приватни кључ &lt;b&gt;онемогућен&lt;/b&gt;</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Новчаник јс &lt;b&gt;шифриран&lt;/b&gt; и тренутно &lt;b&gt;откључан&lt;/b&gt;</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Новчаник јс &lt;b&gt;шифрован&lt;/b&gt; и тренутно &lt;b&gt;закључан&lt;/b&gt;</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <source>Coin Selection</source> <translation>Избор новчића</translation> </message> <message> <source>Quantity:</source> <translation>Количина:</translation> </message> <message> <source>Bytes:</source> <translation>Бајта:</translation> </message> <message> <source>Amount:</source> <translation>Износ:</translation> </message> <message> <source>Fee:</source> <translation>Накнада:</translation> </message> <message> <source>Dust:</source> <translation>Прашина:</translation> </message> <message> <source>After Fee:</source> <translation>Након накнаде:</translation> </message> <message> <source>Change:</source> <translation>Промени:</translation> </message> <message> <source>(un)select all</source> <translation>(Де)Селектуј све</translation> </message> <message> <source>Tree mode</source> 
<translation>Прикажи као стабло</translation> </message> <message> <source>List mode</source> <translation>Прикажи као листу</translation> </message> <message> <source>Amount</source> <translation>Износ</translation> </message> <message> <source>Received with label</source> <translation>Примљено са ознаком</translation> </message> <message> <source>Received with address</source> <translation>Примљено са адресом</translation> </message> <message> <source>Date</source> <translation>Датум</translation> </message> <message> <source>Confirmations</source> <translation>Потврде</translation> </message> <message> <source>Confirmed</source> <translation>Потврђено</translation> </message> <message> <source>Copy address</source> <translation>Копирај адресу</translation> </message> <message> <source>Copy label</source> <translation>Копирај ознаку</translation> </message> <message> <source>Copy amount</source> <translation>Копирај износ</translation> </message> <message> <source>Copy transaction ID</source> <translation>Копирај идентификациони број трансакције</translation> </message> <message> <source>Lock unspent</source> <translation>Закључај непотрошено</translation> </message> <message> <source>Unlock unspent</source> <translation>Откључај непотрошено</translation> </message> <message> <source>Copy quantity</source> <translation>Копирај количину</translation> </message> <message> <source>Copy fee</source> <translation>Копирај провизију</translation> </message> <message> <source>Copy after fee</source> <translation>Копирај након провизије</translation> </message> <message> <source>Copy bytes</source> <translation>Копирај бајтове</translation> </message> <message> <source>Copy dust</source> <translation>Копирај прашину</translation> </message> <message> <source>Copy change</source> <translation>Копирај кусур</translation> </message> <message> <source>(%1 locked)</source> <translation>(%1 закључан)</translation> </message> <message> <source>yes</source> 
<translation>да</translation> </message> <message> <source>no</source> <translation>не</translation> </message> <message> <source>This label turns red if any recipient receives an amount smaller than the current dust threshold.</source> <translation>Ознака постаје црвена уколико прималац прими износ мањи од износа прашине - сићушног износа.</translation> </message> <message> <source>Can vary +/- %1 satoshi(s) per input.</source> <translation>Може варирати +/- %1 сатоши(ја) по инпуту.</translation> </message> <message> <source>(no label)</source> <translation>(без ознаке)</translation> </message> <message> <source>change from %1 (%2)</source> <translation>Измени од %1 (%2)</translation> </message> <message> <source>(change)</source> <translation>(промени)</translation> </message> </context> <context> <name>CreateWalletActivity</name> <message> <source>Creating Wallet &lt;b&gt;%1&lt;/b&gt;...</source> <translation>Креирање новчаника&lt;b&gt;%1... &lt;/b&gt;...</translation> </message> <message> <source>Create wallet failed</source> <translation>Креирање новчаника неуспешно</translation> </message> <message> <source>Create wallet warning</source> <translation>Направи упозорење за новчаник</translation> </message> </context> <context> <name>CreateWalletDialog</name> <message> <source>Create Wallet</source> <translation>Направи новчаник</translation> </message> <message> <source>Wallet Name</source> <translation>Име Новчаника</translation> </message> <message> <source>Encrypt the wallet. The wallet will be encrypted with a passphrase of your choice.</source> <translation>Шифрирај новчаник. Новчаник ће бити шифриран лозинком коју одаберете.</translation> </message> <message> <source>Encrypt Wallet</source> <translation>Шифрирај новчаник</translation> </message> <message> <source>Disable private keys for this wallet. Wallets with private keys disabled will have no private keys and cannot have an HD seed or imported private keys. 
This is ideal for watch-only wallets.</source> <translation>Онемогући приватни кључ за овај новчаник. Новчаници са онемогућеним приватним кључем неће имати приватни кључ и не могу имати HD семе или увезени приватни кључ. Ова опција идеална је за новчанике који су искључиво за посматрање.</translation> </message> <message> <source>Disable Private Keys</source> <translation>Онемогући Приватне Кључеве</translation> </message> <message> <source>Make a blank wallet. Blank wallets do not initially have private keys or scripts. Private keys and addresses can be imported, or an HD seed can be set, at a later time.</source> <translation>Направи празан новчаник. Празни новчанци немају приватане кључеве или скрипте. Приватни кључеви могу се увести, или HD семе може бити постављено касније.</translation> </message> <message> <source>Make Blank Wallet</source> <translation>Направи Празан Новчаник</translation> </message> <message> <source>Create</source> <translation>Направи</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <source>Edit Address</source> <translation>Измени адресу</translation> </message> <message> <source>&amp;Label</source> <translation>&amp;Ознака</translation> </message> <message> <source>The label associated with this address list entry</source> <translation>Ознака повезана са овом ставком из листе адреса</translation> </message> <message> <source>The address associated with this address list entry. This can only be modified for sending addresses.</source> <translation>Адреса повезана са овом ставком из листе адреса. 
Ово можете променити једини у случају адреса за плаћање.</translation> </message> <message> <source>&amp;Address</source> <translation>&amp;Адреса</translation> </message> <message> <source>New sending address</source> <translation>Нова адреса за слање</translation> </message> <message> <source>Edit receiving address</source> <translation>Измени адресу за примање</translation> </message> <message> <source>Edit sending address</source> <translation>Измени адресу за слање</translation> </message> <message> <source>The entered address "%1" is not a valid Particl address.</source> <translation>Унета адреса "%1" није важећа Биткоин адреса.</translation> </message> <message> <source>Address "%1" already exists as a receiving address with label "%2" and so cannot be added as a sending address.</source> <translation>Адреса "%1" већ постоји као примајућа адреса са ознаком "%2" и не може бити додата као адреса за слање.</translation> </message> <message> <source>The entered address "%1" is already in the address book with label "%2".</source> <translation>Унета адреса "%1" већ постоји у адресару са ознаком "%2".</translation> </message> <message> <source>Could not unlock wallet.</source> <translation>Новчаник није могуће откључати.</translation> </message> <message> <source>New key generation failed.</source> <translation>Генерисање новог кључа није успело.</translation> </message> </context> <context> <name>FreespaceChecker</name> <message> <source>A new data directory will be created.</source> <translation>Нови директоријум података биће креиран.</translation> </message> <message> <source>name</source> <translation>име</translation> </message> <message> <source>Directory already exists. Add %1 if you intend to create a new directory here.</source> <translation>Директоријум већ постоји. 
Додајте %1 ако намеравате да креирате нови директоријум овде.</translation> </message> <message> <source>Path already exists, and is not a directory.</source> <translation>Путања већ постоји и није директоријум.</translation> </message> <message> <source>Cannot create data directory here.</source> <translation>Не можете креирати директоријум података овде.</translation> </message> </context> <context> <name>HelpMessageDialog</name> <message> <source>version</source> <translation>верзија</translation> </message> <message> <source>About %1</source> <translation>Приближно %1</translation> </message> <message> <source>Command-line options</source> <translation>Опције командне линије</translation> </message> </context> <context> <name>Intro</name> <message> <source>Welcome</source> <translation>Добродошли</translation> </message> <message> <source>Welcome to %1.</source> <translation>Добродошли на %1.</translation> </message> <message> <source>As this is the first time the program is launched, you can choose where %1 will store its data.</source> <translation>Пошто је ово први пут да је програм покренут, можете изабрати где ће %1 чувати своје податке.</translation> </message> <message> <source>When you click OK, %1 will begin to download and process the full %4 block chain (%2GB) starting with the earliest transactions in %3 when %4 initially launched.</source> <translation>Када кликнете на ОК, %1 ће почети с преузимањем и процесуирањем целокупног ланца блокова %4 (%2GB), почевши од најранијих трансакција у %3 када је %4 покренут.</translation> </message> <message> <source>Reverting this setting requires re-downloading the entire blockchain. It is faster to download the full chain first and prune it later. Disables some advanced features.</source> <translation>Враћање ове опције захтева поновно преузимање целокупног блокчејна - ланца блокова. Брже је преузети цели ланац и касније га скратити. 
Онемогућава неке напредне опције.</translation> </message> <message> <source>This initial synchronisation is very demanding, and may expose hardware problems with your computer that had previously gone unnoticed. Each time you run %1, it will continue downloading where it left off.</source> <translation>Првобитна синхронизација веома је захтевна и може изложити ваш рачунар хардверским проблемима који раније нису били примећени. Сваки пут када покренете %1, преузимање ће се наставити тамо где је било прекинуто.</translation> </message> <message> <source>If you have chosen to limit block chain storage (pruning), the historical data must still be downloaded and processed, but will be deleted afterward to keep your disk usage low.</source> <translation>Ако сте одлучили да ограничите складиштење ланаца блокова (тримовање), историјски подаци се ипак морају преузети и обрадити, али ће након тога бити избрисани како би се ограничила употреба диска.</translation> </message> <message> <source>Use the default data directory</source> <translation>Користите подразумевани директоријум података</translation> </message> <message> <source>Use a custom data directory:</source> <translation>Користите прилагођени директоријум података:</translation> </message> <message> <source>Particl</source> <translation>Биткоин</translation> </message> <message> <source>Discard blocks after verification, except most recent %1 GB (prune)</source> <translation>Обриши блокове након верификације, осим најновије %1 GB (скраћено)</translation> </message> <message> <source>At least %1 GB of data will be stored in this directory, and it will grow over time.</source> <translation>Најмање %1 GB подататака биће складиштен у овај директорјиум који ће временом порасти.</translation> </message> <message> <source>Approximately %1 GB of data will be stored in this directory.</source> <translation>Најмање %1 GB подататака биће складиштен у овај директорјиум.</translation> </message> <message> <source>%1 will 
download and store a copy of the Particl block chain.</source> <translation>%1 ће преузети и складиштити копију Биткоин ланца блокова.</translation> </message> <message> <source>The wallet will also be stored in this directory.</source> <translation>Новчаник ће бити складиштен у овом директоријуму.</translation> </message> <message> <source>Error: Specified data directory "%1" cannot be created.</source> <translation>Грешка: Наведени директоријум података "%1" не може бити креиран.</translation> </message> <message> <source>Error</source> <translation>Грешка</translation> </message> <message numerus="yes"> <source>%n GB of free space available</source> <translation><numerusform>Доступно %n GB слободног простора</numerusform><numerusform>Доступно %n GB слободног простора</numerusform><numerusform>Доступно %n GB слободног простора</numerusform></translation> </message> <message numerus="yes"> <source>(of %n GB needed)</source> <translation><numerusform>(од потребних %n GB)</numerusform><numerusform>(од потребних %n GB)</numerusform><numerusform>(од потребних %n GB)</numerusform></translation> </message> <message numerus="yes"> <source>(%n GB needed for full chain)</source> <translation><numerusform>(%n GB потребно за цео ланац)</numerusform><numerusform>(%n GB потребно за цео ланац)</numerusform><numerusform>(%n GB потребно за цео ланац)</numerusform></translation> </message> </context> <context> <name>ModalOverlay</name> <message> <source>Form</source> <translation>Форма</translation> </message> <message> <source>Recent transactions may not yet be visible, and therefore your wallet's balance might be incorrect. This information will be correct once your wallet has finished synchronizing with the particl network, as detailed below.</source> <translation>Недавне трансакције можда не буду видљиве, зато салдо твог новчаника можда буде нетачан.
Ова информација биђе тачна када новчаник заврши са синхронизацијом биткоин мреже, приказаној испод.</translation> </message> <message> <source>Attempting to spend particl that are affected by not-yet-displayed transactions will not be accepted by the network.</source> <translation>Покушај слања биткоина који су под утицајем још не приказаних трансакција неће бити прихваћен од стране мреже.</translation> </message> <message> <source>Number of blocks left</source> <translation>Преостала количина блокова</translation> </message> <message> <source>Unknown...</source> <translation>Непознато...</translation> </message> <message> <source>Last block time</source> <translation>Време последњег блока</translation> </message> <message> <source>Progress</source> <translation>Напредак</translation> </message> <message> <source>Progress increase per hour</source> <translation>Пораст напретка по часу</translation> </message> <message> <source>calculating...</source> <translation>рачунање...</translation> </message> <message> <source>Estimated time left until synced</source> <translation>Оквирно време до краја синхронизације</translation> </message> <message> <source>Hide</source> <translation>Сакриј</translation> </message> <message> <source>Esc</source> <translation>Есц</translation> </message> <message> <source>%1 is currently syncing. It will download headers and blocks from peers and validate them until reaching the tip of the block chain.</source> <translation>%1 се синхронузује. Преузеће заглавља и блокове од клијената и потврдити их док не стигне на крај ланца блокова.</translation> </message> <message> <source>Unknown. Syncing Headers (%1, %2%)...</source> <translation>Непознато. 
Синхронизација заглавља (%1, %2%)...</translation> </message> </context> <context> <name>OpenURIDialog</name> <message> <source>Open particl URI</source> <translation>Отвори биткоин URI</translation> </message> <message> <source>URI:</source> <translation>URI:</translation> </message> </context> <context> <name>OpenWalletActivity</name> <message> <source>Open wallet failed</source> <translation>Отварање новчаника неуспешно</translation> </message> <message> <source>Open wallet warning</source> <translation>Упозорење приликом отварања новчаника</translation> </message> <message> <source>default wallet</source> <translation>подразумевани новчаник</translation> </message> <message> <source>Opening Wallet &lt;b&gt;%1&lt;/b&gt;...</source> <translation>Отварање новчаника&lt;b&gt;%1&lt;/b&gt;...</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <source>Options</source> <translation>Поставке</translation> </message> <message> <source>&amp;Main</source> <translation>&amp;Главни</translation> </message> <message> <source>Automatically start %1 after logging in to the system.</source> <translation>Аутоматски почети %1 након пријање на систем.</translation> </message> <message> <source>&amp;Start %1 on system login</source> <translation>&amp;Покрени %1 приликом пријаве на систем</translation> </message> <message> <source>Size of &amp;database cache</source> <translation>Величина кеша базе података</translation> </message> <message> <source>Number of script &amp;verification threads</source> <translation>Број скрипти и CPU за верификацију</translation> </message> <message> <source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source> <translation>ИП адреса проксија (нпр. 
IPv4: 127.0.0.1 / IPv6: ::1)</translation> </message> <message> <source>Shows if the supplied default SOCKS5 proxy is used to reach peers via this network type.</source> <translation>Приказује се ако је испоручени уобичајени SOCKS5 проxy коришћен ради проналажења клијената преко овог типа мреже. </translation> </message> <message> <source>Hide the icon from the system tray.</source> <translation>Сакриј икону са системске траке.</translation> </message> <message> <source>&amp;Hide tray icon</source> <translation>&amp;Сакриј икону</translation> </message> <message> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Exit in the menu.</source> <translation>Минимизирање уместо искључивања апликације када се прозор затвори. Када је ова опција омогућена, апликација ће бити затворена тек након одабира Излаз у менију. </translation> </message> <message> <source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source> <translation>URL треће стране (нпр блок претраживач) који се појављује у менију трансакције. %s у URL  замењен је хашом трансакције. 
Више URL-ова поделено је вертикалом |.</translation> </message> <message> <source>Open the %1 configuration file from the working directory.</source> <translation>Отвори %1 конфигурациони фајл из директоријума у употреби.</translation> </message> <message> <source>Open Configuration File</source> <translation>Отвори Конфигурациону Датотеку</translation> </message> <message> <source>Reset all client options to default.</source> <translation>Ресетуј све опције клијента на почетна подешавања.</translation> </message> <message> <source>&amp;Reset Options</source> <translation>&amp;Ресет Опције</translation> </message> <message> <source>&amp;Network</source> <translation>&amp;Мрежа</translation> </message> <message> <source>Disables some advanced features but all blocks will still be fully validated. Reverting this setting requires re-downloading the entire blockchain. Actual disk usage may be somewhat higher.</source> <translation>Онемогућава поједина напредна својства, али ће сви блокови у потпуности бити валидирани. 
Враћање ове опције захтева поновно преузимање целокупног блокчејна.</translation> </message> <message> <source>Prune &amp;block storage to</source> <translation>Скрати &amp;block складиштење на</translation> </message> <message> <source>GB</source> <translation>GB</translation> </message> <message> <source>Reverting this setting requires re-downloading the entire blockchain.</source> <translation>Враћање ове опције захтева поновно преузимање целокупног блокчејна.</translation> </message> <message> <source>MiB</source> <translation>MiB</translation> </message> <message> <source>(0 = auto, &lt;0 = leave that many cores free)</source> <translation>(0 = аутоматски одреди, &lt;0 = остави слободно толико језгара)</translation> </message> <message> <source>W&amp;allet</source> <translation>Н&amp;овчаник</translation> </message> <message> <source>Expert</source> <translation>Експерт</translation> </message> <message> <source>Enable coin &amp;control features</source> <translation>Омогући опцију контроле новчића</translation> </message> <message> <source>If you disable the spending of unconfirmed change, the change from a transaction cannot be used until that transaction has at least one confirmation. This also affects how your balance is computed.</source> <translation>Уколико онемогућиш трошење непотврђеног кусура, кусур трансакције неће моћи да се користи док трансакција нема макар једну потврду. Ово такође утиче како ће се салдо рачунати.</translation> </message> <message> <source>&amp;Spend unconfirmed change</source> <translation>&amp;Троши непотврђени кусур</translation> </message> <message> <source>Automatically open the Particl client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Аутоматски отвори Биткоин клијент порт на рутеру.
Ова опција ради само уколико твој рутер подржава и има омогућен UPnP.</translation> </message> <message> <source>Map port using &amp;UPnP</source> <translation>Мапирај порт користећи &amp;UPnP</translation> </message> <message> <source>Accept connections from outside.</source> <translation>Прихвати спољашње концекције.</translation> </message> <message> <source>Allow incomin&amp;g connections</source> <translation>Дозволи долазеће конекције.</translation> </message> <message> <source>Connect to the Particl network through a SOCKS5 proxy.</source> <translation>Конектуј се на Биткоин мрежу кроз SOCKS5 проксијем.</translation> </message> <message> <source>&amp;Connect through SOCKS5 proxy (default proxy):</source> <translation>&amp;Конектуј се кроз SOCKS5 прокси (уобичајени прокси):</translation> </message> <message> <source>Proxy &amp;IP:</source> <translation>Прокси &amp;IP:</translation> </message> <message> <source>&amp;Port:</source> <translation>&amp;Порт:</translation> </message> <message> <source>Port of the proxy (e.g. 9050)</source> <translation>Прокси порт (нпр. 
9050)</translation> </message> <message> <source>Used for reaching peers via:</source> <translation>Коришћен за приступ другим чворовима преко:</translation> </message> <message> <source>IPv4</source> <translation>IPv4</translation> </message> <message> <source>IPv6</source> <translation>IPv6</translation> </message> <message> <source>Tor</source> <translation>Тор</translation> </message> <message> <source>Show only a tray icon after minimizing the window.</source> <translation>Покажи само иконицу у панелу након минимизирања прозора</translation> </message> <message> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;минимизирај у доњу линију, уместо у програмску траку</translation> </message> <message> <source>M&amp;inimize on close</source> <translation>Минимизирај при затварању</translation> </message> <message> <source>&amp;Display</source> <translation>&amp;Прикажи</translation> </message> <message> <source>User Interface &amp;language:</source> <translation>&amp;Језик корисничког интерфејса:</translation> </message> <message> <source>The user interface language can be set here. This setting will take effect after restarting %1.</source> <translation>Језик корисничког интерфејса може се овде поставити. 
Ово својство биће на снази након поновног покретања %1.</translation> </message> <message> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Јединица за приказивање износа:</translation> </message> <message> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Одабери уобичајену подјединицу која се приказује у интерфејсу и када се шаљу новчићи.</translation> </message> <message> <source>Whether to show coin control features or not.</source> <translation>Да ли да се прикажу опције контроле новчића или не.</translation> </message> <message> <source>&amp;Third party transaction URLs</source> <translation>&amp;URL-ови трансакција трећих страна</translation> </message> <message> <source>Options set in this dialog are overridden by the command line or in the configuration file:</source> <translation>Опције постављене у овом дијалогу су поништене командном линијом или у конфигурационој датотеци:</translation> </message> <message> <source>&amp;OK</source> <translation>&amp;У реду</translation> </message> <message> <source>&amp;Cancel</source> <translation>&amp;Откажи</translation> </message> <message> <source>default</source> <translation>подразумевано</translation> </message> <message> <source>none</source> <translation>ниједно</translation> </message> <message> <source>Confirm options reset</source> <translation>Потврди ресет опција</translation> </message> <message> <source>Client restart required to activate changes.</source> <translation>Рестарт клијента захтеван како би се промене активирале.</translation> </message> <message> <source>Client will be shut down. Do you want to proceed?</source> <translation>Клијент ће се искључити.
Да ли желите да наставите?</translation> </message> <message> <source>Configuration options</source> <translation>Конфигурација својстава</translation> </message> <message> <source>The configuration file is used to specify advanced user options which override GUI settings. Additionally, any command-line options will override this configuration file.</source> <translation>Конфигурациона датотека се користи да одреди напредне корисничке опције које поништавају подешавања у графичком корисничком интерфејсу. Додатно, све опције командне линије поништиће ову конфигурациону датотеку.</translation> </message> <message> <source>Error</source> <translation>Грешка</translation> </message> <message> <source>The configuration file could not be opened.</source> <translation>Ова конфигурациона датотека не може бити отворена.</translation> </message> <message> <source>This change would require a client restart.</source> <translation>Ова промена захтева да се клијент поново покрене.</translation> </message> <message> <source>The supplied proxy address is invalid.</source> <translation>Достављена прокси адреса није валидна.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <source>Form</source> <translation>Форма</translation> </message> <message> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Particl network after a connection is established, but this process has not completed yet.</source> <translation>Приказана информација може бити застарела.
Ваш новчаник се аутоматски синхронизује са Биткоин мрежом након успостављања конекције, али овај процес је још увек у току.</translation> </message> <message> <source>Watch-only:</source> <translation>Само гледање:</translation> </message> <message> <source>Available:</source> <translation>Доступно:</translation> </message> <message> <source>Your current spendable balance</source> <translation>Салдо који можете потрошити</translation> </message> <message> <source>Pending:</source> <translation>На чекању:</translation> </message> <message> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source> <translation>Укупан број трансакција које још увек нису потврђене, и не рачунају се у салдо рачуна који је могуће потрошити</translation> </message> <message> <source>Immature:</source> <translation>Недоспело:</translation> </message> <message> <source>Mined balance that has not yet matured</source> <translation>Салдо рударења који још увек није доспео</translation> </message> <message> <source>Balances</source> <translation>Салдо</translation> </message> <message> <source>Total:</source> <translation>Укупно:</translation> </message> <message> <source>Your current total balance</source> <translation>Твој тренутни салдо</translation> </message> <message> <source>Your current balance in watch-only addresses</source> <translation>Твој тренутни салдо са гледај-само адресама</translation> </message> <message> <source>Spendable:</source> <translation>Могуће потрошити:</translation> </message> <message> <source>Recent transactions</source> <translation>Недавне трансакције</translation> </message> <message> <source>Unconfirmed transactions to watch-only addresses</source> <translation>Трансакције за гледај-само адресе које нису потврђене</translation> </message> <message> <source>Mined balance in watch-only addresses that has not yet matured</source> <translation>Салдорударења у адресама које су у моду само гледање, који 
још увек није доспео</translation> </message> <message> <source>Current total balance in watch-only addresses</source> <translation>Тренутни укупни салдо у адресама у опцији само-гледај</translation> </message> </context> <context> <name>PSBTOperationsDialog</name> <message> <source>Dialog</source> <translation>Дијалог</translation> </message> <message> <source>Sign Tx</source> <translation>Потпиши Трансакцију</translation> </message> <message> <source>Broadcast Tx</source> <translation>Емитуј Трансакцију</translation> </message> <message> <source>Copy to Clipboard</source> <translation>Копирајте у клипборд.</translation> </message> <message> <source>Save...</source> <translation>Сачувај...</translation> </message> <message> <source>Close</source> <translation>Затвори</translation> </message> <message> <source>Save Transaction Data</source> <translation>Сачувај Податке Трансакције</translation> </message> <message> <source>Partially Signed Transaction (Binary) (*.psbt)</source> <translation>Парцијално Потписана Трансакција (Binary) (*.psbt)</translation> </message> <message> <source>Total Amount</source> <translation>Укупан износ</translation> </message> <message> <source>or</source> <translation>или</translation> </message> </context> <context> <name>PaymentServer</name> <message> <source>Payment request error</source> <translation>Грешка у захтеву за плаћање</translation> </message> <message> <source>Cannot start particl: click-to-pay handler</source> <translation>Не могу покренути биткоин: "кликни-да-платиш" механизам</translation> </message> <message> <source>URI handling</source> <translation>URI руковање</translation> </message> <message> <source>'particl://' is not a valid URI. Use 'particl:' instead.</source> <translation>'particl://' није важећи URI. 
Уместо тога користити 'particl:'.</translation> </message> <message> <source>Cannot process payment request because BIP70 is not supported.</source> <translation>Захтев за плаћање не може се обрадити, јер BIP70 није подржан.</translation> </message> <message> <source>Due to widespread security flaws in BIP70 it's strongly recommended that any merchant instructions to switch wallets be ignored.</source> <translation>Због великог броја безбедносних пропуста у BIP70, препоручено је да се све инструкције трговаца за промену новчаника игноришу.</translation> </message> <message> <source>If you are receiving this error you should request the merchant provide a BIP21 compatible URI.</source> <translation>Уколико добијате грешку овог типа, потребно је да захтевате од трговца BIP21 компатибилан URI.</translation> </message> <message> <source>Invalid payment address %1</source> <translation>Неважећа адреса за плаћање %1</translation> </message> <message> <source>URI cannot be parsed! This can be caused by an invalid Particl address or malformed URI parameters.</source> <translation>URI се не може рашчланити! 
Ово може бити проузроковано неважећом Биткоин адресом или погрешно форматираним URI параметрима.</translation> </message> <message> <source>Payment request file handling</source> <translation>Руковање датотеком захтева за плаћање</translation> </message> </context> <context> <name>PeerTableModel</name> <message> <source>User Agent</source> <translation>Кориснички агент</translation> </message> <message> <source>Node/Service</source> <translation>Ноде/Сервис</translation> </message> <message> <source>NodeId</source> <translation>НодеИД</translation> </message> <message> <source>Ping</source> <translation>Пинг</translation> </message> <message> <source>Sent</source> <translation>Послато</translation> </message> <message> <source>Received</source> <translation>Примљено</translation> </message> </context> <context> <name>QObject</name> <message> <source>Amount</source> <translation>Износ</translation> </message> <message> <source>Enter a Particl address (e.g. %1)</source> <translation>Унеси Биткоин адресу, (нпр %1)</translation> </message> <message> <source>%1 d</source> <translation>%1 d</translation> </message> <message> <source>%1 h</source> <translation>%1 h</translation> </message> <message> <source>%1 m</source> <translation>%1 m</translation> </message> <message> <source>%1 s</source> <translation>%1 s</translation> </message> <message> <source>None</source> <translation>Nijedan</translation> </message> <message> <source>N/A</source> <translation>Није применљиво</translation> </message> <message> <source>%1 ms</source> <translation>%1 ms</translation> </message> <message numerus="yes"> <source>%n second(s)</source> <translation><numerusform>%n секунда</numerusform><numerusform>%n секунди</numerusform><numerusform>%n секунди</numerusform></translation> </message> <message numerus="yes"> <source>%n minute(s)</source> <translation><numerusform>%n минут</numerusform><numerusform>%n минута</numerusform><numerusform>%n минута</numerusform></translation> </message> 
<message numerus="yes"> <source>%n hour(s)</source> <translation><numerusform>%n час</numerusform><numerusform>%n часа</numerusform><numerusform>%n часова</numerusform></translation> </message> <message numerus="yes"> <source>%n day(s)</source> <translation><numerusform>%n минут</numerusform><numerusform>%n минута</numerusform><numerusform>%n минута</numerusform></translation> </message> <message numerus="yes"> <source>%n week(s)</source> <translation><numerusform>%n недеља</numerusform><numerusform>%n недеље</numerusform><numerusform>%n недеља</numerusform></translation> </message> <message> <source>%1 and %2</source> <translation>%1 и %2</translation> </message> <message numerus="yes"> <source>%n year(s)</source> <translation><numerusform>%n година</numerusform><numerusform>%n године</numerusform><numerusform>%n година</numerusform></translation> </message> <message> <source>%1 B</source> <translation>%1 B</translation> </message> <message> <source>%1 KB</source> <translation>%1 KB</translation> </message> <message> <source>%1 MB</source> <translation>%1 MB</translation> </message> <message> <source>%1 GB</source> <translation>%1 GB</translation> </message> <message> <source>Error: Specified data directory "%1" does not exist.</source> <translation>Грешка: Одабрани директорјиум датотеке "%1" не постоји.</translation> </message> <message> <source>Error: %1</source> <translation>Грешка: %1</translation> </message> <message> <source>%1 didn't yet exit safely...</source> <translation>%1 није изашао безбедно...</translation> </message> <message> <source>unknown</source> <translation>непознато</translation> </message> </context> <context> <name>QRImageWidget</name> <message> <source>&amp;Save Image...</source> <translation>&amp;Сачувај Слику...</translation> </message> <message> <source>&amp;Copy Image</source> <translation>&amp;Копирај Слику</translation> </message> <message> <source>Resulting URI too long, try to reduce the text for label / message.</source> 
<translation>Добијени URI је предуг, покушај да смањиш текст за ознаку / поруку.</translation> </message> <message> <source>Error encoding URI into QR Code.</source> <translation>Грешка током енкодирања URI у QR Код.</translation> </message> <message> <source>QR code support not available.</source> <translation>QR код подршка није доступна.</translation> </message> <message> <source>Save QR Code</source> <translation>Упамти QR Код</translation> </message> <message> <source>PNG Image (*.png)</source> <translation>PNG Слика (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <source>N/A</source> <translation>Није применљиво</translation> </message> <message> <source>Client version</source> <translation>Верзија клијента</translation> </message> <message> <source>&amp;Information</source> <translation>&amp;Информације</translation> </message> <message> <source>General</source> <translation>Опште</translation> </message> <message> <source>Using BerkeleyDB version</source> <translation>Коришћење BerkeleyDB верзије.</translation> </message> <message> <source>Datadir</source> <translation>Datadir</translation> </message> <message> <source>To specify a non-default location of the data directory use the '%1' option.</source> <translation>Да бисте одредили локацију која није унапред задата за директоријум података користите '%1' опцију.</translation> </message> <message> <source>Blocksdir</source> <translation>Blocksdir</translation> </message> <message> <source>To specify a non-default location of the blocks directory use the '%1' option.</source> <translation>Да бисте одредили локацију која није унапред задата за директоријум блокова користите '%1' опцију.</translation> </message> <message> <source>Startup time</source> <translation>Време подизања система</translation> </message> <message> <source>Network</source> <translation>Мрежа</translation> </message> <message> <source>Name</source> <translation>Име</translation> </message>
<message> <source>Number of connections</source> <translation>Број конекција</translation> </message> <message> <source>Block chain</source> <translation>Блокчејн</translation> </message> <message> <source>Memory Pool</source> <translation>Удружена меморија</translation> </message> <message> <source>Current number of transactions</source> <translation>Тренутни број трансакција</translation> </message> <message> <source>Memory usage</source> <translation>Употреба меморије</translation> </message> <message> <source>Wallet: </source> <translation>Новчаник</translation> </message> <message> <source>(none)</source> <translation>(ниједан)</translation> </message> <message> <source>&amp;Reset</source> <translation>&amp;Ресетуј</translation> </message> <message> <source>Received</source> <translation>Примљено</translation> </message> <message> <source>Sent</source> <translation>Послато</translation> </message> <message> <source>&amp;Peers</source> <translation>&amp;Колеге</translation> </message> <message> <source>Banned peers</source> <translation>Забрањене колеге на мрежи</translation> </message> <message> <source>Select a peer to view detailed information.</source> <translation>Одабери колегу да би видели детаљне информације</translation> </message> <message> <source>Direction</source> <translation>Правац</translation> </message> <message> <source>Version</source> <translation>Верзија</translation> </message> <message> <source>Starting Block</source> <translation>Почетни блок</translation> </message> <message> <source>Synced Headers</source> <translation>Синхронизована заглавља</translation> </message> <message> <source>Synced Blocks</source> <translation>Синхронизовани блокови</translation> </message> <message> <source>The mapped Autonomous System used for diversifying peer selection.</source> <translation>Мапирани аутономни систем који се користи за диверсификацију селекције колега чворова.</translation> </message> <message> <source>Mapped AS</source> 
<translation>Мапирани АС</translation> </message> <message> <source>User Agent</source> <translation>Кориснички агент</translation> </message> <message> <source>Node window</source> <translation>Ноде прозор</translation> </message> <message> <source>Open the %1 debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Отворите %1 датотеку са записима о отклоњеним грешкама из тренутног директоријума датотека. Ово може потрајати неколико секунди за велике датотеке записа.</translation> </message> <message> <source>Decrease font size</source> <translation>Смањи величину фонта</translation> </message> <message> <source>Increase font size</source> <translation>Увећај величину фонта</translation> </message> <message> <source>Services</source> <translation>Услуге</translation> </message> <message> <source>Connection Time</source> <translation>Време конекције</translation> </message> <message> <source>Last Send</source> <translation>Последње послато</translation> </message> <message> <source>Last Receive</source> <translation>Последње примљено</translation> </message> <message> <source>Ping Time</source> <translation>Пинг време</translation> </message> <message> <source>The duration of a currently outstanding ping.</source> <translation>Трајање тренутно неразрешеног пинга.</translation> </message> <message> <source>Ping Wait</source> <translation>Чекање на пинг</translation> </message> <message> <source>Min Ping</source> <translation>Мин Пинг</translation> </message> <message> <source>Time Offset</source> <translation>Помак времена</translation> </message> <message> <source>Last block time</source> <translation>Време последњег блока</translation> </message> <message> <source>&amp;Open</source> <translation>&amp;Отвори</translation> </message> <message> <source>&amp;Console</source> <translation>&amp;Конзола</translation> </message> <message> <source>&amp;Network Traffic</source> <translation>&amp; Саобраћај 
Мреже</translation> </message> <message> <source>Totals</source> <translation>Укупно</translation> </message> <message> <source>In:</source> <translation>Долазно:</translation> </message> <message> <source>Out:</source> <translation>Одлазно:</translation> </message> <message> <source>Debug log file</source> <translation>Дебугуј лог фајл</translation> </message> <message> <source>Clear console</source> <translation>Очисти конзолу</translation> </message> <message> <source>1 &amp;hour</source> <translation>1 &amp;Сат</translation> </message> <message> <source>1 &amp;day</source> <translation>1 &amp;дан</translation> </message> <message> <source>1 &amp;week</source> <translation>1 &amp;недеља</translation> </message> <message> <source>1 &amp;year</source> <translation>1 &amp;година</translation> </message> <message> <source>&amp;Disconnect</source> <translation>&amp;Прекини везу</translation> </message> <message> <source>Ban for</source> <translation>Забрани за</translation> </message> <message> <source>&amp;Unban</source> <translation>&amp;Уклони забрану</translation> </message> <message> <source>Welcome to the %1 RPC console.</source> <translation>Добродошли на %1 RPC конзоле.</translation> </message> <message> <source>Use up and down arrows to navigate history, and %1 to clear screen.</source> <translation>Користи стрелице горе и доле за навигацију историје, и %1 зa чишћење екрана.</translation> </message> <message> <source>Type %1 for an overview of available commands.</source> <translation>Укуцај %1 за преглед доступних команди.</translation> </message> <message> <source>For more information on using this console type %1.</source> <translation>За више информација о коришћењу конзиле укуцај %1.</translation> </message> <message> <source>WARNING: Scammers have been active, telling users to type commands here, stealing their wallet contents. 
Do not use this console without fully understanding the ramifications of a command.</source> <translation>УПОЗОРЕЊЕ: Преваранти активно говоре корисницима да овде укуцају команде, том приликом краду садржај новчаника. Немојте користити конзолу без претходног разумевања последица коришћења команди.</translation> </message> <message> <source>Network activity disabled</source> <translation>Активност мреже онемогућена</translation> </message> <message> <source>Executing command without any wallet</source> <translation>Извршење команде без новчаника</translation> </message> <message> <source>Executing command using "%1" wallet</source> <translation>Извршење команде коришћењем "%1" новчаника</translation> </message> <message> <source>(node id: %1)</source> <translation>(node id: %1)</translation> </message> <message> <source>via %1</source> <translation>преко %1</translation> </message> <message> <source>never</source> <translation>никад</translation> </message> <message> <source>Inbound</source> <translation>Долазеће</translation> </message> <message> <source>Outbound</source> <translation>Одлазеће</translation> </message> <message> <source>Unknown</source> <translation>Непознато</translation> </message> </context> <context> <name>ReceiveCoinsDialog</name> <message> <source>&amp;Amount:</source> <translation>&amp;Износ:</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Ознака:</translation> </message> <message> <source>&amp;Message:</source> <translation>&amp;Порука:</translation> </message> <message> <source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the Particl network.</source> <translation>Опциона порука коју можеш прикачити уз захтев за плаћање, која ће бити приказана када захтев буде отворен.
Напомена: Порука неће бити послата са уплатом на Биткоин мрежи.</translation> </message> <message> <source>An optional label to associate with the new receiving address.</source> <translation>Опционална ознака за поистовећивање са новом примајућом адресом.</translation> </message> <message> <source>Use this form to request payments. All fields are &lt;b&gt;optional&lt;/b&gt;.</source> <translation>Користи ову форму како би захтевао уплату. Сва поља су &lt;b&gt;опционална&lt;/b&gt;.</translation> </message> <message> <source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source> <translation>Опциони износ за захтев. Остави празно или нула уколико не желиш прецизирати износ.</translation> </message> <message> <source>An optional label to associate with the new receiving address (used by you to identify an invoice). It is also attached to the payment request.</source> <translation>Опционална ознака за поистовећивање са новом адресом примаоца (користите је за идентификацију рачуна). Она је такође придодата захтеву за плаћање.</translation> </message> <message> <source>An optional message that is attached to the payment request and may be displayed to the sender.</source> <translation>Опциона порука која је придодата захтеву за плаћање и може бити приказана пошиљаоцу.</translation> </message> <message> <source>&amp;Create new receiving address</source> <translation>&amp;Направи нову адресу за примање</translation> </message> <message> <source>Clear all fields of the form.</source> <translation>Очисти сва пола форме.</translation> </message> <message> <source>Clear</source> <translation>Очисти</translation> </message> <message> <source>Native segwit addresses (aka Bech32 or BIP-173) reduce your transaction fees later on and offer better protection against typos, but old wallets don't support them. 
When unchecked, an address compatible with older wallets will be created instead.</source> <translation>Природне segwit адресе (нпр Bech32 или BIP-173) касније смањују трошкове трансакција и нуде бољу заштиту од грешака у куцању, али их стари новчаници не подржавају. Када није одабрано, биће креирана адреса компатибилна са старијим новчаницима.</translation> </message> <message> <source>Generate native segwit (Bech32) address</source> <translation>Направи segwit (Bech32) адресу</translation> </message> <message> <source>Requested payments history</source> <translation>Историја захтева за плаћање</translation> </message> <message> <source>Show the selected request (does the same as double clicking an entry)</source> <translation>Прикажи селектовани захтев (има исту сврху као и дупли клик на одговарајући унос)</translation> </message> <message> <source>Show</source> <translation>Прикажи</translation> </message> <message> <source>Remove the selected entries from the list</source> <translation>Уклони одабрани унос из листе</translation> </message> <message> <source>Remove</source> <translation>Уклони</translation> </message> <message> <source>Copy URI</source> <translation>Копирај URI</translation> </message> <message> <source>Copy label</source> <translation>Копирај ознаку</translation> </message> <message> <source>Copy message</source> <translation>Копирај поруку</translation> </message> <message> <source>Copy amount</source> <translation>Копирај износ</translation> </message> <message> <source>Could not unlock wallet.</source> <translation>Новчаник није могуће откључати.</translation> </message> </context> <context> <name>ReceiveRequestDialog</name> <message> <source>Address:</source> <translation>Адреса:</translation> </message> <message> <source>Amount:</source> <translation>Износ:</translation> </message> <message> <source>Label:</source> <translation>Етикета</translation> </message> <message> <source>Message:</source> <translation>Порука:</translation> 
</message> <message> <source>Wallet:</source> <translation>Новчаник:</translation> </message> <message> <source>Copy &amp;URI</source> <translation>Копирај &amp;URI</translation> </message> <message> <source>Copy &amp;Address</source> <translation>Копирај &amp;Адресу</translation> </message> <message> <source>&amp;Save Image...</source> <translation>&amp;Сачувај Слику...</translation> </message> <message> <source>Request payment to %1</source> <translation>Захтевај уплату ка %1</translation> </message> <message> <source>Payment information</source> <translation>Информације о плаћању</translation> </message> </context> <context> <name>RecentRequestsTableModel</name> <message> <source>Date</source> <translation>Датум</translation> </message> <message> <source>Label</source> <translation>Ознака</translation> </message> <message> <source>Message</source> <translation>Poruka</translation> </message> <message> <source>(no label)</source> <translation>(без ознаке)</translation> </message> <message> <source>(no message)</source> <translation>(нема поруке)</translation> </message> <message> <source>(no amount requested)</source> <translation>(нема захтеваног износа)</translation> </message> <message> <source>Requested</source> <translation>Захтевано</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <source>Send Coins</source> <translation>Пошаљи новчиће</translation> </message> <message> <source>Coin Control Features</source> <translation>Опција контроле новчића</translation> </message> <message> <source>Inputs...</source> <translation>Инпути...</translation> </message> <message> <source>automatically selected</source> <translation>аутоматски одабрано</translation> </message> <message> <source>Insufficient funds!</source> <translation>Недовољно средстава!</translation> </message> <message> <source>Quantity:</source> <translation>Количина:</translation> </message> <message> <source>Bytes:</source> <translation>Бајта:</translation> 
</message> <message> <source>Amount:</source> <translation>Износ:</translation> </message> <message> <source>Fee:</source> <translation>Накнада:</translation> </message> <message> <source>After Fee:</source> <translation>Након накнаде:</translation> </message> <message> <source>Change:</source> <translation>Кусур:</translation> </message> <message> <source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source> <translation>Уколико је ово активирано, али је промењена адреса празна или неважећа, промена ће бити послата на ново-генерисану адресу.</translation> </message> <message> <source>Custom change address</source> <translation>Прилагођена промењена адреса</translation> </message> <message> <source>Transaction Fee:</source> <translation>Провизија за трансакцију:</translation> </message> <message> <source>Choose...</source> <translation>Одабери...</translation> </message> <message> <source>Using the fallbackfee can result in sending a transaction that will take several hours or days (or never) to confirm. Consider choosing your fee manually or wait until you have validated the complete chain.</source> <translation>Коришћење безбедносне накнаде може резултовати у времену потребно за потврду трансакције од неколико сати или дана (или никад). Размислите о ручном одабиру провизије или сачекајте док нисте потврдили комплетан ланац.</translation> </message> <message> <source>Warning: Fee estimation is currently not possible.</source> <translation>Упозорење: Процена провизије тренутно није могућа.</translation> </message> <message> <source>Specify a custom fee per kB (1,000 bytes) of the transaction's virtual size. Note: Since the fee is calculated on a per-byte basis, a fee of "100 satoshis per kB" for a transaction size of 500 bytes (half of 1 kB) would ultimately yield a fee of only 50 satoshis.</source> <translation>Одредити прилагођену провизију по kB (1,000 битова) виртуелне величине трансакције. 
Напомена: С обзиром да се провизија рачуна на основу броја бајтова, провизија за "100 сатошија по kB" за величину трансакције од 500 бајтова (пола од 1 kB) ће аутоматски износити само 50 сатошија.</translation> </message> <message> <source>per kilobyte</source> <translation>по килобајту</translation> </message> <message> <source>Hide</source> <translation>Сакриј</translation> </message> <message> <source>Recommended:</source> <translation>Препоручено:</translation> </message> <message> <source>Custom:</source> <translation>Прилагођено:</translation> </message> <message> <source>(Smart fee not initialized yet. This usually takes a few blocks...)</source> <translation>(Паметна накнада још није покренута. Ово уобичајено траје неколико блокова...)</translation> </message> <message> <source>Send to multiple recipients at once</source> <translation>Пошаљи већем броју примаоца одједанпут</translation> </message> <message> <source>Add &amp;Recipient</source> <translation>Додај &amp;Примаоца</translation> </message> <message> <source>Clear all fields of the form.</source> <translation>Очисти сва поља форме.</translation> </message> <message> <source>Dust:</source> <translation>Прашина:</translation> </message> <message> <source>Hide transaction fee settings</source> <translation>Сакријте износ накнаде за трансакцију</translation> </message> <message> <source>When there is less transaction volume than space in the blocks, miners as well as relaying nodes may enforce a minimum fee. Paying only this minimum fee is just fine, but be aware that this can result in a never confirming transaction once there is more demand for particl transactions than the network can process.</source> <translation>Када је мањи обим трансакција од простора у блоку, рудари, као и повезани нодови могу применити минималну провизију. 
Плаћање само минималне накнаде - провизије је добро, али треба бити свестан да ово може резултовати трансакцијом која неће никада бити потврђена, у случају када је број захтева за биткоин трансакцијама већи од могућности мреже да обради.</translation> </message> <message> <source>A too low fee might result in a never confirming transaction (read the tooltip)</source> <translation>Сувише ниска накнада може резултовати у трансакцији која никад неће бити потврђена (прочитајте опис)</translation> </message> <message> <source>Confirmation time target:</source> <translation>Циљно време потврде:</translation> </message> <message> <source>Enable Replace-By-Fee</source> <translation>Омогући Замени-за-Провизију</translation> </message> <message> <source>With Replace-By-Fee (BIP-125) you can increase a transaction's fee after it is sent. Without this, a higher fee may be recommended to compensate for increased transaction delay risk.</source> <translation>Са Замени-за-Провизију (BIP-125) се може повећати висина провизије за трансакцију након што је послата. Без овога, виша провизија може бити препоручена да се смањи ризик од кашњења трансакције. 
</translation> </message> <message> <source>Clear &amp;All</source> <translation>Очисти &amp;Све</translation> </message> <message> <source>Balance:</source> <translation>Салдо:</translation> </message> <message> <source>Confirm the send action</source> <translation>Потврди акцију слања</translation> </message> <message> <source>S&amp;end</source> <translation>&amp;Пошаљи</translation> </message> <message> <source>Copy quantity</source> <translation>Копирај количину</translation> </message> <message> <source>Copy amount</source> <translation>Копирај износ</translation> </message> <message> <source>Copy fee</source> <translation>Копирај провизију</translation> </message> <message> <source>Copy after fee</source> <translation>Копирај након провизије</translation> </message> <message> <source>Copy bytes</source> <translation>Копирај бајтове</translation> </message> <message> <source>Copy dust</source> <translation>Копирај прашину</translation> </message> <message> <source>Copy change</source> <translation>Копирај промену</translation> </message> <message> <source>%1 (%2 blocks)</source> <translation>%1 (%2 блокови)</translation> </message> <message> <source>Cr&amp;eate Unsigned</source> <translation>Креирај непотписано</translation> </message> <message> <source>Creates a Partially Signed Particl Transaction (PSBT) for use with e.g. an offline %1 wallet, or a PSBT-compatible hardware wallet.</source> <translation>Креира делимично потписану Биткоин трансакцију (PSBT) за коришћење са нпр. офлајн %1 новчаником, или PSBT компатибилним хардверским новчаником. 
</translation> </message> <message> <source> from wallet '%1'</source> <translation>из новчаника '%1'</translation> </message> <message> <source>%1 to '%2'</source> <translation>%1 до '%2'</translation> </message> <message> <source>%1 to %2</source> <translation>%1 до %2</translation> </message> <message> <source>Do you want to draft this transaction?</source> <translation>Да ли желите да саставите ову трансакцију?</translation> </message> <message> <source>Are you sure you want to send?</source> <translation>Да ли сте сигурни да желите да пошаљете?</translation> </message> <message> <source>Save Transaction Data</source> <translation>Сачувај Податке Трансакције</translation> </message> <message> <source>Partially Signed Transaction (Binary) (*.psbt)</source> <translation>Делимично Потписана Трансакција (Binary) (*.psbt)</translation> </message> <message> <source>PSBT saved</source> <translation>PSBT сачуван</translation> </message> <message> <source>or</source> <translation>или</translation> </message> <message> <source>You can increase the fee later (signals Replace-By-Fee, BIP-125).</source> <translation>Можете повећати провизију касније (сигнали Замени-са-Провизијом, BIP-125).</translation> </message> <message> <source>Please, review your transaction.</source> <translation>Молим, размотрите вашу трансакцију.</translation> </message> <message> <source>Transaction fee</source> <translation>Провизија за трансакцију</translation> </message> <message> <source>Not signalling Replace-By-Fee, BIP-125.</source> <translation>Не сигнализира Замени-са-Провизијом, BIP-125.</translation> </message> <message> <source>Total Amount</source> <translation>Укупан износ</translation> </message> <message> <source>To review recipient list click "Show Details..."</source> <translation>Да би сте размотрили листу примаоца кликните на "Прикажи детаље..."</translation> </message> <message> <source>Confirm send coins</source> <translation>Потврдите слање новчића</translation> </message> 
<message> <source>Confirm transaction proposal</source> <translation>Потврдите предлог трансакције</translation> </message> <message> <source>Send</source> <translation>Пошаљи</translation> </message> <message> <source>Watch-only balance:</source> <translation>Само-гледање Стање:</translation> </message> <message> <source>The recipient address is not valid. Please recheck.</source> <translation>Адреса примаоца није валидна. Молим проверите поново.</translation> </message> <message> <source>The amount to pay must be larger than 0.</source> <translation>Овај износ за плаћање мора бити већи од 0.</translation> </message> <message> <source>The amount exceeds your balance.</source> <translation>Овај износ је већи од вашег салда.</translation> </message> <message> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Укупни износ премашује ваш салдо, када се %1 провизија за трансакцију укључи у износ.</translation> </message> <message> <source>Duplicate address found: addresses should only be used once each.</source> <translation>Пронађена је дуплирана адреса: адресе се требају користити само једном.</translation> </message> <message> <source>Transaction creation failed!</source> <translation>Израда трансакције није успела!</translation> </message> <message> <source>A fee higher than %1 is considered an absurdly high fee.</source> <translation>Провизија већа од %1 се сматра апсурдно високом провизијом.</translation> </message> <message> <source>Payment request expired.</source> <translation>Захтев за плаћање је истекао.</translation> </message> <message numerus="yes"> <source>Estimated to begin confirmation within %n block(s).</source> <translation><numerusform>Процењује се да ће започети потврду унутар %n блока.</numerusform><numerusform>Процењује се да ће започети потврду унутар %n блока.</numerusform><numerusform>Процењује се да ће започети потврду унутар %n блокова.</numerusform></translation> </message> <message> 
<source>Warning: Invalid Particl address</source> <translation>Упозорење: Неважећа Биткоин адреса</translation> </message> <message> <source>Warning: Unknown change address</source> <translation>Упозорење: Непозната адреса за промену</translation> </message> <message> <source>Confirm custom change address</source> <translation>Потврдите прилагођену адресу за промену</translation> </message> <message> <source>The address you selected for change is not part of this wallet. Any or all funds in your wallet may be sent to this address. Are you sure?</source> <translation>Адреса коју сте одабрали за промену није део овог новчаника. Део или цео износ вашег новчаника може бити послат на ову адресу. Да ли сте сигурни?</translation> </message> <message> <source>(no label)</source> <translation>(без ознаке)</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <source>A&amp;mount:</source> <translation>&amp;Износ:</translation> </message> <message> <source>Pay &amp;To:</source> <translation>Плати &amp;За:</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Ознака</translation> </message> <message> <source>Choose previously used address</source> <translation>Одабери претходно коришћену адресу</translation> </message> <message> <source>The Particl address to send the payment to</source> <translation>Биткоин адреса на коју се шаље уплата</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Налепите адресу из базе за копирање</translation> </message> <message> <source>Alt+P</source> <translation>Alt+П</translation> </message> <message> <source>Remove this entry</source> <translation>Уклоните овај унос</translation> </message> <message> <source>The amount to send in the selected unit</source> <translation>Износ који ће бити послат у одабрану јединицу</translation> </message> <message> <source>The fee will 
be deducted from the amount being sent. The recipient will receive less particl than you enter in the amount field. If multiple recipients are selected, the fee is split equally.</source> <translation>Провизија ће бити одузета од износа који је послат. Примаоц ће добити мање биткоина него што је унесено у поље за износ. Уколико је одабрано више примаоца, провизија се дели равномерно.</translation> </message> <message> <source>S&amp;ubtract fee from amount</source> <translation>&amp;Одузми провизију од износа</translation> </message> <message> <source>Use available balance</source> <translation>Користи расположиви салдо</translation> </message> <message> <source>Message:</source> <translation>Порука:</translation> </message> <message> <source>This is an unauthenticated payment request.</source> <translation>Ово је неовлашћени захтев за плаћање.</translation> </message> <message> <source>This is an authenticated payment request.</source> <translation>Ово је овлашћени захтев за плаћање.</translation> </message> <message> <source>Enter a label for this address to add it to the list of used addresses</source> <translation>Унесите ознаку за ову адресу да бисте је додали на листу коришћених адреса</translation> </message> <message> <source>A message that was attached to the particl: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the Particl network.</source> <translation>Порука која је приложена биткоину: URI која ће бити сачувана уз трансакцију ради референце. 
Напомена: Ова порука неће бити послата преко Биткоин мреже.</translation> </message> <message> <source>Pay To:</source> <translation>Плати ка:</translation> </message> <message> <source>Memo:</source> <translation>Мемо:</translation> </message> </context> <context> <name>ShutdownWindow</name> <message> <source>%1 is shutting down...</source> <translation>%1 се искључује</translation> </message> <message> <source>Do not shut down the computer until this window disappears.</source> <translation>Немојте искључити рачунар док овај прозор не нестане.</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <source>Signatures - Sign / Verify a Message</source> <translation>Потписи - Потпиши / Потврди поруку</translation> </message> <message> <source>You can sign messages/agreements with your addresses to prove you can receive particl sent to them. Be careful not to sign anything vague or random, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Можете потписати поруку/споразум са вашом адресом да бисте доказали да можете примити биткоин послат ка њима. Будите опрезни да не потписујете ништа нејасно или случајно, јер се може десити напад крађе идентитета, да потпишете ваш идентитет нападачу.
Потпишите само потпуно детаљне изјаве са којима се слажете.</translation> </message> <message> <source>The Particl address to sign the message with</source> <translation>Биткоин адреса са којом ћете потписати поруку</translation> </message> <message> <source>Choose previously used address</source> <translation>Промени претходно коришћену адресу</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Налепите адресу из базе за копирање</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>Enter the message you want to sign here</source> <translation>Унесите поруку коју желите да потпишете овде</translation> </message> <message> <source>Signature</source> <translation>Потпис</translation> </message> <message> <source>Copy the current signature to the system clipboard</source> <translation>Копирајте тренутни потпис у системску базу за копирање</translation> </message> <message> <source>Sign the message to prove you own this Particl address</source> <translation>Потпишите поруку да докажете да сте власник ове Биткоин адресе</translation> </message> <message> <source>Sign &amp;Message</source> <translation>Потпис &amp;Порука</translation> </message> <message> <source>Reset all sign message fields</source> <translation>Поништите сва поља за потписивање поруке</translation> </message> <message> <source>Clear &amp;All</source> <translation>Очисти &amp;Све</translation> </message> <message> <source>&amp;Verify Message</source> <translation>&amp;Потврди поруку</translation> </message> <message> <source>Enter the receiver's address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack. 
Note that this only proves the signing party receives with the address, it cannot prove sendership of any transaction!</source> <translation>Унесите адресу примаоца, поруку (осигурајте да тачно копирате прекиде линија, размаке, картице итд) и потпишите испод да потврдите поруку. Будите опрезни да не убаците више у потпис од онога што је у потписаној поруци, да би сте избегли напад посредника. Имајте на уму да потпис само доказује да потписник прима са потписаном адресом, а не може да докаже слање било које трансакције!</translation> </message> <message> <source>The Particl address the message was signed with</source> <translation>Биткоин адреса са којом је потписана порука</translation> </message> <message> <source>The signed message to verify</source> <translation>Потписана порука за потврду</translation> </message> <message> <source>The signature given when the message was signed</source> <translation>Потпис који је дат приликом потписивања поруке</translation> </message> <message> <source>Verify the message to ensure it was signed with the specified Particl address</source> <translation>Потврдите поруку да осигурате да је потписана са одговарајућом Биткоин адресом</translation> </message> <message> <source>Verify &amp;Message</source> <translation>Потврди &amp;Поруку</translation> </message> <message> <source>Reset all verify message fields</source> <translation>Поништите сва поља за потврду поруке</translation> </message> <message> <source>Click "Sign Message" to generate signature</source> <translation>Притисни "Потпиши поруку" за израду потписа</translation> </message> <message> <source>The entered address is invalid.</source> <translation>Унесена адреса није важећа.</translation> </message> <message> <source>Please check the address and try again.</source> <translation>Молим проверите адресу и покушајте поново.</translation> </message> <message> <source>The entered address does not refer to a key.</source> <translation>Унесена адреса се не односи на 
кључ.</translation> </message> <message> <source>Wallet unlock was cancelled.</source> <translation>Откључавање новчаника је отказано.</translation> </message> <message> <source>No error</source> <translation>Нема грешке</translation> </message> <message> <source>Private key for the entered address is not available.</source> <translation>Приватни кључ за унесену адресу није доступан.</translation> </message> <message> <source>Message signing failed.</source> <translation>Потписивање поруке није успело.</translation> </message> <message> <source>Message signed.</source> <translation>Порука је потписана.</translation> </message> <message> <source>The signature could not be decoded.</source> <translation>Потпис не може бити декодиран.</translation> </message> <message> <source>Please check the signature and try again.</source> <translation>Молим проверите потпис и покушајте поново.</translation> </message> <message> <source>The signature did not match the message digest.</source> <translation>Потпис се не подудара са прегледом порука.</translation> </message> <message> <source>Message verification failed.</source> <translation>Провера поруке није успела.</translation> </message> <message> <source>Message verified.</source> <translation>Порука је проверена.</translation> </message> </context> <context> <name>TrafficGraphWidget</name> <message> <source>KB/s</source> <translation>KB/s</translation> </message> </context> <context> <name>TransactionDesc</name> <message numerus="yes"> <source>Open for %n more block(s)</source> <translation><numerusform>Отворено за још %n блок.</numerusform><numerusform>Отворено за још %n блока</numerusform><numerusform>Отворено за још %n блокова</numerusform></translation> </message> <message> <source>Open until %1</source> <translation>Отворено до %1</translation> </message> <message> <source>0/unconfirmed, %1</source> <translation>0/непотврђено, %1</translation> </message> <message> <source>in memory pool</source> <translation>у удруженој
меморији</translation> </message> <message> <source>not in memory pool</source> <translation>није у удруженој меморији</translation> </message> <message> <source>abandoned</source> <translation>напуштено</translation> </message> <message> <source>%1/unconfirmed</source> <translation>%1/непотврђено</translation> </message> <message> <source>%1 confirmations</source> <translation>%1 порврде</translation> </message> <message> <source>Status</source> <translation>Статус</translation> </message> <message> <source>Date</source> <translation>Датум</translation> </message> <message> <source>Source</source> <translation>Извор</translation> </message> <message> <source>Generated</source> <translation>Генерисано</translation> </message> <message> <source>From</source> <translation>Од</translation> </message> <message> <source>unknown</source> <translation>непознато</translation> </message> <message> <source>To</source> <translation>За</translation> </message> <message> <source>own address</source> <translation>сопствена адреса</translation> </message> <message> <source>watch-only</source> <translation>гледај-само</translation> </message> <message> <source>label</source> <translation>ознака</translation> </message> <message> <source>Credit</source> <translation>Заслуге</translation> </message> <message numerus="yes"> <source>matures in %n more block(s)</source> <translation><numerusform>сазрева за %n блок</numerusform><numerusform>сазрева за %n блока</numerusform><numerusform>сазрева за %n блокова</numerusform></translation> </message> <message> <source>not accepted</source> <translation>није прихваћено</translation> </message> <message> <source>Debit</source> <translation>Задужење</translation> </message> <message> <source>Total debit</source> <translation>Укупно задужење</translation> </message> <message> <source>Total credit</source> <translation>Укупни кредит</translation> </message> <message> <source>Transaction fee</source> <translation>Провизија за 
трансакцију</translation> </message> <message> <source>Net amount</source> <translation>Нето износ</translation> </message> <message> <source>Message</source> <translation>Порука</translation> </message> <message> <source>Comment</source> <translation>Коментар</translation> </message> <message> <source>Transaction ID</source> <translation>ID Трансакције</translation> </message> <message> <source>Transaction total size</source> <translation>Укупна величина трансакције</translation> </message> <message> <source>Transaction virtual size</source> <translation>Виртуелна величина трансакције</translation> </message> <message> <source>Output index</source> <translation>Излазни индекс</translation> </message> <message> <source> (Certificate was not verified)</source> <translation>(Сертификат још није проверен)</translation> </message> <message> <source>Merchant</source> <translation>Трговац</translation> </message> <message> <source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Генерисани новчићи морају доспети %1 блокова пре него што могу бити потрошени. Када генеришете овај блок, он се емитује у мрежу, да би био придодат на ланац блокова. Уколико не успе да се придода на ланац, његово стање се мења у "није прихваћен" и неће га бити могуће потрошити. 
Ово се може повремено десити уколико други чвор генерише блок у периоду од неколико секунди од вашег.</translation> </message> <message> <source>Debug information</source> <translation>Информације о оклањању грешака</translation> </message> <message> <source>Transaction</source> <translation>Трансакције</translation> </message> <message> <source>Inputs</source> <translation>Инпути</translation> </message> <message> <source>Amount</source> <translation>Износ</translation> </message> <message> <source>true</source> <translation>тачно</translation> </message> <message> <source>false</source> <translation>нетачно</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <source>This pane shows a detailed description of the transaction</source> <translation>Овај одељак приказује детањан приказ трансакције</translation> </message> <message> <source>Details for %1</source> <translation>Детаљи за %1</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <source>Date</source> <translation>Датум</translation> </message> <message> <source>Type</source> <translation>Тип</translation> </message> <message> <source>Label</source> <translation>Ознака</translation> </message> <message numerus="yes"> <source>Open for %n more block(s)</source> <translation><numerusform>Отворено за још %n блок </numerusform><numerusform>Отворено за још %n блока</numerusform><numerusform> Отворено за још %n блокова</numerusform></translation> </message> <message> <source>Open until %1</source> <translation>Отворено до %1</translation> </message> <message> <source>Unconfirmed</source> <translation>Непотврђено</translation> </message> <message> <source>Abandoned</source> <translation>Напуштено</translation> </message> <message> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation>Потврђивање у току (%1 од %2 препоручене потврде)</translation> </message> <message> <source>Confirmed (%1 confirmations)</source> 
<translation>Потврђена (%1 потврђених)</translation> </message> <message> <source>Conflicted</source> <translation>Неусаглашен</translation> </message> <message> <source>Immature (%1 confirmations, will be available after %2)</source> <translation>Није доспео (%1 потврде, биће доступан након %2)</translation> </message> <message> <source>Generated but not accepted</source> <translation>Генерисан али није прихваћен</translation> </message> <message> <source>Received with</source> <translation>Примљен са</translation> </message> <message> <source>Received from</source> <translation>Примљено од</translation> </message> <message> <source>Sent to</source> <translation>Послато ка</translation> </message> <message> <source>Payment to yourself</source> <translation>Уплата самом себи</translation> </message> <message> <source>Mined</source> <translation>Рударено</translation> </message> <message> <source>watch-only</source> <translation>гледај-само</translation> </message> <message> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <source>(no label)</source> <translation>(без ознаке)</translation> </message> <message> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Статус трансакције. 
Пређи мишем преко поља за приказ броја трансакција.</translation> </message> <message> <source>Date and time that the transaction was received.</source> <translation>Датум и време пријема трансакције</translation> </message> <message> <source>Type of transaction.</source> <translation>Тип трансакције.</translation> </message> <message> <source>Whether or not a watch-only address is involved in this transaction.</source> <translation>Без обзира да ли је у ову трансакције укључена или није - адреса само за гледање.</translation> </message> <message> <source>User-defined intent/purpose of the transaction.</source> <translation>Намена / сврха трансакције коју одређује корисник.</translation> </message> <message> <source>Amount removed from or added to balance.</source> <translation>Износ одбијен или додат салду.</translation> </message> </context> <context> <name>TransactionView</name> <message> <source>All</source> <translation>Све</translation> </message> <message> <source>Today</source> <translation>Данас</translation> </message> <message> <source>This week</source> <translation>Oве недеље</translation> </message> <message> <source>This month</source> <translation>Овог месеца</translation> </message> <message> <source>Last month</source> <translation>Претходног месеца</translation> </message> <message> <source>This year</source> <translation>Ове године</translation> </message> <message> <source>Range...</source> <translation>Опсег...</translation> </message> <message> <source>Received with</source> <translation>Примљен са...</translation> </message> <message> <source>Sent to</source> <translation>Послат ка</translation> </message> <message> <source>To yourself</source> <translation>Теби</translation> </message> <message> <source>Mined</source> <translation>Рударено</translation> </message> <message> <source>Other</source> <translation>Други</translation> </message> <message> <source>Enter address, transaction id, or label to search</source> <translation>Унесите 
адресу, ознаку трансакције, или назив за претрагу</translation> </message> <message> <source>Min amount</source> <translation>Минимални износ</translation> </message> <message> <source>Abandon transaction</source> <translation>Напусти трансакцију</translation> </message> <message> <source>Increase transaction fee</source> <translation>Повећај провизију трансакције</translation> </message> <message> <source>Copy address</source> <translation>Копирај адресу</translation> </message> <message> <source>Copy label</source> <translation>Копирај ознаку</translation> </message> <message> <source>Copy amount</source> <translation>Копирај износ</translation> </message> <message> <source>Copy transaction ID</source> <translation>Копирај идентификациони број трансакције</translation> </message> <message> <source>Copy raw transaction</source> <translation>Копирајте необрађену трансакцију</translation> </message> <message> <source>Copy full transaction details</source> <translation>Копирајте потпуне детаље трансакције</translation> </message> <message> <source>Edit label</source> <translation>Измени ознаку</translation> </message> <message> <source>Show transaction details</source> <translation>Прикажи детаље трансакције</translation> </message> <message> <source>Export Transaction History</source> <translation>Извези Детаље Трансакције</translation> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Фајл раздвојен зарезом (*.csv)</translation> </message> <message> <source>Confirmed</source> <translation>Потврђено</translation> </message> <message> <source>Watch-only</source> <translation>Само-гледање</translation> </message> <message> <source>Date</source> <translation>Датум</translation> </message> <message> <source>Type</source> <translation>Тип</translation> </message> <message> <source>Label</source> <translation>Ознака</translation> </message> <message> <source>Address</source> <translation>Адреса</translation> </message> <message> 
<source>ID</source> <translation>ID</translation> </message> <message> <source>Exporting Failed</source> <translation>Извоз Неуспешан</translation> </message> <message> <source>There was an error trying to save the transaction history to %1.</source> <translation>Десила се грешка приликом покушаја да се сними историја трансакција на %1.</translation> </message> <message> <source>Exporting Successful</source> <translation>Извоз Успешан</translation> </message> <message> <source>The transaction history was successfully saved to %1.</source> <translation>Историја трансакција је успешно снимљена на %1.</translation> </message> <message> <source>Range:</source> <translation>Опсег:</translation> </message> <message> <source>to</source> <translation>до</translation> </message> </context> <context> <name>UnitDisplayStatusBarControl</name> <message> <source>Unit to show amounts in. Click to select another unit.</source> <translation>Јединица у којој се приказују износи. Притисни да се прикаже друга јединица.</translation> </message> </context> <context> <name>WalletController</name> <message> <source>Close wallet</source> <translation>Затвори новчаник</translation> </message> <message> <source>Are you sure you wish to close the wallet &lt;i&gt;%1&lt;/i&gt;?</source> <translation>Да ли сте сигурни да желите да затворите новчаник &lt;i&gt;%1&lt;/i&gt;?</translation> </message> <message> <source>Closing the wallet for too long can result in having to resync the entire chain if pruning is enabled.</source> <translation>Услед затварања новчаника на дугачки период времена може се десити да је потребна поновна синхронизација комплетног ланца, уколико је дозвољено резање.</translation> </message> <message> <source>Close all wallets</source> <translation>Затвори све новчанике</translation> </message> <message> <source>Are you sure you wish to close all wallets?</source> <translation>Да ли сигурно желите да затворите све новчанике?</translation> </message> </context> <context> 
<name>WalletFrame</name> <message> <source>Create a new wallet</source> <translation>Направи нови новчаник</translation> </message> </context> <context> <name>WalletModel</name> <message> <source>Send Coins</source> <translation>Слање новца</translation> </message> <message> <source>Fee bump error</source> <translation>Изненадна грешка у накнади</translation> </message> <message> <source>Increasing transaction fee failed</source> <translation>Повећавање провизије за трансакцију није успело</translation> </message> <message> <source>Do you want to increase the fee?</source> <translation>Да ли желиш да увећаш накнаду?</translation> </message> <message> <source>Do you want to draft a transaction with fee increase?</source> <translation>Да ли желите да саставите трансакцију са повећаном провизијом?</translation> </message> <message> <source>Current fee:</source> <translation>Тренутна накнада:</translation> </message> <message> <source>Increase:</source> <translation>Увећај:</translation> </message> <message> <source>New fee:</source> <translation>Нова накнада:</translation> </message> <message> <source>Confirm fee bump</source> <translation>Потврдите ударну провизију</translation> </message> <message> <source>Can't draft transaction.</source> <translation>Није могуће саставити трансакцију.</translation> </message> <message> <source>PSBT copied</source> <translation>PSBT је копиран</translation> </message> <message> <source>Can't sign transaction.</source> <translation>Није могуће потписати трансакцију.</translation> </message> <message> <source>Could not commit transaction</source> <translation>Трансакција није могућа</translation> </message> <message> <source>default wallet</source> <translation>подразумевани новчаник</translation> </message> </context> <context> <name>WalletView</name> <message> <source>&amp;Export</source> <translation>&amp;Извези</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Извези 
податке из одабране картице у фајл</translation> </message> <message> <source>Error</source> <translation>Грешка</translation> </message> <message> <source>Unable to decode PSBT from clipboard (invalid base64)</source> <translation>Није могуће декодирати PSBT из клипборд-а (неважећи base64)</translation> </message> <message> <source>Load Transaction Data</source> <translation>Учитај Податке Трансакције</translation> </message> <message> <source>Partially Signed Transaction (*.psbt)</source> <translation>Делимично Потписана Трансакција (*.psbt)</translation> </message> <message> <source>PSBT file must be smaller than 100 MiB</source> <translation>PSBT фајл мора бити мањи од 100 MiB</translation> </message> <message> <source>Unable to decode PSBT</source> <translation>Немогуће декодирати PSBT</translation> </message> <message> <source>Backup Wallet</source> <translation>Резервна копија новчаника</translation> </message> <message> <source>Wallet Data (*.dat)</source> <translation>Датотека новчаника (*.dat)</translation> </message> <message> <source>Backup Failed</source> <translation>Резервна копија није успела</translation> </message> <message> <source>There was an error trying to save the wallet data to %1.</source> <translation>Десила се грешка приликом покушаја да се сними датотека новчаника на %1.</translation> </message> <message> <source>Backup Successful</source> <translation>Резервна копија је успела</translation> </message> <message> <source>The wallet data was successfully saved to %1.</source> <translation>Датотека новчаника је успешно снимљена на %1.</translation> </message> <message> <source>Cancel</source> <translation>Откажи</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <source>Distributed under the MIT software license, see the accompanying file %s or %s</source> <translation>Дистрибуирано под MIT софтверском лиценцом, погледајте придружени документ %s или %s</translation> </message> <message> <source>Prune 
configured below the minimum of %d MiB. Please use a higher number.</source> <translation>Скраћивање је конфигурисано испод минимума од %d MiB. Молимо користите већи број.</translation> </message> <message> <source>Prune: last wallet synchronisation goes beyond pruned data. You need to -reindex (download the whole blockchain again in case of pruned node)</source> <translation>Скраћивање: последња синхронизација иде преко одрезаних података. Потребно је урадити ре-индексирање (преузети комплетан ланац блокова поново у случају одсеченог чвора)</translation> </message> <message> <source>Pruning blockstore...</source> <translation>Скраћивање спремљених блокова...</translation> </message> <message> <source>Unable to start HTTP server. See debug log for details.</source> <translation>Стартовање HTTP сервера није могуће. Погледати дневник исправљених грешака за детаље.</translation> </message> <message> <source>The %s developers</source> <translation>%s девелопери</translation> </message> <message> <source>Cannot obtain a lock on data directory %s. %s is probably already running.</source> <translation>Директоријум података се не може закључати %s. %s је вероватно већ покренут.</translation> </message> <message> <source>Cannot provide specific connections and have addrman find outgoing connections at the same.</source> <translation>Не може се обезбедити одређена конекција и да addrman нађе одлазне конекције у исто време.</translation> </message> <message> <source>Error reading %s! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Грешка у читању %s! Сви кључеви су прочитани коректно, али подаци о трансакцији или уноси у адресар могу недостајати или бити нетачни.</translation> </message> <message> <source>Please check that your computer's date and time are correct! If your clock is wrong, %s will not work properly.</source> <translation>Молим проверите да су време и датум на вашем рачунару тачни. 
Уколико је сат нетачан, %s неће радити исправно.</translation> </message> <message> <source>Please contribute if you find %s useful. Visit %s for further information about the software.</source> <translation>Молим донирајте, уколико сматрате %s корисним. Посетите %s за више информација о софтверу.</translation> </message> <message> <source>The block database contains a block which appears to be from the future. This may be due to your computer's date and time being set incorrectly. Only rebuild the block database if you are sure that your computer's date and time are correct</source> <translation>База података о блоковима садржи блок, за који се чини да је из будућности. Ово може бити услед тога што су време и датум на вашем рачунару нису подешени коректно. Покушајте обнову базе података о блоковима, само уколико сте сигурни да су време и датум на вашем рачунару исправни.</translation> </message> <message> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation>Ово је тестна верзија пред издавање - користите на ваш ризик - не користити за рударење или трговачку примену</translation> </message> <message> <source>This is the transaction fee you may discard if change is smaller than dust at this level</source> <translation>Ово је накнада за трансакцију коју можете одбацити уколико је мања од нивоа прашине</translation> </message> <message> <source>Unable to replay blocks. You will need to rebuild the database using -reindex-chainstate.</source> <translation>Блокове није могуће поново репродуковати. Ви ћете морати да обновите базу података користећи -reindex-chainstate.</translation> </message> <message> <source>Unable to rewind the database to a pre-fork state. You will need to redownload the blockchain</source> <translation>Није могуће вратити базу података на стање пре форк-а. 
Ви ћете морати да урадите поновно преузимање ланца блокова.</translation> </message> <message> <source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source> <translation>Упозорење: Изгледа да не постоји пуна сагласност на мрежи. Изгледа да одређени рудари имају проблеме.</translation> </message> <message> <source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Упозорење: Изгледа да се ми у потпуности не слажемо са нашим чворовима! Можда постоји потреба да урадите надоградњу, или други чворови морају да ураде надоградњу.</translation> </message> <message> <source>-maxmempool must be at least %d MB</source> <translation>-maxmempool мора бити минимално %d MB</translation> </message> <message> <source>Cannot resolve -%s address: '%s'</source> <translation>Не могу решити -%s адреса: '%s'</translation> </message> <message> <source>Change index out of range</source> <translation>Промењен индекс изван домета</translation> </message> <message> <source>Config setting for %s only applied on %s network when in [%s] section.</source> <translation>Подешавање конфигурације за %s је само примењено на %s мрежи када је у [%s] секцији.</translation> </message> <message> <source>Copyright (C) %i-%i</source> <translation>Ауторско право (C) %i-%i</translation> </message> <message> <source>Corrupted block database detected</source> <translation>Детектована је оштећена база података блокова</translation> </message> <message> <source>Could not find asmap file %s</source> <translation>Не могу пронаћи датотеку asmap %s</translation> </message> <message> <source>Could not parse asmap file %s</source> <translation>Не могу рашчланити датотеку asmap %s</translation> </message> <message> <source>Do you want to rebuild the block database now?</source> <translation>Да ли желите да сада обновите базу података блокова?</translation> </message> <message> 
<source>Error initializing block database</source> <translation>Грешка у иницијализацији базе података блокова</translation> </message> <message> <source>Error initializing wallet database environment %s!</source> <translation>Грешка код иницијализације окружења базе података новчаника %s!</translation> </message> <message> <source>Error loading %s</source> <translation>Грешка током учитавања %s</translation> </message> <message> <source>Error loading %s: Private keys can only be disabled during creation</source> <translation>Грешка током учитавања %s: Приватни кључеви могу бити онемогућени само приликом креирања</translation> </message> <message> <source>Error loading %s: Wallet corrupted</source> <translation>Грешка током учитавања %s: Новчаник је оштећен</translation> </message> <message> <source>Error loading %s: Wallet requires newer version of %s</source> <translation>Грешка током учитавања %s: Новчаник захтева новију верзију %s</translation> </message> <message> <source>Error loading block database</source> <translation>Грешка у учитавању базе података блокова</translation> </message> <message> <source>Error opening block database</source> <translation>Грешка приликом отварања базе података блокова</translation> </message> <message> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Преслушавање није успело ни на једном порту. Користите -listen=0 уколико желите то.</translation> </message> <message> <source>Failed to rescan the wallet during initialization</source> <translation>Није успело поновно скенирање новчаника приликом иницијализације.</translation> </message> <message> <source>Importing...</source> <translation>Увоз у току...</translation> </message> <message> <source>Incorrect or no genesis block found. Wrong datadir for network?</source> <translation>Почетни блок је погрешан или се не може пронаћи. Погрешан datadir за мрежу?</translation> </message> <message> <source>Initialization sanity check failed. 
%s is shutting down.</source> <translation>Провера исправности иницијализације није успела. %s се искључује.</translation> </message> <message> <source>Invalid P2P permission: '%s'</source> <translation>Неважећа P2P дозвола: '%s'</translation> </message> <message> <source>Invalid amount for -%s=&lt;amount&gt;: '%s'</source> <translation>Неважећи износ за %s=&lt;amount&gt;: '%s'</translation> </message> <message> <source>Invalid amount for -discardfee=&lt;amount&gt;: '%s'</source> <translation>Неважећи износ за -discardfee=&lt;amount&gt;: '%s'</translation> </message> <message> <source>Invalid amount for -fallbackfee=&lt;amount&gt;: '%s'</source> <translation>Неважећи износ за -fallbackfee=&lt;amount&gt;: '%s'</translation> </message> <message> <source>Specified blocks directory "%s" does not exist.</source> <translation>Наведени директоријум блокова "%s" не постоји.</translation> </message> <message> <source>Unknown address type '%s'</source> <translation>Непознати тип адресе '%s'</translation> </message> <message> <source>Unknown change type '%s'</source> <translation>Непознати тип промене '%s'</translation> </message> <message> <source>Upgrading txindex database</source> <translation>Надоградња txindex базе података</translation> </message> <message> <source>Loading P2P addresses...</source> <translation>Учитавање P2P адреса...</translation> </message> <message> <source>Loading banlist...</source> <translation>Учитавање листе забрана...</translation> </message> <message> <source>Not enough file descriptors available.</source> <translation>Нема довољно доступних дескриптора датотеке.</translation> </message> <message> <source>Prune cannot be configured with a negative value.</source> <translation>Скраћење се не може конфигурисати са негативном вредношћу.</translation> </message> <message> <source>Prune mode is incompatible with -txindex.</source> <translation>Мод скраћивања није компатибилан са -txindex.</translation> </message> <message> <source>Replaying 
blocks...</source> <translation>Поновно репродуковање блокова...</translation> </message> <message> <source>Rewinding blocks...</source> <translation>Премотавање блокова...</translation> </message> <message> <source>The source code is available from %s.</source> <translation>Изворни код је доступан из %s.</translation> </message> <message> <source>Transaction fee and change calculation failed</source> <translation>Провизија за трансакцију и промена израчуна није успела</translation> </message> <message> <source>Unable to bind to %s on this computer. %s is probably already running.</source> <translation>Није могуће повезивање са %s на овом рачунару. %s је вероватно већ покренут.</translation> </message> <message> <source>Unable to generate keys</source> <translation>Није могуће генерисати кључеве</translation> </message> <message> <source>Unsupported logging category %s=%s.</source> <translation>Категорија записа није подржана %s=%s.</translation> </message> <message> <source>Upgrading UTXO database</source> <translation>Надоградња UTXO базе података</translation> </message> <message> <source>User Agent comment (%s) contains unsafe characters.</source> <translation>Коментар агента корисника (%s) садржи небезбедне знакове.</translation> </message> <message> <source>Verifying blocks...</source> <translation>Потврда блокова у току...</translation> </message> <message> <source>Wallet needed to be rewritten: restart %s to complete</source> <translation>Новчаник треба да буде преписан: поновно покрените %s да завршите</translation> </message> <message> <source>Error: Listening for incoming connections failed (listen returned error %s)</source> <translation>Грешка: Претрага за долазним конекцијама није успела (претрага враћа грешку %s)</translation> </message> <message> <source>Invalid amount for -maxtxfee=&lt;amount&gt;: '%s' (must be at least the minrelay fee of %s to prevent stuck transactions)</source> <translation>Неважећи износ за -maxtxfee=&lt;amount&gt;: '%s' (мора 
бити minrelay провизија од %s да би се спречило да се трансакција заглави)</translation> </message> <message> <source>The transaction amount is too small to send after the fee has been deducted</source> <translation>Износ трансакције је толико мали за слање након што се одузме провизија</translation> </message> <message> <source>You need to rebuild the database using -reindex to go back to unpruned mode. This will redownload the entire blockchain</source> <translation>Обновите базу података користећи -reindex да би се вратили у нескраћени мод. Ово ће урадити поновно преузимање комплетног ланца података</translation> </message> <message> <source>Disk space is too low!</source> <translation>Премало простора на диску!</translation> </message> <message> <source>Error reading from database, shutting down.</source> <translation>Грешка приликом читања из базе података, искључивање у току.</translation> </message> <message> <source>Error upgrading chainstate database</source> <translation>Грешка приликом надоградње базе података стања ланца</translation> </message> <message> <source>Error: Disk space is low for %s</source> <translation>Грешка: Простор на диску је мали за %s</translation> </message> <message> <source>Invalid -onion address or hostname: '%s'</source> <translation>Неважећа -onion адреса или име хоста: '%s'</translation> </message> <message> <source>Invalid -proxy address or hostname: '%s'</source> <translation>Неважећа -proxy адреса или име хоста: '%s'</translation> </message> <message> <source>Invalid amount for -paytxfee=&lt;amount&gt;: '%s' (must be at least %s)</source> <translation>Неважећи износ за -paytxfee=&lt;amount&gt;: '%s' (мора бити бар %s)</translation> </message> <message> <source>Invalid netmask specified in -whitelist: '%s'</source> <translation>Неважећа мрежна маска наведена у -whitelist: '%s'</translation> </message> <message> <source>Need to specify a port with -whitebind: '%s'</source> <translation>Ви морате одредити порт са -whitebind: 
'%s'</translation> </message> <message> <source>Prune mode is incompatible with -blockfilterindex.</source> <translation>Мод скраћења је некомпатибилна са -blockfilterindex.</translation> </message> <message> <source>Reducing -maxconnections from %d to %d, because of system limitations.</source> <translation>Смањивање -maxconnections са %d на %d, због ограничења система.</translation> </message> <message> <source>Section [%s] is not recognized.</source> <translation>Одељак [%s] није препознат.</translation> </message> <message> <source>Signing transaction failed</source> <translation>Потписивање трансакције није успело</translation> </message> <message> <source>Specified -walletdir "%s" does not exist</source> <translation>Наведени -walletdir "%s" не постоји</translation> </message> <message> <source>Specified -walletdir "%s" is a relative path</source> <translation>Наведени -walletdir "%s" је релативна путања</translation> </message> <message> <source>Specified -walletdir "%s" is not a directory</source> <translation>Наведени -walletdir "%s" није директоријум</translation> </message> <message> <source>The specified config file %s does not exist </source> <translation>Наведени конфигурациони документ %s не постоји </translation> </message> <message> <source>The transaction amount is too small to pay the fee</source> <translation>Износ трансакције је сувише мали да се плати трансакција</translation> </message> <message> <source>This is experimental software.</source> <translation>Ово је експерименталн софтвер.</translation> </message> <message> <source>Transaction amount too small</source> <translation>Износ трансакције премали.</translation> </message> <message> <source>Transaction too large</source> <translation>Трансакција превелика.</translation> </message> <message> <source>Unable to bind to %s on this computer (bind returned error %s)</source> <translation>Није могуће повезати %s на овом рачунару (веза враћа грешку %s)</translation> </message> <message> 
<source>Unable to create the PID file '%s': %s</source> <translation>Стварање PID документа '%s': %s није могуће</translation> </message> <message> <source>Unable to generate initial keys</source> <translation>Генерисање кључева за иницијализацију није могуће</translation> </message> <message> <source>Unknown -blockfilterindex value %s.</source> <translation>Непозната вредност -blockfilterindex %s.</translation> </message> <message> <source>Verifying wallet(s)...</source> <translation>Потврђивање новчаника(а)...</translation> </message> <message> <source>Warning: unknown new rules activated (versionbit %i)</source> <translation>Упозорење: активирано је ново непознато правило (versionbit %i)</translation> </message> <message> <source>-maxtxfee is set very high! Fees this large could be paid on a single transaction.</source> <translation>-maxtxfee је постављен сувише високо! Овако велике провизије могу бити наплаћене на само једној трансакцији.</translation> </message> <message> <source>This is the transaction fee you may pay when fee estimates are not available.</source> <translation>Ово је провизија за трансакцију коју можете платити када процена провизије није доступна.</translation> </message> <message> <source>Total length of network version string (%i) exceeds maximum length (%i). Reduce the number or size of uacomments.</source> <translation>Укупна дужина мрежне верзије низа (%i) је већа од максималне дужине (%i). Смањити број или величину корисничких коментара.</translation> </message> <message> <source>%s is set very high!</source> <translation>%s је постављен врло високо!</translation> </message> <message> <source>Error loading wallet %s. Duplicate -wallet filename specified.</source> <translation>Грешка приликом учитавања новчаника %s. 
Наведено је дуплирано име датотеке -wallet.</translation> </message> <message> <source>Starting network threads...</source> <translation>Покретање мрежних тема...</translation> </message> <message> <source>The wallet will avoid paying less than the minimum relay fee.</source> <translation>Новчаник ће избећи плаћање износа мањег него што је минимална повезана провизија.</translation> </message> <message> <source>This is the minimum transaction fee you pay on every transaction.</source> <translation>Ово је минимални износ провизије за трансакцију коју ћете платити на свакој трансакцији.</translation> </message> <message> <source>This is the transaction fee you will pay if you send a transaction.</source> <translation>Ово је износ провизије за трансакцију коју ћете платити уколико шаљете трансакцију.</translation> </message> <message> <source>Transaction amounts must not be negative</source> <translation>Износ трансакције не може бити негативан</translation> </message> <message> <source>Transaction has too long of a mempool chain</source> <translation>Трансакција има предугачак ланац у удруженој меморији</translation> </message> <message> <source>Transaction must have at least one recipient</source> <translation>Трансакција мора имати бар једног примаоца</translation> </message> <message> <source>Unknown network specified in -onlynet: '%s'</source> <translation>Непозната мрежа је наведена у -onlynet: '%s'</translation> </message> <message> <source>Insufficient funds</source> <translation>Недовољно средстава</translation> </message> <message> <source>Fee estimation failed. Fallbackfee is disabled. Wait a few blocks or enable -fallbackfee.</source> <translation>Процена провизије није успела. Промена провизије током трансакције је онемогућена. 
Сачекајте неколико блокова или омогућите -fallbackfee.</translation> </message> <message> <source>Warning: Private keys detected in wallet {%s} with disabled private keys</source> <translation>Упозорење: Приватни кључеви су пронађени у новчанику {%s} са онемогућеним приватним кључевима.</translation> </message> <message> <source>Cannot write to data directory '%s'; check permissions.</source> <translation>Није могуће извршити упис у директоријум података '%s'; проверите дозволе за упис.</translation> </message> <message> <source>Loading block index...</source> <translation>Учитавање индекса блокова</translation> </message> <message> <source>Loading wallet...</source> <translation>Новчаник се учитава...</translation> </message> <message> <source>Cannot downgrade wallet</source> <translation>Новчаник се не може уназадити</translation> </message> <message> <source>Rescanning...</source> <translation>Ponovo skeniram...</translation> </message> <message> <source>Done loading</source> <translation>Završeno učitavanje</translation> </message> </context> </TS><|fim▁end|>
<|file_name|>query.py<|end_file_name|><|fim▁begin|># This file is part of beets. # Copyright 2016, Adrian Sampson. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. """The Query type hierarchy for DBCore. """ import re from operator import mul from beets import util from datetime import datetime, timedelta import unicodedata from functools import reduce class ParsingError(ValueError): """Abstract class for any unparseable user-requested album/query specification. """ class InvalidQueryError(ParsingError): """Represent any kind of invalid query. The query should be a unicode string or a list, which will be space-joined. """ def __init__(self, query, explanation): if isinstance(query, list): query = " ".join(query) message = f"'{query}': {explanation}" super().__init__(message) class InvalidQueryArgumentValueError(ParsingError): """Represent a query argument that could not be converted as expected. It exists to be caught in upper stack levels so a meaningful (i.e. with the query) InvalidQueryError can be raised. """ def __init__(self, what, expected, detail=None): message = f"'{what}' is not {expected}" if detail: message = f"{message}: {detail}" super().__init__(message) class Query: """An abstract class representing a query into the item database. """ def clause(self): """Generate an SQLite expression implementing the query. 
Return (clause, subvals) where clause is a valid sqlite WHERE clause implementing the query and subvals is a list of items to be substituted for ?s in the clause. """ return None, () def match(self, item): """Check whether this query matches a given Item. Can be used to perform queries on arbitrary sets of Items. """ raise NotImplementedError def __repr__(self): return f"{self.__class__.__name__}()" def __eq__(self, other): return type(self) == type(other) def __hash__(self): return 0 class FieldQuery(Query): """An abstract query that searches in a specific field for a pattern. Subclasses must provide a `value_match` class method, which determines whether a certain pattern string matches a certain value string. Subclasses may also provide `col_clause` to implement the same matching functionality in SQLite. """ def __init__(self, field, pattern, fast=True): self.field = field self.pattern = pattern self.fast = fast def col_clause(self): return None, () def clause(self): if self.fast: return self.col_clause() else: # Matching a flexattr. This is a slow query. return None, () @classmethod def value_match(cls, pattern, value): """Determine whether the value matches the pattern. Both arguments are strings. 
""" raise NotImplementedError() def match(self, item): return self.value_match(self.pattern, item.get(self.field)) def __repr__(self): return ("{0.__class__.__name__}({0.field!r}, {0.pattern!r}, " "{0.fast})".format(self)) def __eq__(self, other): return super().__eq__(other) and \ self.field == other.field and self.pattern == other.pattern def __hash__(self): return hash((self.field, hash(self.pattern))) class MatchQuery(FieldQuery): """A query that looks for exact matches in an item field.""" def col_clause(self): return self.field + " = ?", [self.pattern] @classmethod def value_match(cls, pattern, value): return pattern == value class NoneQuery(FieldQuery): """A query that checks whether a field is null.""" def __init__(self, field, fast=True): super().__init__(field, None, fast) def col_clause(self): return self.field + " IS NULL", () def match(self, item): return item.get(self.field) is None def __repr__(self): return "{0.__class__.__name__}({0.field!r}, {0.fast})".format(self) class StringFieldQuery(FieldQuery): """A FieldQuery that converts values to strings before matching them. """ @classmethod def value_match(cls, pattern, value): """Determine whether the value matches the pattern. The value may have any type. """ return cls.string_match(pattern, util.as_string(value)) @classmethod def string_match(cls, pattern, value): """Determine whether the value matches the pattern. Both arguments are strings. Subclasses implement this method. """ raise NotImplementedError() class StringQuery(StringFieldQuery): """A query that matches a whole string in a specific item field.""" def col_clause(self): search = (self.pattern .replace('\\', '\\\\') .replace('%', '\\%') .replace('_', '\\_')) clause = self.field + " like ? 
escape '\\'" subvals = [search] return clause, subvals @classmethod def string_match(cls, pattern, value): return pattern.lower() == value.lower() class SubstringQuery(StringFieldQuery): """A query that matches a substring in a specific item field.""" def col_clause(self): pattern = (self.pattern .replace('\\', '\\\\') .replace('%', '\\%') .replace('_', '\\_')) search = '%' + pattern + '%' clause = self.field + " like ? escape '\\'" subvals = [search] return clause, subvals @classmethod def string_match(cls, pattern, value): return pattern.lower() in value.lower() class RegexpQuery(StringFieldQuery): """A query that matches a regular expression in a specific item field. Raises InvalidQueryError when the pattern is not a valid regular expression. """ def __init__(self, field, pattern, fast=True): super().__init__(field, pattern, fast) pattern = self._normalize(pattern) try: self.pattern = re.compile(self.pattern) except re.error as exc: # Invalid regular expression. raise InvalidQueryArgumentValueError(pattern, "a regular expression", format(exc)) @staticmethod def _normalize(s): """Normalize a Unicode string's representation (used on both patterns and matched values). """ return unicodedata.normalize('NFC', s) @classmethod def string_match(cls, pattern, value): return pattern.search(cls._normalize(value)) is not None class BooleanQuery(MatchQuery): """Matches a boolean field. Pattern should either be a boolean or a string reflecting a boolean. """ def __init__(self, field, pattern, fast=True): super().__init__(field, pattern, fast) if isinstance(pattern, str): self.pattern = util.str2bool(pattern) self.pattern = int(self.pattern) class BytesQuery(MatchQuery): """Match a raw bytes field (i.e., a path). This is a necessary hack to work around the `sqlite3` module's desire to treat `bytes` and `unicode` equivalently in Python 2. Always use this query instead of `MatchQuery` when matching on BLOB values. 
""" def __init__(self, field, pattern): super().__init__(field, pattern) # Use a buffer/memoryview representation of the pattern for SQLite # matching. This instructs SQLite to treat the blob as binary # rather than encoded Unicode. if isinstance(self.pattern, (str, bytes)): if isinstance(self.pattern, str): self.pattern = self.pattern.encode('utf-8') self.buf_pattern = memoryview(self.pattern) elif isinstance(self.pattern, memoryview): self.buf_pattern = self.pattern self.pattern = bytes(self.pattern) def col_clause(self): return self.field + " = ?", [self.buf_pattern] class NumericQuery(FieldQuery): """Matches numeric fields. A syntax using Ruby-style range ellipses (``..``) lets users specify one- or two-sided ranges. For example, ``year:2001..`` finds music released since the turn of the century. Raises InvalidQueryError when the pattern does not represent an int or a float. """ def _convert(self, s): """Convert a string to a numeric type (float or int). Return None if `s` is empty. Raise an InvalidQueryError if the string cannot be converted. """ # This is really just a bit of fun premature optimization. if not s: return None try: return int(s) except ValueError: try: return float(s) except ValueError: raise InvalidQueryArgumentValueError(s, "an int or a float") def __init__(self, field, pattern, fast=True): super().__init__(field, pattern, fast) parts = pattern.split('..', 1) if len(parts) == 1: # No range. self.point = self._convert(parts[0]) self.rangemin = None self.rangemax = None else: # One- or two-sided range. 
self.point = None self.rangemin = self._convert(parts[0]) self.rangemax = self._convert(parts[1]) def match(self, item): if self.field not in item: return False value = item[self.field] if isinstance(value, str): value = self._convert(value) if self.point is not None: return value == self.point else: if self.rangemin is not None and value < self.rangemin: return False if self.rangemax is not None and value > self.rangemax: return False return True def col_clause(self): if self.point is not None: return self.field + '=?', (self.point,) else: if self.rangemin is not None and self.rangemax is not None: return ('{0} >= ? AND {0} <= ?'.format(self.field), (self.rangemin, self.rangemax)) elif self.rangemin is not None: return f'{self.field} >= ?', (self.rangemin,) elif self.rangemax is not None: return f'{self.field} <= ?', (self.rangemax,) else: return '1', () class CollectionQuery(Query): """An abstract query class that aggregates other queries. Can be indexed like a list to access the sub-queries. """ def __init__(self, subqueries=()): self.subqueries = subqueries # Act like a sequence. def __len__(self): return len(self.subqueries) def __getitem__(self, key): return self.subqueries[key] def __iter__(self): return iter(self.subqueries) def __contains__(self, item): return item in self.subqueries def clause_with_joiner(self, joiner): """Return a clause created by joining together the clauses of all subqueries with the string joiner (padded by spaces). """ clause_parts = [] subvals = [] for subq in self.subqueries: subq_clause, subq_subvals = subq.clause() if not subq_clause: # Fall back to slow query. 
return None, () clause_parts.append('(' + subq_clause + ')') subvals += subq_subvals clause = (' ' + joiner + ' ').join(clause_parts) return clause, subvals def __repr__(self): return "{0.__class__.__name__}({0.subqueries!r})".format(self) def __eq__(self, other): return super().__eq__(other) and \ self.subqueries == other.subqueries<|fim▁hole|> """Since subqueries are mutable, this object should not be hashable. However and for conveniences purposes, it can be hashed. """ return reduce(mul, map(hash, self.subqueries), 1) class AnyFieldQuery(CollectionQuery): """A query that matches if a given FieldQuery subclass matches in any field. The individual field query class is provided to the constructor. """ def __init__(self, pattern, fields, cls): self.pattern = pattern self.fields = fields self.query_class = cls subqueries = [] for field in self.fields: subqueries.append(cls(field, pattern, True)) super().__init__(subqueries) def clause(self): return self.clause_with_joiner('or') def match(self, item): for subq in self.subqueries: if subq.match(item): return True return False def __repr__(self): return ("{0.__class__.__name__}({0.pattern!r}, {0.fields!r}, " "{0.query_class.__name__})".format(self)) def __eq__(self, other): return super().__eq__(other) and \ self.query_class == other.query_class def __hash__(self): return hash((self.pattern, tuple(self.fields), self.query_class)) class MutableCollectionQuery(CollectionQuery): """A collection query whose subqueries may be modified after the query is initialized. 
""" def __setitem__(self, key, value): self.subqueries[key] = value def __delitem__(self, key): del self.subqueries[key] class AndQuery(MutableCollectionQuery): """A conjunction of a list of other queries.""" def clause(self): return self.clause_with_joiner('and') def match(self, item): return all(q.match(item) for q in self.subqueries) class OrQuery(MutableCollectionQuery): """A conjunction of a list of other queries.""" def clause(self): return self.clause_with_joiner('or') def match(self, item): return any(q.match(item) for q in self.subqueries) class NotQuery(Query): """A query that matches the negation of its `subquery`, as a shorcut for performing `not(subquery)` without using regular expressions. """ def __init__(self, subquery): self.subquery = subquery def clause(self): clause, subvals = self.subquery.clause() if clause: return f'not ({clause})', subvals else: # If there is no clause, there is nothing to negate. All the logic # is handled by match() for slow queries. return clause, subvals def match(self, item): return not self.subquery.match(item) def __repr__(self): return "{0.__class__.__name__}({0.subquery!r})".format(self) def __eq__(self, other): return super().__eq__(other) and \ self.subquery == other.subquery def __hash__(self): return hash(('not', hash(self.subquery))) class TrueQuery(Query): """A query that always matches.""" def clause(self): return '1', () def match(self, item): return True class FalseQuery(Query): """A query that never matches.""" def clause(self): return '0', () def match(self, item): return False # Time/date queries. def _to_epoch_time(date): """Convert a `datetime` object to an integer number of seconds since the (local) Unix epoch. """ if hasattr(date, 'timestamp'): # The `timestamp` method exists on Python 3.3+. 
return int(date.timestamp()) else: epoch = datetime.fromtimestamp(0) delta = date - epoch return int(delta.total_seconds()) def _parse_periods(pattern): """Parse a string containing two dates separated by two dots (..). Return a pair of `Period` objects. """ parts = pattern.split('..', 1) if len(parts) == 1: instant = Period.parse(parts[0]) return (instant, instant) else: start = Period.parse(parts[0]) end = Period.parse(parts[1]) return (start, end) class Period: """A period of time given by a date, time and precision. Example: 2014-01-01 10:50:30 with precision 'month' represents all instants of time during January 2014. """ precisions = ('year', 'month', 'day', 'hour', 'minute', 'second') date_formats = ( ('%Y',), # year ('%Y-%m',), # month ('%Y-%m-%d',), # day ('%Y-%m-%dT%H', '%Y-%m-%d %H'), # hour ('%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M'), # minute ('%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S') # second ) relative_units = {'y': 365, 'm': 30, 'w': 7, 'd': 1} relative_re = '(?P<sign>[+|-]?)(?P<quantity>[0-9]+)' + \ '(?P<timespan>[y|m|w|d])' def __init__(self, date, precision): """Create a period with the given date (a `datetime` object) and precision (a string, one of "year", "month", "day", "hour", "minute", or "second"). """ if precision not in Period.precisions: raise ValueError(f'Invalid precision {precision}') self.date = date self.precision = precision @classmethod def parse(cls, string): """Parse a date and return a `Period` object or `None` if the string is empty, or raise an InvalidQueryArgumentValueError if the string cannot be parsed to a date. The date may be absolute or relative. Absolute dates look like `YYYY`, or `YYYY-MM-DD`, or `YYYY-MM-DD HH:MM:SS`, etc. Relative dates have three parts: - Optionally, a ``+`` or ``-`` sign indicating the future or the past. The default is the future. - A number: how much to add or subtract. - A letter indicating the unit: days, weeks, months or years (``d``, ``w``, ``m`` or ``y``). 
A "month" is exactly 30 days and a "year" is exactly 365 days. """ def find_date_and_format(string): for ord, format in enumerate(cls.date_formats): for format_option in format: try: date = datetime.strptime(string, format_option) return date, ord except ValueError: # Parsing failed. pass return (None, None) if not string: return None # Check for a relative date. match_dq = re.match(cls.relative_re, string) if match_dq: sign = match_dq.group('sign') quantity = match_dq.group('quantity') timespan = match_dq.group('timespan') # Add or subtract the given amount of time from the current # date. multiplier = -1 if sign == '-' else 1 days = cls.relative_units[timespan] date = datetime.now() + \ timedelta(days=int(quantity) * days) * multiplier return cls(date, cls.precisions[5]) # Check for an absolute date. date, ordinal = find_date_and_format(string) if date is None: raise InvalidQueryArgumentValueError(string, 'a valid date/time string') precision = cls.precisions[ordinal] return cls(date, precision) def open_right_endpoint(self): """Based on the precision, convert the period to a precise `datetime` for use as a right endpoint in a right-open interval. """ precision = self.precision date = self.date if 'year' == self.precision: return date.replace(year=date.year + 1, month=1) elif 'month' == precision: if (date.month < 12): return date.replace(month=date.month + 1) else: return date.replace(year=date.year + 1, month=1) elif 'day' == precision: return date + timedelta(days=1) elif 'hour' == precision: return date + timedelta(hours=1) elif 'minute' == precision: return date + timedelta(minutes=1) elif 'second' == precision: return date + timedelta(seconds=1) else: raise ValueError(f'unhandled precision {precision}') class DateInterval: """A closed-open interval of dates. A left endpoint of None means since the beginning of time. A right endpoint of None means towards infinity. 
""" def __init__(self, start, end): if start is not None and end is not None and not start < end: raise ValueError("start date {} is not before end date {}" .format(start, end)) self.start = start self.end = end @classmethod def from_periods(cls, start, end): """Create an interval with two Periods as the endpoints. """ end_date = end.open_right_endpoint() if end is not None else None start_date = start.date if start is not None else None return cls(start_date, end_date) def contains(self, date): if self.start is not None and date < self.start: return False if self.end is not None and date >= self.end: return False return True def __str__(self): return f'[{self.start}, {self.end})' class DateQuery(FieldQuery): """Matches date fields stored as seconds since Unix epoch time. Dates can be specified as ``year-month-day`` strings where only year is mandatory. The value of a date field can be matched against a date interval by using an ellipsis interval syntax similar to that of NumericQuery. """ def __init__(self, field, pattern, fast=True): super().__init__(field, pattern, fast) start, end = _parse_periods(pattern) self.interval = DateInterval.from_periods(start, end) def match(self, item): if self.field not in item: return False timestamp = float(item[self.field]) date = datetime.fromtimestamp(timestamp) return self.interval.contains(date) _clause_tmpl = "{0} {1} ?" def col_clause(self): clause_parts = [] subvals = [] if self.interval.start: clause_parts.append(self._clause_tmpl.format(self.field, ">=")) subvals.append(_to_epoch_time(self.interval.start)) if self.interval.end: clause_parts.append(self._clause_tmpl.format(self.field, "<")) subvals.append(_to_epoch_time(self.interval.end)) if clause_parts: # One- or two-sided interval. clause = ' AND '.join(clause_parts) else: # Match any date. clause = '1' return clause, subvals class DurationQuery(NumericQuery): """NumericQuery that allow human-friendly (M:SS) time interval formats. 
Converts the range(s) to a float value, and delegates on NumericQuery. Raises InvalidQueryError when the pattern does not represent an int, float or M:SS time interval. """ def _convert(self, s): """Convert a M:SS or numeric string to a float. Return None if `s` is empty. Raise an InvalidQueryError if the string cannot be converted. """ if not s: return None try: return util.raw_seconds_short(s) except ValueError: try: return float(s) except ValueError: raise InvalidQueryArgumentValueError( s, "a M:SS string or a float") # Sorting. class Sort: """An abstract class representing a sort operation for a query into the item database. """ def order_clause(self): """Generates a SQL fragment to be used in a ORDER BY clause, or None if no fragment is used (i.e., this is a slow sort). """ return None def sort(self, items): """Sort the list of objects and return a list. """ return sorted(items) def is_slow(self): """Indicate whether this query is *slow*, meaning that it cannot be executed in SQL and must be executed in Python. """ return False def __hash__(self): return 0 def __eq__(self, other): return type(self) == type(other) class MultipleSort(Sort): """Sort that encapsulates multiple sub-sorts. """ def __init__(self, sorts=None): self.sorts = sorts or [] def add_sort(self, sort): self.sorts.append(sort) def _sql_sorts(self): """Return the list of sub-sorts for which we can be (at least partially) fast. A contiguous suffix of fast (SQL-capable) sub-sorts are executable in SQL. The remaining, even if they are fast independently, must be executed slowly. 
""" sql_sorts = [] for sort in reversed(self.sorts): if not sort.order_clause() is None: sql_sorts.append(sort) else: break sql_sorts.reverse() return sql_sorts def order_clause(self): order_strings = [] for sort in self._sql_sorts(): order = sort.order_clause() order_strings.append(order) return ", ".join(order_strings) def is_slow(self): for sort in self.sorts: if sort.is_slow(): return True return False def sort(self, items): slow_sorts = [] switch_slow = False for sort in reversed(self.sorts): if switch_slow: slow_sorts.append(sort) elif sort.order_clause() is None: switch_slow = True slow_sorts.append(sort) else: pass for sort in slow_sorts: items = sort.sort(items) return items def __repr__(self): return f'MultipleSort({self.sorts!r})' def __hash__(self): return hash(tuple(self.sorts)) def __eq__(self, other): return super().__eq__(other) and \ self.sorts == other.sorts class FieldSort(Sort): """An abstract sort criterion that orders by a specific field (of any kind). """ def __init__(self, field, ascending=True, case_insensitive=True): self.field = field self.ascending = ascending self.case_insensitive = case_insensitive def sort(self, objs): # TODO: Conversion and null-detection here. In Python 3, # comparisons with None fail. We should also support flexible # attributes with different types without falling over. def key(item): field_val = item.get(self.field, '') if self.case_insensitive and isinstance(field_val, str): field_val = field_val.lower() return field_val return sorted(objs, key=key, reverse=not self.ascending) def __repr__(self): return '<{}: {}{}>'.format( type(self).__name__, self.field, '+' if self.ascending else '-', ) def __hash__(self): return hash((self.field, self.ascending)) def __eq__(self, other): return super().__eq__(other) and \ self.field == other.field and \ self.ascending == other.ascending class FixedFieldSort(FieldSort): """Sort object to sort on a fixed field. 
""" def order_clause(self): order = "ASC" if self.ascending else "DESC" if self.case_insensitive: field = '(CASE ' \ 'WHEN TYPEOF({0})="text" THEN LOWER({0}) ' \ 'WHEN TYPEOF({0})="blob" THEN LOWER({0}) ' \ 'ELSE {0} END)'.format(self.field) else: field = self.field return f"{field} {order}" class SlowFieldSort(FieldSort): """A sort criterion by some model field other than a fixed field: i.e., a computed or flexible field. """ def is_slow(self): return True class NullSort(Sort): """No sorting. Leave results unsorted.""" def sort(self, items): return items def __nonzero__(self): return self.__bool__() def __bool__(self): return False def __eq__(self, other): return type(self) == type(other) or other is None def __hash__(self): return 0<|fim▁end|>
def __hash__(self):
<|file_name|>pattern.js<|end_file_name|><|fim▁begin|>/* Mark special links * * Options: * external_links_open_new_window(boolean): Open external links in a new window. (false) * mark_special_links(boolean): Marks external or special protocl links with class. (true) * * Documentation: * # General * * Scan all links in the container and mark external links with class * if they point outside the site, or are special protocols. * Also implements new window opening for external links. * To disable this effect for links on a one-by-one-basis, * give them a class of 'link-plain' * * # Default external link example * * {{ example-1 }} * * # Open external link in new window * * {{ example-2 }} * * # Open external link in new window, without icons * * {{ example-3 }} * * # List of all protocol icons * * {{ example-4 }} * * Example: example-1 * <div class="pat-markspeciallinks"> * <ul> * <li>Find out What's new in <a href="http://www.plone.org">Plone</a>.</li> * <li>Plone is written in <a class="link-plain" href="http://www.python.org">Python</a>.</li> * <li>Plone builds on <a href="http://zope.org">Zope</a>.</li> * <li>Plone uses <a href="/">Mockup</a>.</li> * </ul> * </div> * * Example: example-2 * <div class="pat-markspeciallinks" data-pat-markspeciallinks='{"external_links_open_new_window": "true"}'> * <ul> * <li>Find out What's new in <a href="http://www.plone.org">Plone</a>.</li> * <li>Plone is written in <a class="link-plain" href="http://www.python.org">Python</a>.</li> * <li>Plone builds on <a href="http://zope.org">Zope</a>.</li> * <li>Plone uses <a href="/">Mockup</a>.</li> * </ul> * </div> * * Example: example-3 * <div class="pat-markspeciallinks" data-pat-markspeciallinks='{"external_links_open_new_window": "true", "mark_special_links": "false"}'> * <ul> * <li>Find out What's new in <a href="http://www.plone.org">Plone</a>.</li> * <li>Plone is written in <a class="link-plain" href="http://www.python.org">Python</a>.</li> * <li>Plone builds on <a 
href="http://zope.org">Zope</a>.</li> * <li>Plone uses <a href="/">Mockup</a>.</li> * </ul> * </div> * * Example: example-4 * <div class="pat-markspeciallinks"> * <ul> * <li><a href="http://www.plone.org">http</a></li> * <li><a href="https://www.plone.org">https</a></li> * <li><a href="mailto:[email protected]">mailto</a></li> * <li><a href="ftp://www.plone.org">ftp</a></li> * <li><a href="news://www.plone.org">news</a></li> * <li><a href="irc://www.plone.org">irc</a></li> * <li><a href="h323://www.plone.org">h323</a></li> * <li><a href="sip://www.plone.org">sip</a></li> * <li><a href="callto://www.plone.org">callto</a></li> * <li><a href="feed://www.plone.org">feed</a></li> * <li><a href="webcal://www.plone.org">webcal</a></li> * </ul> * </div> * */ define(["pat-base", "jquery"], function (Base, $) { "use strict"; var MarkSpecialLinks = Base.extend({ name: "markspeciallinks", trigger: ".pat-markspeciallinks", parser: "mockup", defaults: { external_links_open_new_window: false, mark_special_links: true, }, init: function () { var self = this, $el = self.$el; // first make external links open in a new window, afterwards do the // normal plone link wrapping in only the content area var elonw, msl, url, protocols, contentarea, res; if ( typeof self.options.external_links_open_new_window === "string" ) { elonw = self.options.external_links_open_new_window.toLowerCase() === "true"; } else if ( typeof self.options.external_links_open_new_window === "boolean" ) { elonw = self.options.external_links_open_new_window; } if (typeof self.options.mark_special_links === "string") { msl = self.options.mark_special_links.toLowerCase() === "true"; } else if (typeof self.options.mark_special_links === "boolean") { msl = self.options.mark_special_links; } url = window.location.protocol + "//" + window.location.host; protocols = /^(mailto|ftp|news|irc|h323|sip|callto|https|feed|webcal)/; contentarea = $el; if (elonw) { // all http links (without the link-plain class), not within this 
site contentarea .find( 'a[href^="http"]:not(.link-plain):not([href^="' + url + '"])' ) .attr("target", "_blank") .attr("rel", "noopener"); } if (msl) { // All links with an http href (without the link-plain class), not within this site,<|fim▁hole|> url + '"]):not(:has(img))' ) .before('<i class="glyphicon link-external"></i>'); // All links without an http href (without the link-plain class), not within this site, // and no img children should be wrapped in a link-[protocol] span contentarea .find( 'a[href]:not([href^="http:"]):not(.link-plain):not([href^="' + url + '"]):not(:has(img)):not([href^="#"])' ) .each(function () { // those without a http link may have another interesting protocol // wrap these in a link-[protocol] span res = protocols.exec($(this).attr("href")); if (res) { var iconclass = "glyphicon link-" + res[0]; $(this).before('<i class="' + iconclass + '"></i>'); } }); } }, }); return MarkSpecialLinks; });<|fim▁end|>
// and no img children should be wrapped in a link-external span contentarea .find( 'a[href^="http:"]:not(.link-plain):not([href^="' +
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import os from carton.cart import Cart from django.contrib import messages from django.shortcuts import render, redirect from django.template.response import TemplateResponse from django.views.generic import View from authentication.views import LoginRequiredMixin from deals.models import Deal from .models import UserShippingDetails class CheckoutView(LoginRequiredMixin, View): """ Creates a checkout page. Attributes: template_name: name of the template that renders the view stripe_secret_api_key: the secret API key for our stripe account stripe_publishable_api_key: the publishable API key """ template_name = 'cart/checkout.html' stripe_secret_api_key = os.getenv('STRIPE_SECRET_API_KEY') stripe_publishable_api_key = os.getenv('STRIPE_PUBLISHABLE_API_KEY') def get(self, request, **kwargs): """ Create checkout page. Gets shopping information from cart and sends it to the payment app in form of a dict. It then renders the checkout template which can then be used to pay.<|fim▁hole|> Returns: A template rendered with the payment details context """ cart = Cart(request.session) amount = cart.total amount_in_cents = int(amount) * 100 title = "Total payment expected" description = "Troupon shopping" payment_details = { "title": title, "key": self.stripe_publishable_api_key, "amount": amount_in_cents, "description": description, "currency": "usd", } request.session['payment_details'] = payment_details context = { "amount": amount, "title": title, "description": description, "payment_details": payment_details, } return render(request, self.template_name, context) class AddToCartView(LoginRequiredMixin, View): """ Add items to cart. When a logged in person clicks on Add to cart on a deal, this view adds the item to the cart. Attributes: LoginRequiredMixin: Ensures the user is logged in View: Normal django view """ def post(self, request, **kwargs): """ Add item to cart. 
Args: request: The incoming post request object **kwargs: Any keyword arguments passed to the function Returns: A redirect to the deals homepage """ dealid = request.POST.get('dealid') deal = Deal.objects.get(id=dealid) cart = Cart(request.session) cart.add(deal, price=deal.price) return redirect('/') class AddShippingDetails(LoginRequiredMixin, View): """ Add shipping details of user. When a logged in user clicks on proceed to checkout this view gets the shipping details of the user Attributes: LoginRequiredMixin: Ensures the user is logged in View: Normal django view """ def get(self, request): cart = Cart(request.session) context = {'cart': cart} return TemplateResponse(request, 'cart/shipping.html', context) def post(self, request, **kwargs): """ Add shipping details. Args: request: The incoming post request object **kwargs: Any keyword arguments passed to the function Returns: A redirect to the checkout page """ user = request.user street = request.POST.get('street') state = request.POST.get('state') postal = request.POST.get('postal') telephone = request.POST.get('telephone') shipping = UserShippingDetails(user=user, street=street, postal=postal, state=state, telephone=telephone) shipping.save() cart = Cart(request.session) context = {'cart': cart} return TemplateResponse(request, 'cart/checkout.html', context) class ViewCartView(LoginRequiredMixin, View): """ Allow user to view all the items in the cart. A logged in user with items in the cart can see a summary of them and their prices. Attributes: LoginRequiredMixin: Ensures the user is logged in View: Normal django view """ def get(self, request, **kwargs): """ Show cart items. Args: request: The incoming get request object **kwargs: Any keyword arguments passed to the function Returns: A template rendered with all the cart items. 
""" cart = Cart(request.session) context = {'cart': cart} return TemplateResponse(request, 'cart/cart.html', context) class ClearCartView(LoginRequiredMixin, View): """ Clear items in cart. When triggered, removes every item in the cart session and leaves it empty. Attributes: LoginRequiredMixin: Ensures the user is logged in View: Normal django view """ def get(self, request, **kwargs): """ Get cart from session and remove everything from it. Args: request: The incoming get request object **kwargs: Any keyword arguments passed to the function Returns: A redirect to the deals homepage """ cart = Cart(request.session) cart.clear() return redirect('/') class RemoveItemView(LoginRequiredMixin, View): """ Remove item from cart. When triggered, removes a particular item from the cart session based on its id. Attributes: LoginRequiredMixin: Ensures the user is logged in View: Normal django view """ def post(self, request, **kwargs): """ Remove item from cart. Args: request: The incoming get request object **kwargs: Any keyword arguments passed to the function Returns: A redirect to the deals homepage """ dealid = request.POST.get('dealid') deal = Deal.objects.get(id=dealid) cart = Cart(request.session) cart.remove(deal) return redirect('/')<|fim▁end|>
Args: request: The incoming get request object **kwargs: Any keyword arguments passed to the function
<|file_name|>test_deprecations.py<|end_file_name|><|fim▁begin|>""" Tests related to deprecation warnings. Also a convenient place to document how deprecations should eventually be turned into errors. """ from __future__ import division, absolute_import, print_function import datetime import sys import operator import warnings import pytest import shutil import tempfile import numpy as np from numpy.testing import ( assert_raises, assert_warns, assert_, assert_array_equal ) from numpy.core._multiarray_tests import fromstring_null_term_c_api try: import pytz _has_pytz = True except ImportError: _has_pytz = False class _DeprecationTestCase(object): # Just as warning: warnings uses re.match, so the start of this message # must match. message = '' warning_cls = DeprecationWarning def setup(self): self.warn_ctx = warnings.catch_warnings(record=True) self.log = self.warn_ctx.__enter__() # Do *not* ignore other DeprecationWarnings. Ignoring warnings # can give very confusing results because of # https://bugs.python.org/issue4180 and it is probably simplest to # try to keep the tests cleanly giving only the right warning type. # (While checking them set to "error" those are ignored anyway) # We still have them show up, because otherwise they would be raised warnings.filterwarnings("always", category=self.warning_cls) warnings.filterwarnings("always", message=self.message, category=self.warning_cls) def teardown(self): self.warn_ctx.__exit__() def assert_deprecated(self, function, num=1, ignore_others=False, function_fails=False, exceptions=np._NoValue, args=(), kwargs={}): """Test if DeprecationWarnings are given and raised. This first checks if the function when called gives `num` DeprecationWarnings, after that it tries to raise these DeprecationWarnings and compares them with `exceptions`. The exceptions can be different for cases where this code path is simply not anticipated and the exception is replaced. 
Parameters ---------- function : callable The function to test num : int Number of DeprecationWarnings to expect. This should normally be 1. ignore_others : bool Whether warnings of the wrong type should be ignored (note that the message is not checked) function_fails : bool If the function would normally fail, setting this will check for warnings inside a try/except block. exceptions : Exception or tuple of Exceptions Exception to expect when turning the warnings into an error. The default checks for DeprecationWarnings. If exceptions is empty the function is expected to run successfully. args : tuple Arguments for `function` kwargs : dict Keyword arguments for `function` """ # reset the log self.log[:] = [] if exceptions is np._NoValue: exceptions = (self.warning_cls,) try: function(*args, **kwargs) except (Exception if function_fails else tuple()): pass # just in case, clear the registry num_found = 0 for warning in self.log: if warning.category is self.warning_cls: num_found += 1 elif not ignore_others: raise AssertionError( "expected %s but got: %s" % (self.warning_cls.__name__, warning.category)) if num is not None and num_found != num: msg = "%i warnings found but %i expected." % (len(self.log), num) lst = [str(w) for w in self.log] raise AssertionError("\n".join([msg] + lst)) with warnings.catch_warnings(): warnings.filterwarnings("error", message=self.message, category=self.warning_cls) try: function(*args, **kwargs) if exceptions != tuple(): raise AssertionError( "No error raised during function call") except exceptions: if exceptions == tuple(): raise AssertionError( "Error raised during function call") def assert_not_deprecated(self, function, args=(), kwargs={}): """Test that warnings are not raised. 
This is just a shorthand for: self.assert_deprecated(function, num=0, ignore_others=True, exceptions=tuple(), args=args, kwargs=kwargs) """ self.assert_deprecated(function, num=0, ignore_others=True, exceptions=tuple(), args=args, kwargs=kwargs) class _VisibleDeprecationTestCase(_DeprecationTestCase): warning_cls = np.VisibleDeprecationWarning class TestNonTupleNDIndexDeprecation(object): def test_basic(self): a = np.zeros((5, 5)) with warnings.catch_warnings(): warnings.filterwarnings('always') assert_warns(FutureWarning, a.__getitem__, [[0, 1], [0, 1]]) assert_warns(FutureWarning, a.__getitem__, [slice(None)]) warnings.filterwarnings('error') assert_raises(FutureWarning, a.__getitem__, [[0, 1], [0, 1]]) assert_raises(FutureWarning, a.__getitem__, [slice(None)]) # a a[[0, 1]] always was advanced indexing, so no error/warning a[[0, 1]] class TestComparisonDeprecations(_DeprecationTestCase): """This tests the deprecation, for non-element-wise comparison logic. This used to mean that when an error occurred during element-wise comparison (i.e. broadcasting) NotImplemented was returned, but also in the comparison itself, False was given instead of the error. Also test FutureWarning for the None comparison. """ message = "elementwise.* comparison failed; .*" def test_normal_types(self): for op in (operator.eq, operator.ne): # Broadcasting errors: self.assert_deprecated(op, args=(np.zeros(3), [])) a = np.zeros(3, dtype='i,i') # (warning is issued a couple of times here) self.assert_deprecated(op, args=(a, a[:-1]), num=None) # Element comparison error (numpy array can't be compared). 
a = np.array([1, np.array([1,2,3])], dtype=object) b = np.array([1, np.array([1,2,3])], dtype=object) self.assert_deprecated(op, args=(a, b), num=None) def test_string(self): # For two string arrays, strings always raised the broadcasting error: a = np.array(['a', 'b']) b = np.array(['a', 'b', 'c']) assert_raises(ValueError, lambda x, y: x == y, a, b) # The empty list is not cast to string, and this used to pass due # to dtype mismatch; now (2018-06-21) it correctly leads to a # FutureWarning. assert_warns(FutureWarning, lambda: a == []) def test_void_dtype_equality_failures(self): class NotArray(object): def __array__(self): raise TypeError # Needed so Python 3 does not raise DeprecationWarning twice. def __ne__(self, other): return NotImplemented self.assert_deprecated(lambda: np.arange(2) == NotArray()) self.assert_deprecated(lambda: np.arange(2) != NotArray()) struct1 = np.zeros(2, dtype="i4,i4") struct2 = np.zeros(2, dtype="i4,i4,i4") assert_warns(FutureWarning, lambda: struct1 == 1) assert_warns(FutureWarning, lambda: struct1 == struct2) assert_warns(FutureWarning, lambda: struct1 != 1) assert_warns(FutureWarning, lambda: struct1 != struct2) def test_array_richcompare_legacy_weirdness(self): # It doesn't really work to use assert_deprecated here, b/c part of # the point of assert_deprecated is to check that when warnings are # set to "error" mode then the error is propagated -- which is good! # But here we are testing a bunch of code that is deprecated *because* # it has the habit of swallowing up errors and converting them into # different warnings. So assert_warns will have to be sufficient. 
assert_warns(FutureWarning, lambda: np.arange(2) == "a") assert_warns(FutureWarning, lambda: np.arange(2) != "a") # No warning for scalar comparisons with warnings.catch_warnings(): warnings.filterwarnings("error") assert_(not (np.array(0) == "a")) assert_(np.array(0) != "a") assert_(not (np.int16(0) == "a")) assert_(np.int16(0) != "a") for arg1 in [np.asarray(0), np.int16(0)]: struct = np.zeros(2, dtype="i4,i4") for arg2 in [struct, "a"]: for f in [operator.lt, operator.le, operator.gt, operator.ge]: if sys.version_info[0] >= 3: # py3 with warnings.catch_warnings() as l: warnings.filterwarnings("always") assert_raises(TypeError, f, arg1, arg2) assert_(not l) else: # py2 assert_warns(DeprecationWarning, f, arg1, arg2) class TestDatetime64Timezone(_DeprecationTestCase): """Parsing of datetime64 with timezones deprecated in 1.11.0, because datetime64 is now timezone naive rather than UTC only. It will be quite a while before we can remove this, because, at the very least, a lot of existing code uses the 'Z' modifier to avoid conversion from local time to UTC, even if otherwise it handles time in a timezone naive fashion. """ def test_string(self): self.assert_deprecated(np.datetime64, args=('2000-01-01T00+01',)) self.assert_deprecated(np.datetime64, args=('2000-01-01T00Z',)) @pytest.mark.skipif(not _has_pytz, reason="The pytz module is not available.") def test_datetime(self): tz = pytz.timezone('US/Eastern') dt = datetime.datetime(2000, 1, 1, 0, 0, tzinfo=tz) self.assert_deprecated(np.datetime64, args=(dt,)) class TestNonCContiguousViewDeprecation(_DeprecationTestCase): """View of non-C-contiguous arrays deprecated in 1.11.0. The deprecation will not be raised for arrays that are both C and F contiguous, as C contiguous is dominant. There are more such arrays with relaxed stride checking than without so the deprecation is not as visible with relaxed stride checking in force. 
""" def test_fortran_contiguous(self): self.assert_deprecated(np.ones((2,2)).T.view, args=(complex,)) self.assert_deprecated(np.ones((2,2)).T.view, args=(np.int8,)) class TestInvalidOrderParameterInputForFlattenArrayDeprecation(_DeprecationTestCase): """Invalid arguments to the ORDER parameter in array.flatten() should not be allowed and should raise an error. However, in the interests of not breaking code that may inadvertently pass invalid arguments to this parameter, a DeprecationWarning will be issued instead for the time being to give developers time to refactor relevant code. """ def test_flatten_array_non_string_arg(self): x = np.zeros((3, 5)) self.message = ("Non-string object detected for " "the array ordering. Please pass " "in 'C', 'F', 'A', or 'K' instead") self.assert_deprecated(x.flatten, args=(np.pi,)) def test_flatten_array_invalid_string_arg(self): # Tests that a DeprecationWarning is raised # when a string of length greater than one # starting with "C", "F", "A", or "K" (case- # and unicode-insensitive) is passed in for # the ORDER parameter. Otherwise, a TypeError # will be raised! x = np.zeros((3, 5)) self.message = ("Non length-one string passed " "in for the array ordering. Please " "pass in 'C', 'F', 'A', or 'K' instead") self.assert_deprecated(x.flatten, args=("FACK",)) class TestArrayDataAttributeAssignmentDeprecation(_DeprecationTestCase): """Assigning the 'data' attribute of an ndarray is unsafe as pointed out in gh-7093. Eventually, such assignment should NOT be allowed, but in the interests of maintaining backwards compatibility, only a Deprecation- Warning will be raised instead for the time being to give developers time to refactor relevant code. 
""" def test_data_attr_assignment(self): a = np.arange(10) b = np.linspace(0, 1, 10) self.message = ("Assigning the 'data' attribute is an " "inherently unsafe operation and will " "be removed in the future.") self.assert_deprecated(a.__setattr__, args=('data', b.data)) class TestLinspaceInvalidNumParameter(_DeprecationTestCase): """Argument to the num parameter in linspace that cannot be safely interpreted as an integer is deprecated in 1.12.0. Argument to the num parameter in linspace that cannot be safely interpreted as an integer should not be allowed. In the interest of not breaking code that passes an argument that could still be interpreted as an integer, a DeprecationWarning will be issued for the time being to give developers time to refactor relevant code. """ def test_float_arg(self): # 2016-02-25, PR#7328 self.assert_deprecated(np.linspace, args=(0, 10, 2.5)) <|fim▁hole|> the function used to silently ignore the parameter and return a representation using the minimal number of bits needed for the form in question. Such behavior is now considered unsafe from a user perspective and will raise an error in the future. """ def test_insufficient_width_positive(self): args = (10,) kwargs = {'width': 2} self.message = ("Insufficient bit width provided. This behavior " "will raise an error in the future.") self.assert_deprecated(np.binary_repr, args=args, kwargs=kwargs) def test_insufficient_width_negative(self): args = (-5,) kwargs = {'width': 2} self.message = ("Insufficient bit width provided. This behavior " "will raise an error in the future.") self.assert_deprecated(np.binary_repr, args=args, kwargs=kwargs) class TestNumericStyleTypecodes(_DeprecationTestCase): """ Deprecate the old numeric-style dtypes, which are especially confusing for complex types, e.g. Complex32 -> complex64. 
When the deprecation cycle is complete, the check for the strings should be removed from PyArray_DescrConverter in descriptor.c, and the deprecated keys should not be added as capitalized aliases in _add_aliases in numerictypes.py. """ def test_all_dtypes(self): deprecated_types = [ 'Bool', 'Complex32', 'Complex64', 'Float16', 'Float32', 'Float64', 'Int8', 'Int16', 'Int32', 'Int64', 'Object0', 'Timedelta64', 'UInt8', 'UInt16', 'UInt32', 'UInt64', 'Void0' ] if sys.version_info[0] < 3: deprecated_types.extend(['Unicode0', 'String0']) for dt in deprecated_types: self.assert_deprecated(np.dtype, exceptions=(TypeError,), args=(dt,)) class TestTestDeprecated(object): def test_assert_deprecated(self): test_case_instance = _DeprecationTestCase() test_case_instance.setup() assert_raises(AssertionError, test_case_instance.assert_deprecated, lambda: None) def foo(): warnings.warn("foo", category=DeprecationWarning, stacklevel=2) test_case_instance.assert_deprecated(foo) test_case_instance.teardown() class TestClassicIntDivision(_DeprecationTestCase): """ See #7949. Deprecate the numeric-style dtypes with -3 flag in python 2 if used for division List of data types: https://docs.scipy.org/doc/numpy/user/basics.types.html """ def test_int_dtypes(self): #scramble types and do some mix and match testing deprecated_types = [ 'bool_', 'int_', 'intc', 'uint8', 'int8', 'uint64', 'int32', 'uint16', 'intp', 'int64', 'uint32', 'int16' ] if sys.version_info[0] < 3 and sys.py3kwarning: import operator as op dt2 = 'bool_' for dt1 in deprecated_types: a = np.array([1,2,3], dtype=dt1) b = np.array([1,2,3], dtype=dt2) self.assert_deprecated(op.div, args=(a,b)) dt2 = dt1 class TestNonNumericConjugate(_DeprecationTestCase): """ Deprecate no-op behavior of ndarray.conjugate on non-numeric dtypes, which conflicts with the error behavior of np.conjugate. 
""" def test_conjugate(self): for a in np.array(5), np.array(5j): self.assert_not_deprecated(a.conjugate) for a in (np.array('s'), np.array('2016', 'M'), np.array((1, 2), [('a', int), ('b', int)])): self.assert_deprecated(a.conjugate) class TestNPY_CHAR(_DeprecationTestCase): # 2017-05-03, 1.13.0 def test_npy_char_deprecation(self): from numpy.core._multiarray_tests import npy_char_deprecation self.assert_deprecated(npy_char_deprecation) assert_(npy_char_deprecation() == 'S1') class TestPyArray_AS1D(_DeprecationTestCase): def test_npy_pyarrayas1d_deprecation(self): from numpy.core._multiarray_tests import npy_pyarrayas1d_deprecation assert_raises(NotImplementedError, npy_pyarrayas1d_deprecation) class TestPyArray_AS2D(_DeprecationTestCase): def test_npy_pyarrayas2d_deprecation(self): from numpy.core._multiarray_tests import npy_pyarrayas2d_deprecation assert_raises(NotImplementedError, npy_pyarrayas2d_deprecation) class Test_UPDATEIFCOPY(_DeprecationTestCase): """ v1.14 deprecates creating an array with the UPDATEIFCOPY flag, use WRITEBACKIFCOPY instead """ def test_npy_updateifcopy_deprecation(self): from numpy.core._multiarray_tests import npy_updateifcopy_deprecation arr = np.arange(9).reshape(3, 3) v = arr.T self.assert_deprecated(npy_updateifcopy_deprecation, args=(v,)) class TestDatetimeEvent(_DeprecationTestCase): # 2017-08-11, 1.14.0 def test_3_tuple(self): for cls in (np.datetime64, np.timedelta64): # two valid uses - (unit, num) and (unit, num, den, None) self.assert_not_deprecated(cls, args=(1, ('ms', 2))) self.assert_not_deprecated(cls, args=(1, ('ms', 2, 1, None))) # trying to use the event argument, removed in 1.7.0, is deprecated # it used to be a uint8 self.assert_deprecated(cls, args=(1, ('ms', 2, 'event'))) self.assert_deprecated(cls, args=(1, ('ms', 2, 63))) self.assert_deprecated(cls, args=(1, ('ms', 2, 1, 'event'))) self.assert_deprecated(cls, args=(1, ('ms', 2, 1, 63))) class TestTruthTestingEmptyArrays(_DeprecationTestCase): # 2017-09-25, 
1.14.0 message = '.*truth value of an empty array is ambiguous.*' def test_1d(self): self.assert_deprecated(bool, args=(np.array([]),)) def test_2d(self): self.assert_deprecated(bool, args=(np.zeros((1, 0)),)) self.assert_deprecated(bool, args=(np.zeros((0, 1)),)) self.assert_deprecated(bool, args=(np.zeros((0, 0)),)) class TestBincount(_DeprecationTestCase): # 2017-06-01, 1.14.0 def test_bincount_minlength(self): self.assert_deprecated(lambda: np.bincount([1, 2, 3], minlength=None)) class TestAlen(_DeprecationTestCase): # 2019-08-02, 1.18.0 def test_alen(self): self.assert_deprecated(lambda: np.alen(np.array([1, 2, 3]))) class TestGeneratorSum(_DeprecationTestCase): # 2018-02-25, 1.15.0 def test_generator_sum(self): self.assert_deprecated(np.sum, args=((i for i in range(5)),)) class TestSctypeNA(_VisibleDeprecationTestCase): # 2018-06-24, 1.16 def test_sctypeNA(self): self.assert_deprecated(lambda: np.sctypeNA['?']) self.assert_deprecated(lambda: np.typeNA['?']) self.assert_deprecated(lambda: np.typeNA.get('?')) class TestPositiveOnNonNumerical(_DeprecationTestCase): # 2018-06-28, 1.16.0 def test_positive_on_non_number(self): self.assert_deprecated(operator.pos, args=(np.array('foo'),)) class TestFromstring(_DeprecationTestCase): # 2017-10-19, 1.14 def test_fromstring(self): self.assert_deprecated(np.fromstring, args=('\x00'*80,)) class TestFromStringAndFileInvalidData(_DeprecationTestCase): # 2019-06-08, 1.17.0 # Tests should be moved to real tests when deprecation is done. 
message = "string or file could not be read to its end" @pytest.mark.parametrize("invalid_str", [",invalid_data", "invalid_sep"]) def test_deprecate_unparsable_data_file(self, invalid_str): x = np.array([1.51, 2, 3.51, 4], dtype=float) with tempfile.TemporaryFile(mode="w") as f: x.tofile(f, sep=',', format='%.2f') f.write(invalid_str) f.seek(0) self.assert_deprecated(lambda: np.fromfile(f, sep=",")) f.seek(0) self.assert_deprecated(lambda: np.fromfile(f, sep=",", count=5)) # Should not raise: with warnings.catch_warnings(): warnings.simplefilter("error", DeprecationWarning) f.seek(0) res = np.fromfile(f, sep=",", count=4) assert_array_equal(res, x) @pytest.mark.parametrize("invalid_str", [",invalid_data", "invalid_sep"]) def test_deprecate_unparsable_string(self, invalid_str): x = np.array([1.51, 2, 3.51, 4], dtype=float) x_str = "1.51,2,3.51,4{}".format(invalid_str) self.assert_deprecated(lambda: np.fromstring(x_str, sep=",")) self.assert_deprecated(lambda: np.fromstring(x_str, sep=",", count=5)) # The C-level API can use not fixed size, but 0 terminated strings, # so test that as well: bytestr = x_str.encode("ascii") self.assert_deprecated(lambda: fromstring_null_term_c_api(bytestr)) with assert_warns(DeprecationWarning): # this is slightly strange, in that fromstring leaves data # potentially uninitialized (would be good to error when all is # read, but count is larger then actual data maybe). res = np.fromstring(x_str, sep=",", count=5) assert_array_equal(res[:-1], x) with warnings.catch_warnings(): warnings.simplefilter("error", DeprecationWarning) # Should not raise: res = np.fromstring(x_str, sep=",", count=4) assert_array_equal(res, x) class Test_GetSet_NumericOps(_DeprecationTestCase): # 2018-09-20, 1.16.0 def test_get_numeric_ops(self): from numpy.core._multiarray_tests import getset_numericops self.assert_deprecated(getset_numericops, num=2) # empty kwargs prevents any state actually changing which would break # other tests. 
self.assert_deprecated(np.set_numeric_ops, kwargs={}) assert_raises(ValueError, np.set_numeric_ops, add='abc') class TestShape1Fields(_DeprecationTestCase): warning_cls = FutureWarning # 2019-05-20, 1.17.0 def test_shape_1_fields(self): self.assert_deprecated(np.dtype, args=([('a', int, 1)],)) class TestNonZero(_DeprecationTestCase): # 2019-05-26, 1.17.0 def test_zerod(self): self.assert_deprecated(lambda: np.nonzero(np.array(0))) self.assert_deprecated(lambda: np.nonzero(np.array(1)))<|fim▁end|>
class TestBinaryReprInsufficientWidthParameterForRepresentation(_DeprecationTestCase): """ If a 'width' parameter is passed into ``binary_repr`` that is insufficient to represent the number in base 2 (positive) or 2's complement (negative) form,
<|file_name|>zfs_permissions.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2015, Nate Coraor <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # DOCUMENTATION = ''' --- module: zfs_permissions short_description: Manage zfs administrative permissions description: - Manages ZFS file system administrative permissions on Solaris and FreeBSD. See zfs(1M) for more information about the properties. version_added: "1.10" options: name: description: - File system or volume name e.g. C(rpool/myfs) required: true state: description: - Whether to allow (C(present)), or unallow (C(absent)) a permission. required: true choices: [present, absent] users: description: - Users to whom permission(s) should be granted, separated by commas. required: false groups: description: - Groups to whom permission(s) should be granted, separated by commas. required: false everyone: description: - Apply permissions to everyone. required: false default: false choices: ['on','off'] permissions: description: - The permission(s) to delegate, separated by commas (required if C(state) is C(present)) required: false choices: ['allow','clone','create','destroy',...] 
local: description: - Apply permissions to C(name) "locally" (C(zfs allow -l)) required: false default: null choices: ['on','off'] descendents: description: - Apply permissions to C(name)'s descendents (C(zfs allow -d)) required: false default: null choices: ['on','off'] recursive: description: - Unallow permissions recursively (ignored when C(state) is C(present)) required: false default: false choices: ['on','off'] author: "Nate Coraor (@natefoo)" ''' EXAMPLES = ''' # Grant `zfs allow` and `unallow` permission to the `adm` user with local+descendents scope - zfs_permissions: name=rpool/myfs users=adm permissions=allow,unallow # Grant `zfs send` to everyone, plus the group `backup` - zfs_permissions: name=rpool/myvol groups=backup everyone=yes permissions=send # Grant `zfs send,receive` to users `foo` and `bar` with local scope only - zfs_permissions: name=rpool/myfs users=foo,bar permissions=send,receive local=yes # Revoke all permissions from everyone (permissions specifically assigned to users and groups remain) - zfs_permissions: name=rpool/myfs state=absent everyone=yes ''' import sys class ZfsPermissions(object): def __init__(self, module): self.module = module self.name = module.params.get('name') self.state = module.params.get('state') self.users = module.params.get('users') or [] self.groups = module.params.get('groups') or [] self.everyone = module.boolean(module.params.get('everyone')) self.perms = module.params.get('permissions') or [] self.recursive = module.boolean(module.params.get('recursive')) self.scope = None self.changed = False self.__current_perms = None if self.state == 'present' and not self.perms: self.module.fail_json(msg='The `permissions` option is required for state=present') if self.state == 'present' and not (self.users or self.groups or self.everyone): self.module.fail_json(msg='One of `users`, `groups`, or `everyone` must be set') for splittable in ('users', 'groups', 'perms'): if getattr(self, splittable): setattr(self, 
splittable, getattr(self, splittable).split(',')) local = module.boolean(module.params.get('local')) descendents = module.boolean(module.params.get('descendents')) if (local and descendents) or (not local and not descendents): self.scope = 'ld' elif local: self.scope = 'l' elif descendents: self.scope = 'd' else: self.module.fail_json(msg='Impossible value for local and descendents') self.subcommand = 'allow' self.recursive_opt = [] if self.state == 'absent': self.subcommand = 'unallow' if self.recursive: self.recursive_opt = ['-r'] else: self.recursive_opt = [] self.run() @property def current_perms(self): if self.__current_perms is None: rc, out, err = self.run_command(['zfs', 'allow', self.name]) if rc: self.module.fail_json(msg='Getting permissions for %s failed: %s' % (self.name, err)) perms = dict(l = dict(u=dict(), g=dict(), e=[]), d = dict(u=dict(), g=dict(), e=[]), ld = dict(u=dict(), g=dict(), e=[])) reading = None for line in out.splitlines(): if line == 'Local permissions:': reading = 'l' elif line == 'Descendent permissions:': reading = 'd' elif line == 'Local+Descendent permissions:': reading = 'ld' elif line.startswith('\tuser '): user, cur_perms = line.split()[1:3] perms[reading]['u'][user] = cur_perms.split(',') elif line.startswith('\tgroup '): group, cur_perms = line.split()[1:3] perms[reading]['g'][group] = cur_perms.split(',') elif line.startswith('\teveryone '): perms[reading]['e'] = line.split()[1].split(',') self.__current_perms = perms return self.__current_perms def run_command(self, cmd): progname = cmd[0] cmd[0] = self.module.get_bin_path(progname, True) return self.module.run_command(cmd) def change_required(self, ent_type): # zfs allow/unallow are idempotent, so we only need to do this for Ansible's changed flag rval = [] if ent_type == 'u': entities = self.users elif ent_type == 'g': entities = self.groups for ent in entities: ent_perms = self.current_perms[self.scope][ent_type].get(ent, None) if self.state == 'present' and ent_perms 
is None:<|fim▁hole|> elif ent_perms is not None: for perm in self.perms: if ((self.state == 'present' and perm not in ent_perms) or (self.state == 'absent' and perm in ent_perms)): # at least one desired permission is absent, or # at least one undesired permission is present rval.append(ent) break return rval def run(self): def run_cmd(args): cmd = ['zfs', self.subcommand] + self.recursive_opt + ['-%s' % self.scope] + args if self.perms: cmd = cmd + [','.join(self.perms)] cmd = cmd + [self.name] if self.module.check_mode: return 'Check mode skipped execution of: %s' % ' '.join(cmd) rc, out, err = self.run_command(cmd) if rc: msg = 'Changing permissions with `%s` failed: %s' % (' '.join(cmd), err) self.module.fail_json(msg=msg) return out stdout = '' for ent_type in ('u', 'g'): change = self.change_required(ent_type) if change: args = ['-%s' % ent_type, ','.join(change)] stdout += run_cmd(args) self.changed = True if self.everyone: everyone_perms = self.current_perms[self.scope]['e'] if self.state == 'absent' and not self.perms and everyone_perms: args = ['-e'] stdout += run_cmd(args) self.changed = True for perm in self.perms: if ((self.state == 'present' and perm not in everyone_perms) or (self.state == 'absent' and perm in everyone_perms)): # args = ['-e'] stdout += run_cmd(args) self.changed = True break exit_args = dict(changed=self.changed, state=self.state) if self.changed: exit_args.update(msg='ZFS permissions updated', stdout=stdout) self.module.exit_json(**exit_args) def main(): module = AnsibleModule( argument_spec = dict( name = dict(required=True), state = dict(default="present", choices=["absent", "present"]), users = dict(default=None), groups = dict(default=None), everyone = dict(default=False, choices=BOOLEANS), permissions = dict(default=None), local = dict(default=None, choices=BOOLEANS), descendents = dict(default=None, choices=BOOLEANS), recursive = dict(default=False, choices=BOOLEANS) ), supports_check_mode = True ) zfs_permissions = 
ZfsPermissions(module) sys.exit(0) # import module snippets from ansible.module_utils.basic import * if __name__ == '__main__': main()<|fim▁end|>
rval.append(ent) elif self.state == 'absent' and ent_perms is not None: rval.append(ent)
<|file_name|>ping.go<|end_file_name|><|fim▁begin|>package server import (<|fim▁hole|> "github.com/gin-gonic/gin" ) func handlePing(c *gin.Context) { c.JSON(http.StatusOK, gin.H{"hello": "world!", "goto": "https://github.com/treeder/functions"}) }<|fim▁end|>
"net/http"
<|file_name|>0004_area_of_circle.rs<|end_file_name|><|fim▁begin|>// Area of circle fn main() { let r = 5.0;<|fim▁hole|> println!("Value = {} ",c); }<|fim▁end|>
let pi = 3.14; let c = pi * r * r;
<|file_name|>PhotosNotesApi.java<|end_file_name|><|fim▁begin|>/* * Jinx is Copyright 2010-2020 by Jeremy Brooks and Contributors * * Jinx is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Jinx is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Jinx. If not, see <http://www.gnu.org/licenses/>. */ package net.jeremybrooks.jinx.api; import net.jeremybrooks.jinx.Jinx; import net.jeremybrooks.jinx.JinxException; import net.jeremybrooks.jinx.JinxUtils; import net.jeremybrooks.jinx.response.Response; import net.jeremybrooks.jinx.response.photos.notes.Note; import java.util.Map; import java.util.TreeMap; /** * Provides access to the flickr.photos.notes API methods. * * @author Jeremy Brooks<|fim▁hole|> */ public class PhotosNotesApi { private Jinx jinx; public PhotosNotesApi(Jinx jinx) { this.jinx = jinx; } /** * Add a note to a photo. Coordinates and sizes are in pixels, based on the 500px image size shown on individual photo pages. * <br> * This method requires authentication with 'write' permission. * * @param photoId (Required) The id of the photo to add a note to. * @param noteX (Required) The left coordinate of the note. * @param noteY (Required) The top coordinate of the note. * @param noteWidth (Required) The width of the note. * @param noteHeight (Required) The height of the note. * @param noteText (Required) The text of the note. * @return object with the ID for the newly created note. * @throws JinxException if required parameters are missing, or if there are any errors. 
* @see <a href="https://www.flickr.com/services/api/flickr.photos.notes.add.html">flickr.photos.notes.add</a> */ public Note add(String photoId, int noteX, int noteY, int noteWidth, int noteHeight, String noteText) throws JinxException { JinxUtils.validateParams(photoId, noteText); Map<String, String> params = new TreeMap<>(); params.put("method", "flickr.photos.notes.add"); params.put("photo_id", photoId); params.put("note_x", Integer.toString(noteX)); params.put("note_y", Integer.toString(noteY)); params.put("note_w", Integer.toString(noteWidth)); params.put("note_h", Integer.toString(noteHeight)); params.put("note_text", noteText); return jinx.flickrPost(params, Note.class); } /** * Edit a note on a photo. Coordinates and sizes are in pixels, based on the 500px image size shown on individual photo pages. * <br> * This method requires authentication with 'write' permission. * * @param noteId (Required) The id of the note to edit. * @param noteX (Required) The left coordinate of the note. * @param noteY (Required) The top coordinate of the note. * @param noteWidth (Required) The width of the note. * @param noteHeight (Required) The height of the note. * @param noteText (Required) The text of the note. * @return object with the status of the requested operation. * @throws JinxException if required parameters are missing, or if there are any errors. 
* @see <a href="https://www.flickr.com/services/api/flickr.photos.notes.edit.html">flickr.photos.notes.edit</a> */ public Response edit(String noteId, int noteX, int noteY, int noteWidth, int noteHeight, String noteText) throws JinxException { JinxUtils.validateParams(noteId, noteText); Map<String, String> params = new TreeMap<>(); params.put("method", "flickr.photos.notes.edit"); params.put("note_id", noteId); params.put("note_x", Integer.toString(noteX)); params.put("note_y", Integer.toString(noteY)); params.put("note_w", Integer.toString(noteWidth)); params.put("note_h", Integer.toString(noteHeight)); params.put("note_text", noteText); return jinx.flickrPost(params, Response.class); } /** * Delete a note from a photo. * <br> * This method requires authentication with 'write' permission. * * @param noteId (Required) The id of the note to delete. * @return object with the status of the requested operation. * @throws JinxException if required parameters are missing, or if there are any errors. * @see <a href="https://www.flickr.com/services/api/flickr.photos.notes.delete.html">flickr.photos.notes.delete</a> */ public Response delete(String noteId) throws JinxException { JinxUtils.validateParams(noteId); Map<String, String> params = new TreeMap<>(); params.put("method", "flickr.photos.notes.delete"); params.put("note_id", noteId); return jinx.flickrPost(params, Response.class); } }<|fim▁end|>
* @see <a href="https://www.flickr.com/services/api/">Flickr API documentation</a> for more details.
<|file_name|>about.client.config.js<|end_file_name|><|fim▁begin|>'use strict'; // Configuring the Articles module angular.module('about').run(['Menus',<|fim▁hole|>]);<|fim▁end|>
function(Menus) { // Set top bar menu items Menus.addMenuItem('mainmenu', 'About Us', 'about', 'left-margin', '/about-us', true, null, 3); }
<|file_name|>hooks.test.js<|end_file_name|><|fim▁begin|>'use strict'; /* jshint -W030 */ var chai = require('chai') , expect = chai.expect , Support = require(__dirname + '/../support') , DataTypes = require(__dirname + '/../../../lib/data-types') , Sequelize = Support.Sequelize , dialect = Support.getTestDialect() , sinon = require('sinon'); describe(Support.getTestDialectTeaser('Hooks'), function() { beforeEach(function() { this.User = this.sequelize.define('User', { username: { type: DataTypes.STRING, allowNull: false }, mood: { type: DataTypes.ENUM, values: ['happy', 'sad', 'neutral'] } }); this.ParanoidUser = this.sequelize.define('ParanoidUser', { username: DataTypes.STRING, mood: { type: DataTypes.ENUM, values: ['happy', 'sad', 'neutral'] } }, { paranoid: true }); return this.sequelize.sync({ force: true }); }); describe('#define', function() { before(function() { this.sequelize.addHook('beforeDefine', function(attributes, options) { options.modelName = 'bar'; options.name.plural = 'barrs'; attributes.type = DataTypes.STRING; }); this.sequelize.addHook('afterDefine', function(factory) { factory.options.name.singular = 'barr'; }); this.model = this.sequelize.define('foo', {name: DataTypes.STRING}); }); it('beforeDefine hook can change model name', function() { expect(this.model.name).to.equal('bar'); }); it('beforeDefine hook can alter options', function() { expect(this.model.options.name.plural).to.equal('barrs'); }); it('beforeDefine hook can alter attributes', function() { expect(this.model.rawAttributes.type).to.be.ok; }); it('afterDefine hook can alter options', function() { expect(this.model.options.name.singular).to.equal('barr'); }); after(function() { this.sequelize.options.hooks = {}; this.sequelize.modelManager.removeModel(this.model); }); }); describe('#init', function() { before(function() { Sequelize.addHook('beforeInit', function(config, options) { config.database = 'db2'; options.host = 'server9'; }); Sequelize.addHook('afterInit', 
function(sequelize) { sequelize.options.protocol = 'udp'; }); this.seq = new Sequelize('db', 'user', 'pass', { dialect : dialect }); }); it('beforeInit hook can alter config', function() { expect(this.seq.config.database).to.equal('db2'); }); it('beforeInit hook can alter options', function() { expect(this.seq.options.host).to.equal('server9'); }); it('afterInit hook can alter options', function() { expect(this.seq.options.protocol).to.equal('udp'); }); after(function() { Sequelize.options.hooks = {}; }); }); describe('passing DAO instances', function() { describe('beforeValidate / afterValidate', function() { it('should pass a DAO instance to the hook', function() { var beforeHooked = false; var afterHooked = false; var User = this.sequelize.define('User', { username: DataTypes.STRING }, { hooks: { beforeValidate: function(user, options, fn) { expect(user).to.be.instanceof(User); beforeHooked = true; fn(); }, afterValidate: function(user, options, fn) { expect(user).to.be.instanceof(User); afterHooked = true; fn(); } } }); return User.sync({ force: true }).then(function() { return User.create({ username: 'bob' }).then(function() { expect(beforeHooked).to.be.true; expect(afterHooked).to.be.true; }); }); }); }); describe('beforeCreate / afterCreate', function() { it('should pass a DAO instance to the hook', function() { var beforeHooked = false; var afterHooked = false; var User = this.sequelize.define('User', { username: DataTypes.STRING }, { hooks: { beforeCreate: function(user, options, fn) { expect(user).to.be.instanceof(User); beforeHooked = true; fn(); }, afterCreate: function(user, options, fn) { expect(user).to.be.instanceof(User); afterHooked = true; fn(); } } }); return User.sync({ force: true }).then(function() { return User.create({ username: 'bob' }).then(function() { expect(beforeHooked).to.be.true; expect(afterHooked).to.be.true; }); }); }); }); describe('beforeDestroy / afterDestroy', function() { it('should pass a DAO instance to the hook', 
function() { var beforeHooked = false; var afterHooked = false; var User = this.sequelize.define('User', { username: DataTypes.STRING }, { hooks: { beforeDestroy: function(user, options, fn) { expect(user).to.be.instanceof(User); beforeHooked = true; fn(); }, afterDestroy: function(user, options, fn) { expect(user).to.be.instanceof(User); afterHooked = true; fn(); } } }); return User.sync({ force: true }).then(function() { return User.create({ username: 'bob' }).then(function(user) { return user.destroy().then(function() { expect(beforeHooked).to.be.true; expect(afterHooked).to.be.true; }); }); }); }); }); describe('beforeDelete / afterDelete', function() { it('should pass a DAO instance to the hook', function() { var beforeHooked = false; var afterHooked = false; var User = this.sequelize.define('User', { username: DataTypes.STRING }, { hooks: { beforeDelete: function(user, options, fn) { expect(user).to.be.instanceof(User); beforeHooked = true; fn(); }, afterDelete: function(user, options, fn) { expect(user).to.be.instanceof(User); afterHooked = true; fn(); } } }); return User.sync({ force: true }).then(function() { return User.create({ username: 'bob' }).then(function(user) { return user.destroy().then(function() { expect(beforeHooked).to.be.true; expect(afterHooked).to.be.true; }); }); }); }); }); describe('beforeUpdate / afterUpdate', function() { it('should pass a DAO instance to the hook', function() { var beforeHooked = false; var afterHooked = false; var User = this.sequelize.define('User', { username: DataTypes.STRING }, { hooks: { beforeUpdate: function(user, options, fn) { expect(user).to.be.instanceof(User); beforeHooked = true; fn(); }, afterUpdate: function(user, options, fn) { expect(user).to.be.instanceof(User); afterHooked = true; fn(); } } }); return User.sync({ force: true }).then(function() { return User.create({ username: 'bob' }).then(function(user) { user.username = 'bawb'; return user.save({ fields: ['username'] }).then(function() { 
expect(beforeHooked).to.be.true; expect(afterHooked).to.be.true; }); }); }); }); }); }); describe('Model#sync', function() { describe('on success', function() { it('should run hooks', function() { var beforeHook = sinon.spy() , afterHook = sinon.spy(); this.User.beforeSync(beforeHook); this.User.afterSync(afterHook); return this.User.sync().then(function() { expect(beforeHook).to.have.been.calledOnce; expect(afterHook).to.have.been.calledOnce; }); }); it('should not run hooks when "hooks = false" option passed', function() { var beforeHook = sinon.spy() , afterHook = sinon.spy(); this.User.beforeSync(beforeHook); this.User.afterSync(afterHook); return this.User.sync({ hooks: false }).then(function() { expect(beforeHook).to.not.have.been.called; expect(afterHook).to.not.have.been.called; }); }); }); describe('on error', function() { it('should return an error from before', function() { var beforeHook = sinon.spy() , afterHook = sinon.spy(); this.User.beforeSync(function(options) { beforeHook(); throw new Error('Whoops!'); }); this.User.afterSync(afterHook); return expect(this.User.sync()).to.be.rejected.then(function(err) { expect(beforeHook).to.have.been.calledOnce; expect(afterHook).not.to.have.been.called; }); }); it('should return an error from after', function() { var beforeHook = sinon.spy()<|fim▁hole|> this.User.afterSync(function(options) { afterHook(); throw new Error('Whoops!'); }); return expect(this.User.sync()).to.be.rejected.then(function(err) { expect(beforeHook).to.have.been.calledOnce; expect(afterHook).to.have.been.calledOnce; }); }); }); }); describe('sequelize#sync', function() { describe('on success', function() { it('should run hooks', function() { var beforeHook = sinon.spy() , afterHook = sinon.spy() , modelBeforeHook = sinon.spy() , modelAfterHook = sinon.spy(); this.sequelize.beforeBulkSync(beforeHook); this.User.beforeSync(modelBeforeHook); this.User.afterSync(modelAfterHook); this.sequelize.afterBulkSync(afterHook); return 
this.sequelize.sync().then(function() { expect(beforeHook).to.have.been.calledOnce; expect(modelBeforeHook).to.have.been.calledOnce; expect(modelAfterHook).to.have.been.calledOnce; expect(afterHook).to.have.been.calledOnce; }); }); it('should not run hooks if "hooks = false" option passed', function() { var beforeHook = sinon.spy() , afterHook = sinon.spy() , modelBeforeHook = sinon.spy() , modelAfterHook = sinon.spy(); this.sequelize.beforeBulkSync(beforeHook); this.User.beforeSync(modelBeforeHook); this.User.afterSync(modelAfterHook); this.sequelize.afterBulkSync(afterHook); return this.sequelize.sync({ hooks: false }).then(function() { expect(beforeHook).to.not.have.been.called; expect(modelBeforeHook).to.not.have.been.called; expect(modelAfterHook).to.not.have.been.called; expect(afterHook).to.not.have.been.called; }); }); afterEach(function() { this.sequelize.options.hooks = {}; }); }); describe('on error', function() { it('should return an error from before', function() { var beforeHook = sinon.spy() , afterHook = sinon.spy(); this.sequelize.beforeBulkSync(function(options) { beforeHook(); throw new Error('Whoops!'); }); this.sequelize.afterBulkSync(afterHook); return expect(this.sequelize.sync()).to.be.rejected.then(function(err) { expect(beforeHook).to.have.been.calledOnce; expect(afterHook).not.to.have.been.called; }); }); it('should return an error from after', function() { var beforeHook = sinon.spy() , afterHook = sinon.spy(); this.sequelize.beforeBulkSync(beforeHook); this.sequelize.afterBulkSync(function(options) { afterHook(); throw new Error('Whoops!'); }); return expect(this.sequelize.sync()).to.be.rejected.then(function(err) { expect(beforeHook).to.have.been.calledOnce; expect(afterHook).to.have.been.calledOnce; }); }); afterEach(function() { this.sequelize.options.hooks = {}; }); }); }); describe('#removal', function() { it('should be able to remove by name', function() { var sasukeHook = sinon.spy() , narutoHook = sinon.spy(); 
this.User.hook('beforeCreate', 'sasuke', sasukeHook); this.User.hook('beforeCreate', 'naruto', narutoHook); return this.User.create({ username: 'makunouchi'}).then(() => { expect(sasukeHook).to.have.been.calledOnce; expect(narutoHook).to.have.been.calledOnce; this.User.removeHook('beforeCreate', 'sasuke'); return this.User.create({ username: 'sendo'}); }).then(() => { expect(sasukeHook).to.have.been.calledOnce; expect(narutoHook).to.have.been.calledTwice; }); }); it('should be able to remove by reference', function() { var sasukeHook = sinon.spy() , narutoHook = sinon.spy(); this.User.hook('beforeCreate', sasukeHook); this.User.hook('beforeCreate', narutoHook); return this.User.create({ username: 'makunouchi'}).then(() => { expect(sasukeHook).to.have.been.calledOnce; expect(narutoHook).to.have.been.calledOnce; this.User.removeHook('beforeCreate', sasukeHook); return this.User.create({ username: 'sendo'}); }).then(() => { expect(sasukeHook).to.have.been.calledOnce; expect(narutoHook).to.have.been.calledTwice; }); }); it('should be able to remove proxies', function() { var sasukeHook = sinon.spy() , narutoHook = sinon.spy(); this.User.hook('beforeSave', sasukeHook); this.User.hook('beforeSave', narutoHook); return this.User.create({ username: 'makunouchi'}).then((user) => { expect(sasukeHook).to.have.been.calledOnce; expect(narutoHook).to.have.been.calledOnce; this.User.removeHook('beforeSave', sasukeHook); return user.updateAttributes({ username: 'sendo'}); }).then(() => { expect(sasukeHook).to.have.been.calledOnce; expect(narutoHook).to.have.been.calledTwice; }); }); }); });<|fim▁end|>
, afterHook = sinon.spy(); this.User.beforeSync(beforeHook);
<|file_name|>jump_to.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """<|fim▁hole|># # This file is licensed under the GNU General Public License v3 # (GPLv3). See LICENSE.txt for details. # testing if we are preserving task_data between wf jumps def main_wf(current): current.task_data['from_main'] = True current.output['from_jumped'] = current.task_data.get('from_jumped') assert current.workflow.name == 'jump_to_wf' def jumped_wf(current): current.output['from_main'] = current.task_data['from_main'] current.task_data['from_jumped'] = True assert current.workflow.name == 'jump_to_wf2' def set_external_wf(current): current.task_data['external_wf'] = 'jump_to_wf2'<|fim▁end|>
""" # Copyright (C) 2015 ZetaOps Inc.
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for @storybook/addon-actions 3.4 // Project: https://github.com/storybooks/storybook, https://github.com/storybooks/storybook/tree/master/addons/actions // Definitions by: Joscha Feth <https://github.com/joscha>, June <https://github.com/jicjjang><|fim▁hole|>export type DecoratorFunction = (args: any[]) => any[]; export interface Options { depth?: number; clearOnStoryChange?: boolean; limit?: number; } export function decorateAction(decorators: DecoratorFunction[]): (name: string, options?: Options) => HandlerFunction; export function configureActions(options: Options): undefined; export function action(name: string): HandlerFunction;<|fim▁end|>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped // TypeScript Version: 2.8 export type HandlerFunction = (...args: any[]) => undefined;
<|file_name|>test_user.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import os import pytest from skylines.lib import files from skylines.lib.types import is_unicode from skylines.model import User, IGCFile from tests.data import users, igcs <|fim▁hole|> def test_user_delete_deletes_user(db_session): john = users.john() db_session.add(john) db_session.commit() john_id = john.id assert john_id is not None assert db_session.query(User).get(john_id) is not None john.delete() db_session.commit() assert db_session.query(User).get(john_id) is None @pytest.mark.usefixtures("files_folder") def test_user_delete_deletes_owned_igc_files(db_session): with open(igcs.simple_path, "rb") as f: filename = files.add_file("simple.igc", f) assert filename is not None assert os.path.isfile(files.filename_to_path(filename)) john = users.john() igc = igcs.simple(owner=john, filename=filename) db_session.add(igc) db_session.commit() assert db_session.query(IGCFile).count() == 1 assert db_session.query(IGCFile).get(igc.id).owner_id == john.id john.delete() db_session.commit() assert db_session.query(IGCFile).count() == 0 assert not os.path.isfile(files.filename_to_path(filename)) def test_repr_is_str(db_session): john = users.john(last_name=u"Müller") db_session.add(john) db_session.commit() assert isinstance(repr(john), str) assert repr(john) == "<User: [email protected], display=John Müller>" def test_hash_password(): hash = User._hash_password(u"secret123", salt=b"abcdef") assert ( hash == "bef57ec7f53a6d40beb640a780a639c83bc29ac8a9816f1fc6c5c6dcd93c4721272b82aa344691fb4037f20617b1d19212042e7e6cb39f4ba0dad95d8137104a" ) assert is_unicode(hash)<|fim▁end|>
<|file_name|>GuiHealth.java<|end_file_name|><|fim▁begin|>package com.tage.rpgutil.client.gui; import net.minecraftforge.client.event.RenderGameOverlayEvent; import cpw.mods.fml.common.eventhandler.Cancelable; import cpw.mods.fml.common.eventhandler.Event;<|fim▁hole|>public class GuiHealth extends Event { public static enum ElementType { ARMOR, HEALTH, } }<|fim▁end|>
import cpw.mods.fml.common.eventhandler.EventPriority; @Cancelable
<|file_name|>new_users_saver.py<|end_file_name|><|fim▁begin|>################################################################################ # new_users_saver funciton ################################################################################ def newusers(m): dict_updater() un = m.from_user.username if un not in DBDIC: uid = m.from_user.id DBDIC[un] = [uid,0] if hasattr(m, 'new_chat_participant'): un = m.new_chat_participant.username if un not in DBDIC: uid = m.new_chat_participant.id DBDIC[un] = [uid,0] dict_saver() ################################################################################<|fim▁hole|>################################################################################<|fim▁end|>
# "newusers" saves new users in the dictionary # (see dict_updater_saver.py for "dict_updater()" and "dict_saver()")
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Generated by typings // Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/587f9d9cb018514c03434ccc0fc53ffcad32e8b7/whatwg-fetch/whatwg-fetch.d.ts // Type definitions for fetch API // Project: https://github.com/github/fetch // Definitions by: Ryan Graham <https://github.com/ryan-codingintrigue> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped declare class Request extends Body { constructor(input: string|Request, init?:RequestInit); method: string; url: string; headers: Headers; context: RequestContext; referrer: string; mode: RequestMode; redirect: RequestRedirect; credentials: RequestCredentials; cache: RequestCache; } interface RequestInit { method?: string; headers?: HeaderInit|{ [index: string]: string }; body?: BodyInit; mode?: RequestMode; redirect?: RequestRedirect; credentials?: RequestCredentials; cache?: RequestCache; } type RequestContext = "audio" | "beacon" | "cspreport" | "download" | "embed" | "eventsource" | "favicon" | "fetch" | "font" | "form" | "frame" | "hyperlink" | "iframe" | "image" | "imageset" | "import" | "internal" | "location" | "manifest" | "object" | "ping" | "plugin" | "prefetch" | "script" | "serviceworker" | "sharedworker" | "subresource" | "style" | "track" | "video" | "worker" | "xmlhttprequest" | "xslt"; type RequestMode = "same-origin" | "no-cors" | "cors"; type RequestRedirect = "follow" | "error" | "manual"; type RequestCredentials = "omit" | "same-origin" | "include"; type RequestCache = "default" | "no-store" | "reload" | "no-cache" | "force-cache" | "only-if-cached"; declare class Headers { append(name: string, value: string): void; delete(name: string):void; get(name: string): string; getAll(name: string): Array<string>;<|fim▁hole|> forEach(callback: (value: string, name: string) => void): void; } declare class Body { bodyUsed: boolean; arrayBuffer(): Promise<ArrayBuffer>; blob(): Promise<Blob>; formData(): Promise<FormData>; 
json(): Promise<any>; json<T>(): Promise<T>; text(): Promise<string>; } declare class Response extends Body { constructor(body?: BodyInit, init?: ResponseInit); static error(): Response; static redirect(url: string, status: number): Response; type: ResponseType; url: string; status: number; ok: boolean; statusText: string; headers: Headers; clone(): Response; } type ResponseType = "basic" | "cors" | "default" | "error" | "opaque" | "opaqueredirect"; interface ResponseInit { status: number; statusText?: string; headers?: HeaderInit; } declare type HeaderInit = Headers|Array<string>; declare type BodyInit = ArrayBuffer|ArrayBufferView|Blob|FormData|string; declare type RequestInfo = Request|string; interface Window { fetch(url: string|Request, init?: RequestInit): Promise<Response>; } declare var fetch: typeof window.fetch;<|fim▁end|>
has(name: string): boolean; set(name: string, value: string): void;
<|file_name|>test_dict_vectorizer.py<|end_file_name|><|fim▁begin|># Authors: Lars Buitinck # Dan Blanchard <[email protected]> # License: BSD 3 clause from random import Random import numpy as np import scipy.sparse as sp from numpy.testing import assert_array_equal from numpy.testing import assert_allclose import pytest from sklearn.feature_extraction import DictVectorizer from sklearn.feature_selection import SelectKBest, chi2 @pytest.mark.parametrize("sparse", (True, False)) @pytest.mark.parametrize("dtype", (int, np.float32, np.int16)) @pytest.mark.parametrize("sort", (True, False)) @pytest.mark.parametrize("iterable", (True, False)) def test_dictvectorizer(sparse, dtype, sort, iterable): D = [{"foo": 1, "bar": 3}, {"bar": 4, "baz": 2}, {"bar": 1, "quux": 1, "quuux": 2}] v = DictVectorizer(sparse=sparse, dtype=dtype, sort=sort) X = v.fit_transform(iter(D) if iterable else D) assert sp.issparse(X) == sparse assert X.shape == (3, 5) assert X.sum() == 14 assert v.inverse_transform(X) == D if sparse: # CSR matrices can't be compared for equality assert_array_equal(X.A, v.transform(iter(D) if iterable else D).A) else: assert_array_equal(X, v.transform(iter(D) if iterable else D)) if sort: assert v.feature_names_ == sorted(v.feature_names_) def test_feature_selection(): # make two feature dicts with two useful features and a bunch of useless # ones, in terms of chi2 d1 = dict([("useless%d" % i, 10) for i in range(20)], useful1=1, useful2=20) d2 = dict([("useless%d" % i, 10) for i in range(20)], useful1=20, useful2=1) for indices in (True, False): v = DictVectorizer().fit([d1, d2]) X = v.transform([d1, d2]) sel = SelectKBest(chi2, k=2).fit(X, [0, 1]) v.restrict(sel.get_support(indices=indices), indices=indices) assert v.get_feature_names() == ["useful1", "useful2"] def test_one_of_k(): D_in = [ {"version": "1", "ham": 2}, {"version": "2", "spam": 0.3}, {"version=3": True, "spam": -1}, ] v = DictVectorizer() X = v.fit_transform(D_in) assert X.shape == (3, 5) D_out = 
v.inverse_transform(X) assert D_out[0] == {"version=1": 1, "ham": 2} names = v.get_feature_names() assert "version=2" in names assert "version" not in names def test_iterable_value(): D_names = ["ham", "spam", "version=1", "version=2", "version=3"] X_expected = [ [2.0, 0.0, 2.0, 1.0, 0.0], [0.0, 0.3, 0.0, 1.0, 0.0], [0.0, -1.0, 0.0, 0.0, 1.0], ] D_in = [ {"version": ["1", "2", "1"], "ham": 2}, {"version": "2", "spam": 0.3}, {"version=3": True, "spam": -1}, ] v = DictVectorizer() X = v.fit_transform(D_in) X = X.toarray() assert_array_equal(X, X_expected) D_out = v.inverse_transform(X) assert D_out[0] == {"version=1": 2, "version=2": 1, "ham": 2} names = v.get_feature_names() assert names == D_names def test_iterable_not_string_error(): error_value = ( "Unsupported type <class 'int'> in iterable value. " "Only iterables of string are supported." ) D2 = [{"foo": "1", "bar": "2"}, {"foo": "3", "baz": "1"}, {"foo": [1, "three"]}]<|fim▁hole|> def test_mapping_error(): error_value = ( "Unsupported value type <class 'dict'> " "for foo: {'one': 1, 'three': 3}.\n" "Mapping objects are not supported." 
) D2 = [ {"foo": "1", "bar": "2"}, {"foo": "3", "baz": "1"}, {"foo": {"one": 1, "three": 3}}, ] v = DictVectorizer(sparse=False) with pytest.raises(TypeError) as error: v.fit(D2) assert str(error.value) == error_value def test_unseen_or_no_features(): D = [{"camelot": 0, "spamalot": 1}] for sparse in [True, False]: v = DictVectorizer(sparse=sparse).fit(D) X = v.transform({"push the pram a lot": 2}) if sparse: X = X.toarray() assert_array_equal(X, np.zeros((1, 2))) X = v.transform({}) if sparse: X = X.toarray() assert_array_equal(X, np.zeros((1, 2))) try: v.transform([]) except ValueError as e: assert "empty" in str(e) def test_deterministic_vocabulary(): # Generate equal dictionaries with different memory layouts items = [("%03d" % i, i) for i in range(1000)] rng = Random(42) d_sorted = dict(items) rng.shuffle(items) d_shuffled = dict(items) # check that the memory layout does not impact the resulting vocabulary v_1 = DictVectorizer().fit([d_sorted]) v_2 = DictVectorizer().fit([d_shuffled]) assert v_1.vocabulary_ == v_2.vocabulary_ def test_n_features_in(): # For vectorizers, n_features_in_ does not make sense and does not exist. dv = DictVectorizer() assert not hasattr(dv, "n_features_in_") d = [{"foo": 1, "bar": 2}, {"foo": 3, "baz": 1}] dv.fit(d) assert not hasattr(dv, "n_features_in_") def test_dictvectorizer_dense_sparse_equivalence(): """Check the equivalence between between sparse and dense DictVectorizer. 
Non-regression test for: https://github.com/scikit-learn/scikit-learn/issues/19978 """ movie_entry_fit = [ {"category": ["thriller", "drama"], "year": 2003}, {"category": ["animation", "family"], "year": 2011}, {"year": 1974}, ] movie_entry_transform = [{"category": ["thriller"], "unseen_feature": "3"}] dense_vectorizer = DictVectorizer(sparse=False) sparse_vectorizer = DictVectorizer(sparse=True) dense_vector_fit = dense_vectorizer.fit_transform(movie_entry_fit) sparse_vector_fit = sparse_vectorizer.fit_transform(movie_entry_fit) assert not sp.issparse(dense_vector_fit) assert sp.issparse(sparse_vector_fit) assert_allclose(dense_vector_fit, sparse_vector_fit.toarray()) dense_vector_transform = dense_vectorizer.transform(movie_entry_transform) sparse_vector_transform = sparse_vectorizer.transform(movie_entry_transform) assert not sp.issparse(dense_vector_transform) assert sp.issparse(sparse_vector_transform) assert_allclose(dense_vector_transform, sparse_vector_transform.toarray()) dense_inverse_transform = dense_vectorizer.inverse_transform(dense_vector_transform) sparse_inverse_transform = sparse_vectorizer.inverse_transform( sparse_vector_transform ) expected_inverse = [{"category=thriller": 1.0}] assert dense_inverse_transform == expected_inverse assert sparse_inverse_transform == expected_inverse def test_dict_vectorizer_unsupported_value_type(): """Check that we raise an error when the value associated to a feature is not supported. Non-regression test for: https://github.com/scikit-learn/scikit-learn/issues/19489 """ class A: pass vectorizer = DictVectorizer(sparse=True) X = [{"foo": A()}] err_msg = "Unsupported value Type" with pytest.raises(TypeError, match=err_msg): vectorizer.fit_transform(X)<|fim▁end|>
v = DictVectorizer(sparse=False) with pytest.raises(TypeError) as error: v.fit(D2) assert str(error.value) == error_value
<|file_name|>udp.rs<|end_file_name|><|fim▁begin|>use crate::sys::unix::net::{new_ip_socket, socket_addr}; use std::io; use std::mem; use std::net::{self, SocketAddr}; use std::os::unix::io::{AsRawFd, FromRawFd}; pub fn bind(addr: SocketAddr) -> io::Result<net::UdpSocket> { // Gives a warning for non Apple platforms. #[allow(clippy::let_and_return)]<|fim▁hole|> let socket = new_ip_socket(addr, libc::SOCK_DGRAM); socket.and_then(|socket| { let (raw_addr, raw_addr_length) = socket_addr(&addr); syscall!(bind(socket, raw_addr.as_ptr(), raw_addr_length)) .map_err(|err| { // Close the socket if we hit an error, ignoring the error // from closing since we can't pass back two errors. let _ = unsafe { libc::close(socket) }; err }) .map(|_| unsafe { net::UdpSocket::from_raw_fd(socket) }) }) } pub(crate) fn only_v6(socket: &net::UdpSocket) -> io::Result<bool> { let mut optval: libc::c_int = 0; let mut optlen = mem::size_of::<libc::c_int>() as libc::socklen_t; syscall!(getsockopt( socket.as_raw_fd(), libc::IPPROTO_IPV6, libc::IPV6_V6ONLY, &mut optval as *mut _ as *mut _, &mut optlen, ))?; Ok(optval != 0) }<|fim▁end|>
<|file_name|>graph_test.go<|end_file_name|><|fim▁begin|>package docker import ( "archive/tar" "bytes" "errors" "github.com/dotcloud/docker/utils" "io" "io/ioutil" "os" "path" "testing" "time" ) func TestInit(t *testing.T) { graph := tempGraph(t) defer os.RemoveAll(graph.Root) // Root should exist if _, err := os.Stat(graph.Root); err != nil { t.Fatal(err) } // All() should be empty if l, err := graph.All(); err != nil { t.Fatal(err) } else if len(l) != 0 { t.Fatalf("List() should return %d, not %d", 0, len(l)) } } // Test that Register can be interrupted cleanly without side effects func TestInterruptedRegister(t *testing.T) { graph := tempGraph(t) defer os.RemoveAll(graph.Root) badArchive, w := io.Pipe() // Use a pipe reader as a fake archive which never yields data image := &Image{ ID: GenerateID(), Comment: "testing", Created: time.Now(), } go graph.Register(badArchive, false, image) time.Sleep(200 * time.Millisecond) w.CloseWithError(errors.New("But I'm not a tarball!")) // (Nobody's perfect, darling) if _, err := graph.Get(image.ID); err == nil { t.Fatal("Image should not exist after Register is interrupted") } // Registering the same image again should succeed if the first register was interrupted goodArchive, err := fakeTar() if err != nil { t.Fatal(err) } if err := graph.Register(goodArchive, false, image); err != nil { t.Fatal(err) } } // FIXME: Do more extensive tests (ex: create multiple, delete, recreate; // create multiple, check the amount of images and paths, etc..) 
func TestGraphCreate(t *testing.T) { graph := tempGraph(t) defer os.RemoveAll(graph.Root) archive, err := fakeTar() if err != nil { t.Fatal(err) } image, err := graph.Create(archive, nil, "Testing", "", nil) if err != nil { t.Fatal(err) } if err := ValidateID(image.ID); err != nil { t.Fatal(err) } if image.Comment != "Testing" { t.Fatalf("Wrong comment: should be '%s', not '%s'", "Testing", image.Comment) } if image.DockerVersion != VERSION { t.Fatalf("Wrong docker_version: should be '%s', not '%s'", VERSION, image.DockerVersion) } if images, err := graph.All(); err != nil { t.Fatal(err) } else if l := len(images); l != 1 { t.Fatalf("Wrong number of images. Should be %d, not %d", 1, l) } } func TestRegister(t *testing.T) { graph := tempGraph(t) defer os.RemoveAll(graph.Root) archive, err := fakeTar() if err != nil { t.Fatal(err) } image := &Image{ ID: GenerateID(), Comment: "testing", Created: time.Now(), } err = graph.Register(archive, false, image) if err != nil { t.Fatal(err) } if images, err := graph.All(); err != nil { t.Fatal(err) } else if l := len(images); l != 1 { t.Fatalf("Wrong number of images. Should be %d, not %d", 1, l) } if resultImg, err := graph.Get(image.ID); err != nil { t.Fatal(err) } else { if resultImg.ID != image.ID { t.Fatalf("Wrong image ID. Should be '%s', not '%s'", image.ID, resultImg.ID) } if resultImg.Comment != image.Comment { t.Fatalf("Wrong image comment. 
Should be '%s', not '%s'", image.Comment, resultImg.Comment) } } } func TestMount(t *testing.T) { graph := tempGraph(t)<|fim▁hole|> archive, err := fakeTar() if err != nil { t.Fatal(err) } image, err := graph.Create(archive, nil, "Testing", "", nil) if err != nil { t.Fatal(err) } tmp, err := ioutil.TempDir("", "docker-test-graph-mount-") if err != nil { t.Fatal(err) } defer os.RemoveAll(tmp) rootfs := path.Join(tmp, "rootfs") if err := os.MkdirAll(rootfs, 0700); err != nil { t.Fatal(err) } rw := path.Join(tmp, "rw") if err := os.MkdirAll(rw, 0700); err != nil { t.Fatal(err) } if err := image.Mount(rootfs, rw); err != nil { t.Fatal(err) } // FIXME: test for mount contents defer func() { if err := Unmount(rootfs); err != nil { t.Error(err) } }() } // Test that an image can be deleted by its shorthand prefix func TestDeletePrefix(t *testing.T) { graph := tempGraph(t) defer os.RemoveAll(graph.Root) img := createTestImage(graph, t) if err := graph.Delete(utils.TruncateID(img.ID)); err != nil { t.Fatal(err) } assertNImages(graph, t, 0) } func createTestImage(graph *Graph, t *testing.T) *Image { archive, err := fakeTar() if err != nil { t.Fatal(err) } img, err := graph.Create(archive, nil, "Test image", "", nil) if err != nil { t.Fatal(err) } return img } func TestDelete(t *testing.T) { graph := tempGraph(t) defer os.RemoveAll(graph.Root) archive, err := fakeTar() if err != nil { t.Fatal(err) } assertNImages(graph, t, 0) img, err := graph.Create(archive, nil, "Bla bla", "", nil) if err != nil { t.Fatal(err) } assertNImages(graph, t, 1) if err := graph.Delete(img.ID); err != nil { t.Fatal(err) } assertNImages(graph, t, 0) // Test 2 create (same name) / 1 delete img1, err := graph.Create(archive, nil, "Testing", "", nil) if err != nil { t.Fatal(err) } if _, err = graph.Create(archive, nil, "Testing", "", nil); err != nil { t.Fatal(err) } assertNImages(graph, t, 2) if err := graph.Delete(img1.ID); err != nil { t.Fatal(err) } assertNImages(graph, t, 1) // Test delete wrong 
name if err := graph.Delete("Not_foo"); err == nil { t.Fatalf("Deleting wrong ID should return an error") } assertNImages(graph, t, 1) // Test delete twice (pull -> rm -> pull -> rm) if err := graph.Register(archive, false, img1); err != nil { t.Fatal(err) } if err := graph.Delete(img1.ID); err != nil { t.Fatal(err) } assertNImages(graph, t, 1) } func assertNImages(graph *Graph, t *testing.T, n int) { if images, err := graph.All(); err != nil { t.Fatal(err) } else if actualN := len(images); actualN != n { t.Fatalf("Expected %d images, found %d", n, actualN) } } /* * HELPER FUNCTIONS */ func tempGraph(t *testing.T) *Graph { tmp, err := ioutil.TempDir("", "docker-graph-") if err != nil { t.Fatal(err) } graph, err := NewGraph(tmp) if err != nil { t.Fatal(err) } return graph } func testArchive(t *testing.T) Archive { archive, err := fakeTar() if err != nil { t.Fatal(err) } return archive } func fakeTar() (io.Reader, error) { content := []byte("Hello world!\n") buf := new(bytes.Buffer) tw := tar.NewWriter(buf) for _, name := range []string{"/etc/postgres/postgres.conf", "/etc/passwd", "/var/log/postgres/postgres.conf"} { hdr := new(tar.Header) hdr.Size = int64(len(content)) hdr.Name = name if err := tw.WriteHeader(hdr); err != nil { return nil, err } tw.Write([]byte(content)) } tw.Close() return buf, nil }<|fim▁end|>
defer os.RemoveAll(graph.Root)
<|file_name|>RoutingPrinterTest.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2011 Carl Green * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package info.carlwithak.mpxg2.printing; import info.carlwithak.mpxg2.model.Program; import info.carlwithak.mpxg2.model.RoutingData; import info.carlwithak.mpxg2.model.effects.algorithms.Ambience; import info.carlwithak.mpxg2.model.effects.algorithms.AutoPan; import info.carlwithak.mpxg2.model.effects.algorithms.Chamber; import info.carlwithak.mpxg2.model.effects.algorithms.ChorusAlgorithm; import info.carlwithak.mpxg2.model.effects.algorithms.ChorusPedalVol; import info.carlwithak.mpxg2.model.effects.algorithms.DelayDual; import info.carlwithak.mpxg2.model.effects.algorithms.DetuneDual; import info.carlwithak.mpxg2.model.effects.algorithms.EchoDual; import info.carlwithak.mpxg2.model.effects.algorithms.EqPedalVol; import info.carlwithak.mpxg2.model.effects.algorithms.Overdrive; import info.carlwithak.mpxg2.model.effects.algorithms.Panner; import info.carlwithak.mpxg2.model.effects.algorithms.PedalWah1; import info.carlwithak.mpxg2.model.effects.algorithms.Plate; import info.carlwithak.mpxg2.model.effects.algorithms.Screamer; import info.carlwithak.mpxg2.model.effects.algorithms.ShiftDual; import info.carlwithak.mpxg2.model.effects.algorithms.Tone; import info.carlwithak.mpxg2.model.effects.algorithms.UniVybe; import 
info.carlwithak.mpxg2.model.effects.algorithms.VolumeDual; import org.junit.Test; import static org.junit.Assert.assertEquals; /** * Tests for RoutingPrinter. * * @author Carl Green */ public class RoutingPrinterTest { /** * Test printing a textual representation of the routing. * * G2 Blue is a simple all along the upper route routing. * * @throws PrintException if an error is encountered while printing */ @Test public void testPrintG2Blue() throws PrintException { Program program = new Program(); RoutingData routing = new RoutingData(); routing.setEffectId(8); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting0(routing); routing = new RoutingData();<|fim▁hole|> routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting1(routing); routing = new RoutingData(); routing.setEffectId(1); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting2(routing); routing = new RoutingData(); routing.setEffectId(6); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting3(routing); routing = new RoutingData(); routing.setEffectId(2); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting4(routing); routing = new RoutingData(); routing.setEffectId(3); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting5(routing); routing = new RoutingData(); routing.setEffectId(4); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting6(routing); routing = new RoutingData(); routing.setEffectId(5); routing.setUpperInputConnection(0); 
routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting7(routing); routing = new RoutingData(); routing.setEffectId(7); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting8(routing); program.setEffect1(new UniVybe()); program.setEffect2(new PedalWah1()); program.setChorus(new ChorusPedalVol()); program.setDelay(new EchoDual()); program.setReverb(new Ambience()); program.setGain(new Screamer()); String expected = "I=1=2=G=C=D=R=e=O"; assertEquals(expected, RoutingPrinter.print(program)); } /** * Test printing a textual representation of the routing. * * Guitar Solo splits into the lower route. * * @throws PrintException if an error is encountered while printing */ @Test public void testPrintGuitarSolo() throws PrintException { Program program = new Program(); RoutingData routing = new RoutingData(); routing.setEffectId(8); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting0(routing); routing = new RoutingData(); routing.setEffectId(5); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting1(routing); routing = new RoutingData(); routing.setEffectId(2); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting2(routing); routing = new RoutingData(); routing.setEffectId(6); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting3(routing); routing = new RoutingData(); routing.setEffectId(0); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(3); routing.setPathType(0); program.setRouting4(routing); routing = new RoutingData(); routing.setEffectId(3); 
routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(1); routing.setPathType(1); program.setRouting5(routing); routing = new RoutingData(); routing.setEffectId(4); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(1); program.setRouting6(routing); routing = new RoutingData(); routing.setEffectId(1); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(2); routing.setPathType(1); program.setRouting7(routing); routing = new RoutingData(); routing.setEffectId(7); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting8(routing); program.setEffect1(new DetuneDual()); program.setEffect2(new Panner()); program.setDelay(new EchoDual()); program.setReverb(new Plate()); program.setGain(new Screamer()); String expected = "I=e=c=G=1===R=2=O\n" + " |=D===|"; String actual = RoutingPrinter.print(program); assertEquals(expected, actual); } /** * Test printing a textual representation of the routing. * * Cordovox splits and has mono and stereo paths. 
* * @throws PrintException if an error is encountered while printing */ @Test public void testPrintCordovox() throws PrintException { Program program = new Program(); RoutingData routing = new RoutingData(); routing.setEffectId(8); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting0(routing); routing = new RoutingData(); routing.setEffectId(5); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting1(routing); routing = new RoutingData(); routing.setEffectId(6); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting2(routing); routing = new RoutingData(); routing.setEffectId(2); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(3); routing.setPathType(0); program.setRouting3(routing); routing = new RoutingData(); routing.setEffectId(0); routing.setUpperInputConnection(0); routing.setLowerInputConnection(4); routing.setRouting(1); routing.setPathType(1); program.setRouting4(routing); routing = new RoutingData(); routing.setEffectId(1); routing.setUpperInputConnection(3); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(1); program.setRouting5(routing); routing = new RoutingData(); routing.setEffectId(3); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(1); program.setRouting6(routing); routing = new RoutingData(); routing.setEffectId(4); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(1); program.setRouting7(routing); routing = new RoutingData(); routing.setEffectId(7); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(2); routing.setPathType(1); program.setRouting8(routing); 
program.setEffect1(new AutoPan()); program.setEffect2(new AutoPan()); program.setChorus(new ChorusAlgorithm()); program.setDelay(new EchoDual()); program.setReverb(new Chamber()); program.setEq(new EqPedalVol()); program.setGain(new Tone()); String expected = "I=E=G=C--\\2=D=R=O\n" + " |/1=======|"; String actual = RoutingPrinter.print(program); assertEquals(expected, actual); } /** * Test printing a textual representation of the routing. * * PowerChords has "lower case numbers". * * @throws PrintException if an error is encountered while printing */ @Test public void testPrintPowerChords() throws PrintException { Program program = new Program(); RoutingData routing = new RoutingData(); routing.setEffectId(8); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting0(routing); routing = new RoutingData(); routing.setEffectId(1); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting1(routing); routing = new RoutingData(); routing.setEffectId(6); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting2(routing); routing = new RoutingData(); routing.setEffectId(5); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting3(routing); routing = new RoutingData(); routing.setEffectId(0); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting4(routing); routing = new RoutingData(); routing.setEffectId(2); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting5(routing); routing = new RoutingData(); routing.setEffectId(3); routing.setUpperInputConnection(0); 
routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting6(routing); routing = new RoutingData(); routing.setEffectId(4); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting7(routing); routing = new RoutingData(); routing.setEffectId(7); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting8(routing); program.setEffect1(new ShiftDual()); program.setDelay(new DelayDual()); program.setReverb(new Chamber()); program.setGain(new Overdrive()); String expected = "I=₂=G=e=1=c=D=R=O"; assertEquals(expected, RoutingPrinter.print(program)); } /** * Test printing a textual representation of the routing. * * Pitch Cascade has inactive effects on the lower routing. * * @throws PrintException if an error is encountered while printing */ @Test public void testPrintPitchCascase() throws PrintException { Program program = new Program(); RoutingData routing = new RoutingData(); routing.setEffectId(8); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting0(routing); routing = new RoutingData(); routing.setEffectId(6); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting1(routing); routing = new RoutingData(); routing.setEffectId(1); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(3); routing.setPathType(0); program.setRouting2(routing); routing = new RoutingData(); routing.setEffectId(5); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(1); routing.setPathType(1); program.setRouting3(routing); routing = new RoutingData(); routing.setEffectId(3); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); 
routing.setRouting(1); routing.setPathType(1); program.setRouting4(routing); routing = new RoutingData(); routing.setEffectId(0); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(1); routing.setPathType(1); program.setRouting5(routing); routing = new RoutingData(); routing.setEffectId(2); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(1); routing.setPathType(1); program.setRouting6(routing); routing = new RoutingData(); routing.setEffectId(4); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(2); routing.setPathType(1); program.setRouting7(routing); routing = new RoutingData(); routing.setEffectId(7); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting8(routing); program.setEffect1(new ShiftDual()); program.setEffect2(new VolumeDual()); program.setDelay(new DelayDual()); program.setReverb(new Ambience()); program.setEq(new EqPedalVol()); program.setGain(new Overdrive()); String expected = "I=G=2=========R=O\n" + " |=E=D=1=c=|"; assertEquals(expected, RoutingPrinter.print(program)); } /** * Test printing an invalid routing where it splits into two routes but * never combines again. 
* * @throws PrintException if an error is encountered while printing */ @Test(expected = PrintException.class) public void testInvalidRouting() throws PrintException { Program program = new Program(); RoutingData routing = new RoutingData(); routing.setEffectId(8); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting0(routing); routing = new RoutingData(); routing.setEffectId(5); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting1(routing); routing = new RoutingData(); routing.setEffectId(2); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting2(routing); routing = new RoutingData(); routing.setEffectId(6); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(0); routing.setPathType(0); program.setRouting3(routing); routing = new RoutingData(); routing.setEffectId(0); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(3); routing.setPathType(0); program.setRouting4(routing); routing = new RoutingData(); routing.setEffectId(3); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(1); routing.setPathType(1); program.setRouting5(routing); routing = new RoutingData(); routing.setEffectId(4); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(1); routing.setPathType(1); program.setRouting6(routing); routing = new RoutingData(); routing.setEffectId(1); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(1); routing.setPathType(1); program.setRouting7(routing); routing = new RoutingData(); routing.setEffectId(7); routing.setUpperInputConnection(0); routing.setLowerInputConnection(0); routing.setRouting(1); 
routing.setPathType(1); program.setRouting8(routing); RoutingPrinter.print(program); } }<|fim▁end|>
routing.setEffectId(0); routing.setUpperInputConnection(0);
<|file_name|>FizzyText.js<|end_file_name|><|fim▁begin|>/* Credits: Most of the original code seems to have been written by George Michael Brower. The changes I've made include adding background particle animations, text placement and modification, and integration with a sparkfun heart rate monitor by using Pubnub and johnny-five. INSTRUCTIONS - npm install [email protected] - npm install johnny-five - node Board.js to hook up to johnnyfive */ function FizzyText(message) { var that = this; // These are the variables that we manipulate with gui-dat. // Notice they're all defined with "this". That makes them public. // Otherwise, gui-dat can't see them. this.growthSpeed = 0.98; // how fast do particles change size? // this.maxSize = getRandomIntInclusive(3, 4); // how big can they get? this.maxSize = 1.3; this.noiseStrength = 1.9; // how turbulent is the flow? this.bgNoiseStrength = 10; this.speed = 0; // how fast do particles move? this.bgSpeed = 0.4; this.displayOutline = false; // should we draw the message as a stroke? this.framesRendered = 0; // this.color0 = "#00aeff"; // this.color1 = "#0fa954"; // this.color2 = "#54396e"; // this.color3 = "#e61d5f"; // this.color0 = "#ffdcfc"; // this.color1 = "#c8feff"; // this.color2 = "#ffffff"; // this.color3 = "#c8feff"; this.color0 = "#f0cf5b"; this.color1 = "#2abbf2"; this.color2 = "#660aaf"; this.color3 = "#f57596"; this.bgParticleColor = "#ffffff"; this.fontSize = 100; this.fontWeight = 800; <|fim▁hole|> this.__defineGetter__("message", function() { return message; }); this.__defineSetter__("message", function(m) { message = m; createBitmap(message); }); // We can even add functions to the DAT.GUI! As long as they have 0 argumets, // we can call them from the dat-gui panel. 
this.explode = function() { var mag = Math.random() * 30 + 30; for (var i in particles) { var angle = Math.random() * Math.PI * 2; particles[i].vx = Math.cos(angle) * mag; particles[i].vy = Math.sin(angle) * mag; } }; //////////////////////////////// var _this = this; var width = window.innerWidth; var height = window.innerHeight; // var textAscent = Math.random() * height; // for trans var textAscent = height / 2; // for cisco // var textOffsetLeft = Math.random() * width; var textOffsetLeft = 0; var noiseScale = 300; var frameTime = 30; // Keep the message within the canvas height bounds while ((textAscent > height - 100) || textAscent < 100) { textAscent = Math.random() * height; } var colors = [_this.color0, _this.color1, _this.color2, _this.color3]; // This is the context we use to get a bitmap of text using the // getImageData function. var r = document.createElement('canvas'); var s = r.getContext('2d'); // This is the context we actually use to draw. var c = document.createElement('canvas'); var g = c.getContext('2d'); r.setAttribute('width', width); c.setAttribute('width', width); r.setAttribute('height', height); c.setAttribute('height', height); // Add our demo to the HTML document.getElementById('fizzytext').appendChild(c); // Stores bitmap image var pixels = []; // Stores a list of particles var particles = []; var bgParticles = []; // Set g.font to the same font as the bitmap canvas, incase we want to draw some outlines var fontAttr = _this.fontWeight + " " + _this.fontSize + "px helvetica, arial, sans-serif"; s.font = g.font = fontAttr; // Instantiate some particles for (var i = 0; i < 2000; i++) { particles.push(new Particle(Math.random() * width, Math.random() * height)); } // 2nd perlin field for (var i = 0; i < 1000; i++) { // 10k particles bgParticles.push(new bgParticle(Math.random() * width, Math.random() * height)); } // This function creates a bitmap of pixels based on your message // It's called every time we change the message property. 
var createBitmap = function(msg) { s.fillStyle = "#fff"; s.fillRect(0, 0, width, height); s.fillStyle = "#222"; // Keep the message within canvas width bounds var msgWidth = s.measureText(msg).width; // while (textOffsetLeft + msgWidth > widthw) { // // textOffsetLeft = Math.random() * width; // } textOffsetLeft = (width - msgWidth) / 2; s.fillText(msg, textOffsetLeft, textAscent); // Pull reference var imageData = s.getImageData(0, 0, width, height); pixels = imageData.data; }; // Called once per frame, updates the animation. var render = function() { that.framesRendered++; // g.clearRect(0, 0, width, height); // Set the shown canvas background as black g.rect(0, 0, width, height); g.fillStyle = "black"; // for trans // g.fillStyle = "#eee"; // for cisco g.fill(); if (_this.displayOutline) { g.globalCompositeOperation = "source-over"; // g.strokeStyle = "#000"; // for trans g.strokeStyle = "#fff"; g.font = _this.fontSize + "px helvetica, arial, sans-serif"; // took out font weight g.lineWidth = .5; g.strokeText(message, textOffsetLeft, textAscent); } g.globalCompositeOperation = "darker"; // Choose particle color for (var i = 0; i < particles.length; i++) { g.fillStyle = colors[i % colors.length]; particles[i].render(); } // Choose bg particle color (white for testing) for (var i = 0; i < bgParticles.length; i++) { g.fillStyle = _this.bgParticleColor; bgParticles[i].render(); } }; // Func tells me where x, y is for each pixel of the text // Returns x, y coordinates for a given index in the pixel array. 
var getPosition = function(i) { return { x: (i - (width * 4) * Math.floor(i / (width * 4))) / 4, y: Math.floor(i / (width * 4)) }; }; // Returns a color for a given pixel in the pixel array var getColor = function(x, y) { var base = (Math.floor(y) * width + Math.floor(x)) * 4; var c = { r: pixels[base + 0], g: pixels[base + 1], b: pixels[base + 2], a: pixels[base + 3] }; return "rgb(" + c.r + "," + c.g + "," + c.b + ")"; }; // This calls the setter we've defined above, so it also calls // the createBitmap function this.message = message; // Set the canvas bg // document.getElementById('fizzytext').style.backgroundColor = colors[Math.floor(Math.random() * 4)] function resizeCanvas() { r.width = window.innerWidth; c.width = window.innerWidth; r.height = window.innerHeight; c.height = window.innerHeight; } var loop = function() { // Reset color array colors = [_this.color0, _this.color1, _this.color2, _this.color3]; // Change colors from dat.gui s.font = g.font = _this.fontWeight + " " + _this.fontSize + "px helvetica, arial, sans-serif"; createBitmap(message); // _this.fontSize += 1; resizeCanvas(); render(); requestAnimationFrame(loop); } // This calls the render function every 30ms loop(); ///////////////////////////////////////////// // This class is responsible for drawing and moving those little // colored dots. function Particle(x, y, c) { // Position this.x = x; this.y = y; // Size of particle this.r = 0; // This velocity is used by the explode function. this.vx = 0; this.vy = 0; this.constrain = function(v, o1, o2) { if (v < o1) v = o1; else if (v > o2) v = o2; return v; }; // Called every frame this.render = function () { // What color is the pixel we're sitting on top of? var c = getColor(this.x, this.y); // Where should we move? var angle = noise(this.x / noiseScale, this.y / noiseScale) * _this.noiseStrength; // Are we within the boundaries of the image? 
var onScreen = this.x > 0 && this.x < width && this.y > 0 && this.y < height; var isBlack = c != "rgb(255,255,255)" && onScreen; // If we're on top of a black pixel, grow. // If not, shrink. if (isBlack) { this.r += _this.growthSpeed; } else { this.r -= _this.growthSpeed; } // This velocity is used by the explode function. this.vx *= 0.5; this.vy *= 0.5; // Change our position based on the flow field and our explode velocity. this.x += Math.cos(angle) * _this.speed + this.vx; this.y += -Math.sin(angle) * _this.speed + this.vy; // this.r = 3; // debugger // console.log(DAT.GUI.constrain(this.r, 0, _this.maxSize)); this.r = this.constrain(this.r, 0, _this.maxSize); // If we're tiny, keep moving around until we find a black pixel. if (this.r <= 0) { this.x = Math.random() * width; this.y = Math.random() * height; return; // Don't draw! } // Draw the circle. g.beginPath(); g.arc(this.x, this.y, this.r, 0, Math.PI * 2, false); g.fill(); } } function bgParticle(x, y, c) { // Position this.x = x; this.y = y; // Size of particle this.r = 0; // This velocity is used by the explode function. this.vx = 0; this.vy = 0; this.constrain = function(v, o1, o2) { if (v < o1) v = o1; else if (v > o2) v = o2; return v; }; // Called every frame this.render = function () { // What color is the pixel we're sitting on top of? var c = getColor(this.x, this.y); // Where should we move? var angle = noise(this.x / noiseScale, this.y / noiseScale) * _this.bgNoiseStrength; // Are we within the boundaries of the image? var onScreen = this.x > 0 && this.x < width && this.y > 0 && this.y < height; var isBlack = c != "rgb(255,255,255)" && onScreen; // If we're on top of a black pixel, grow. // If not, shrink. 
if (isBlack) { this.r -= _this.growthSpeed / 2; // this.r -= Math.abs(Math.sin(_this.growthSpeed)); } else { // this.r += _this.growthSpeed / 2; this.r += Math.abs(Math.sin(_this.growthSpeed)); } // if not on screen respawn somewhere random if (!onScreen) { this.x = Math.random() * width; this.y = Math.random() * height; } // This velocity is used by the explode function. this.vx *= 0.5; this.vy *= 0.5; // Change our position based on the flow field and our explode velocity. this.x += Math.cos(angle) * _this.bgSpeed + this.vx; this.y += -Math.sin(angle) * _this.bgSpeed + this.vy; // this.r = 3; // debugger // console.log(DAT.GUI.constrain(this.r, 0, _this.maxSize)); this.r = this.constrain(this.r, 0, 2); // If we're tiny, keep moving around until we find a black pixel. if (this.r <= 0) { this.x = Math.random() * width; this.y = Math.random() * height; return; // Don't draw! } // Draw the circle. g.beginPath(); g.arc(this.x, this.y, this.r, 0, Math.PI * 2, false); g.fill(); } } } function getRandomIntInclusive(min, max) { min = Math.ceil(min); max = Math.floor(max); return Math.floor(Math.random() * (max - min + 1)) + min; }<|fim▁end|>
// __defineGetter__ and __defineSetter__ make JavaScript believe that // we've defined a variable 'this.message'. This way, whenever we // change the message variable, we can call some more functions.
<|file_name|>network_json.go<|end_file_name|><|fim▁begin|>// +build json package net import ( "encoding/json" "github.com/v2ray/v2ray-core/common/serial" ) func (this *NetworkList) UnmarshalJSON(data []byte) error { var strlist serial.StringLiteralList if err := json.Unmarshal(data, &strlist); err != nil { return err } *this = NewNetworkList(strlist) return nil<|fim▁hole|>}<|fim▁end|>
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>use std::fs::File; use std::io::BufReader; use std::io::Error; use std::io::prelude::*; use std::path::Path; pub fn get_mnist_vector(fname: &str) -> Result<Vec<Vec<f32>>, Error> { let path = Path::new(fname); match File::open(&path) { Ok(file) => { let mut new_vec: Vec<Vec<f32>> = Vec::new(); let reader = BufReader::new(file); for line in reader.lines() { match line { Ok(s) => { new_vec.push(mnist_test_to_vector(&s)); } Err(reason) => { return Err(reason); } }; } Ok(new_vec) } Err(reason) => Err(reason), } } pub fn mnist_test_to_vector(line: &str) -> Vec<f32> { line.trim().split(' ').map(|instr| instr.parse().unwrap()).collect() } pub mod xvecs { extern crate byteorder; use std::fs::File; use std::path::Path; use std::io::{BufReader, Result}; use self::byteorder::{ReadBytesExt, LittleEndian}; pub fn read_fvecs_file(fname: &str) -> Result<Vec<Vec<f32>>> { let path = Path::new(fname); let f = try!(File::open(path)); let mut br = BufReader::new(f); let mut output_vec = Vec::new(); while let Ok(i) = br.read_u32::<LittleEndian>() {<|fim▁hole|> let mut line_vec = vec![0.0 as f32; ind]; for j in 0..ind { line_vec[j] = try!(br.read_f32::<LittleEndian>()); } output_vec.push(line_vec); } Ok(output_vec) } pub fn read_ivecs_file(fname: &str) -> Result<Vec<Vec<i32>>> { let path = Path::new(fname); let f = try!(File::open(path)); let mut br = BufReader::new(f); let mut output_vec = Vec::new(); while let Ok(i) = br.read_u32::<LittleEndian>() { let ind = i as usize; let mut line_vec = vec![0 as i32; ind]; for j in 0..ind { line_vec[j] = try!(br.read_i32::<LittleEndian>()); } output_vec.push(line_vec); } Ok(output_vec) } } #[cfg(test)] mod tests { use super::*; use super::xvecs::*; #[test] fn test_read_mnist_file() { let fname: &str = "./mnist1k.dts"; match get_mnist_vector(fname) { Ok(v) => println!("{:?}", v), Err(reason) => panic!("couldn't open because {}", reason), } } #[test] fn test_mnist_line_to_vector() { let my_vec = 
vec![0.0000, 1.0000, 0.00000, 0.20000]; let my_string = "0.0000 1.0000 0.00000 0.20000"; assert!(my_vec == mnist_test_to_vector(&my_string)); } #[test] fn test_read_fvecs_file() { let y = read_fvecs_file("./sift_query.fvecs").unwrap(); println!("{} vectors, length of first is {}", y.len(), y[0].len()); } }<|fim▁end|>
let ind = i as usize;
<|file_name|>pomodoro.py<|end_file_name|><|fim▁begin|>import CursesTimer import Audio import Utils import database import datetime class Pomodoro: ''' pomodoro関連 ''' def __init__(self): self.db = database.database() self.t = CursesTimer.CursesTimer() self.u = Utils.Utils() self.NofPomodoro = 4 #休憩まで何回ポモドーロするか self.workTime = 25 #1ポモドーロ何分か self.sBreakTime = 5 #短い休みは何分 self.lBreakTime = 15 #長い休みは何分 self.minute = 60 #1分間は self.workComment = "Working" self.shortBreakComment = "Break" self.longBreakComment = "LongBreak" self.audio = Audio.Audio() self.audio.setAudio_file("bell.mp3") self.pomodoroCount = self.db.getTodaysPomodoro() #今日何回ポモドーロしたか self.dayend = 0 #1日の終わりを何時にするか(0~23) today = datetime.datetime.today() #今日の日付 #次の1日の終わり self.nextEndDatetime = datetime.datetime.today().\ replace(hour = self.dayend, minute = 0, second = 0, microsecond = 0) #もし今のhourがdayendより前ならenddatetimeは今日 if today.hour < self.dayend: self.nextEndDatetime = self.nextEndDatetime.replace(day = today.day) #もし今のhourがdayendより後ならenddatatimeは明日 if today.hour >= self.dayend: self.nextEndDatetime = self.nextEndDatetime.replace(day = today.today().day + 1) def endOfDay(self): ''' 1日が終わった時に呼び出される関数 countをリセットし、次の1日が終わる時間を作る ''' self.initPomodoroCount();<|fim▁hole|> self.nextEndDatetime = self.nextEndDatetime.replace(day = today.day) #もし今のhourがdayendより後ならenddatatimeは明日 if today.hour >= self.dayend: self.nextEndDatetime = self.nextEndDatetime.replace(day = today.today().day + 1) def initPomodoroCount(self): self.pomodoroCount = 0 def setNofPomodoro(self, num): ''' ポモドーロ何回で長い休みにするかを設定 ''' self.NofPomodoro = num def setworkTime(self, m): ''' 1ポモドーロは何分にするかを設定 ''' self.workTime = m def setsBreakTime(self, m): ''' 短い休みを何分にするかを設定 ''' self.sBreakTime = m def setlBreakTime(self, m): ''' 長い休みを何分にするかを設定 ''' self.lBreakTime = m def start_work(self): ''' 仕事タイマー起動 ''' self.t.initStrings() self.t.setStrings(self.workComment) self.t.setStrings(self.u.count_str(self.pomodoroCount)) return 
self.t.start_Timer(self.workTime*self.minute) def start_s_Break(self): ''' おやすみタイマー起動 ''' self.t.initStrings() self.t.setStrings(self.shortBreakComment) self.t.setStrings(self.u.count_str(self.pomodoroCount)) return self.t.start_Timer(self.sBreakTime*self.minute) def start_l_Break(self): ''' 長いおやすみタイマー起動 ''' self.t.initStrings() self.t.setStrings(self.longBreakComment) self.t.setStrings(self.u.count_str(self.pomodoroCount)) return self.t.start_Timer(self.lBreakTime*self.minute) def pomodoro(self): ''' pomodoro本体 ''' bFlag = False while 1: n = 0 while n < self.NofPomodoro: if self.start_work() == False: bFlag = True break self.audio.subthread_play() self.pomodoroCount += 1 self.db.insertPomodoro() #もし1日が終わったら if datetime.datetime.today() > self.nextEndDatetime: self.endOfDay() if self.start_s_Break() == False: bFlag = True break self.audio.subthread_play() n += 1 if bFlag == True: break if self.start_l_Break() == False: break self.audio.subthread_play() def show_pomodoro(self): ''' pomodoroした回数を表示する ''' #今週の配列を初期化 thisWeek = [0 for i in range(7)] #先週の配列を初期化 lastWeek = [0 for i in range(7)] #今日の曜日を取得 day = datetime.date.today() dayofweek = day.weekday() #今週分の回数を取得 while dayofweek >= 0: thisWeek[dayofweek] = self.db.getdaysPomodoro(day.year, day.month, day.day) dayofweek -= 1 day = day - datetime.timedelta(days=1) for i in range(6, -1, -1): lastWeek[i] = self.db.getdaysPomodoro(day.year, day.month, day.day) day = day - datetime.timedelta(days=1) print("今週") print("月 火 水 木 金 土 日") for i in thisWeek: print('{0:02d}'.format(i), end=" ") print() print("先週") print("月 火 水 木 金 土 日") for i in lastWeek: print('{0:02d}'.format(i), end=" ") print()<|fim▁end|>
today = datetime.datetime.today() #もし今のhourがdayendより前ならenddatetimeは今日 if today.hour < self.dayend:
<|file_name|>test_logger.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import logging from unittest import mock import olympia.core.logger from olympia.amo.tests import TestCase from olympia.users.models import UserProfile class LoggerTests(TestCase): @mock.patch('olympia.core.get_remote_addr', lambda: '127.0.0.1') @mock.patch('olympia.core.get_user', lambda: UserProfile(username=u'fôo')) def test_get_logger_adapter(self): log = olympia.core.logger.getLogger('test') expected_kwargs = { 'extra': { 'REMOTE_ADDR': '127.0.0.1', 'USERNAME': u'fôo', } } assert log.process('test msg', {}) == ('test msg', expected_kwargs) @mock.patch('olympia.core.get_remote_addr', lambda: '127.0.0.1') @mock.patch('olympia.core.get_user', lambda: None)<|fim▁hole|> 'REMOTE_ADDR': '127.0.0.1', 'USERNAME': '<anon>', } } assert log.process('test msg', {}) == ('test msg', expected_kwargs) @mock.patch('olympia.core.get_remote_addr', lambda: None) @mock.patch('olympia.core.get_user', lambda: UserProfile(username='bar')) def test_logger_adapter_addr_is_none(self): log = olympia.core.logger.getLogger('test') expected_kwargs = { 'extra': { 'REMOTE_ADDR': '', 'USERNAME': 'bar', } } assert log.process('test msg', {}) == ('test msg', expected_kwargs) def test_formatter(self): formatter = olympia.core.logger.Formatter() record = logging.makeLogRecord({}) formatter.format(record) assert 'USERNAME' in record.__dict__ assert 'REMOTE_ADDR' in record.__dict__ def test_json_formatter(self): formatter = olympia.core.logger.JsonFormatter() record = logging.makeLogRecord({}) # These would be set by the adapter. record.__dict__['USERNAME'] = 'foo' record.__dict__['REMOTE_ADDR'] = '127.0.0.1' formatter.format(record) assert record.__dict__['uid'] == 'foo' assert record.__dict__['remoteAddressChain'] == '127.0.0.1'<|fim▁end|>
def test_logger_adapter_user_is_none(self): log = olympia.core.logger.getLogger('test') expected_kwargs = { 'extra': {
<|file_name|>timer.js<|end_file_name|><|fim▁begin|>export class Timer { /** * diff 2 `Date` in Millisecond * @param {Date} since * @param {Date} until */ static diff(since, until) { return (until.getTime() - since.getTime()); } constructor() { /** time elapsed in millisecond */ this.sum = 0; this.running = false; this.begin = new Date(0); } set(millisecond) { this.sum = Math.trunc(millisecond); if (this.running) { this.begin = new Date(); } } offset(offset) { const target = this.get() + Math.trunc(offset); this.set(target); } get() { if (!this.running) return this.sum; return this.sum + Timer.diff(this.begin, new Date());<|fim▁hole|> start() { if (!this.running) { this.running = true; this.begin = new Date(); } } stop() { if (this.running) { this.running = false; this.sum += Timer.diff(this.begin, new Date()); } } /** clear elapsed time, then start */ clear() { this.sum = 0; this.running = true; this.begin = new Date(); } /** clear elapsed time, but do __NOT__ start */ reset() { this.sum = 0; this.running = false; this.begin = new Date(0); } }<|fim▁end|>
}
<|file_name|>l3_match.py<|end_file_name|><|fim▁begin|>import re import string_example """ One small letter, surrounded by EXACTLY three big bodyguards on each of its sides. """ class StingMatch(object): """String match """ def __init__(self): self.msg = string_example.string def match(self): Pattern = '[^A-Z][A-Z]{3}[a-z][A-Z]{3}[^A-Z]' result = re.findall(Pattern, self.msg) print (''.join(x[4:5] for x in result)) def main(): test = StingMatch() test.match() if __name__ == '__main__':<|fim▁hole|><|fim▁end|>
main()
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#!python2.7 ''' Created on Jul 13, 2014 @author: cilia ''' import os.path import os import time import sys import tempfile from utils.path import loadModule import utils conf = utils.get_conf() def run_task(task_conf): # import pdb; pdb.set_trace() print 'run task:', task_conf['task name'] task_func = loadModule(task_conf['task method']) task_param = eval(task_conf['task parameters']) task_func(**task_param) def main(argv):<|fim▁hole|> task_conf_name = argv[0] if os.path.exists(task_conf_name): task_confs = eval(open(task_conf_name).read()) else: task_conf_name += '%s%s%s' % (os.path.dirname(__file__), '../conf/', task_conf_name) if os.path.exists(task_conf_name): task_confs = eval(open(task_conf_name)) else: print 'task configuration file does not exist' sys.exit(3) for task_conf in task_confs: run_task(task_conf) if not conf['debug']: 'clean cache files' cache_path = tempfile.gettempdir() file_names = [file_name for file_name in os.listdir(cache_path) if file_name.startswith('rupee.cache')] for file_name in file_names: print 'remove file:', cache_path+'/'+file_name os.remove(cache_path+'/'+file_name) if __name__ == '__main__': main(sys.argv[1:])<|fim▁end|>
<|file_name|>timeandtweet.py<|end_file_name|><|fim▁begin|>from twitter import * import simplejson import serial import datetime import time import threading QUIT = 0 prevtweet = "" def centerstring(string,width): """ Pad a string to a specific width """ return " "*((width-len(string))/2)+string def padstring(string,width): """pad a string to a maximum length""" if len(string) > width: result = string[0:width] else: result = string + " "*(width-len(string)) return result def runtime(): rangthebell = 0 while QUIT == 0: # dates = centerstring(datetime.datetime.now().strftime("%B %d, %Y"),20) # times = centerstring(datetime.datetime.now().strftime("%I:%M:%S %p"),20) # # p.write("\x80") # p.write("%s\r%s" % (dates,times)) dates = datetime.datetime.now().isoformat(' ')[0:19] p.write("\x80") # move to 0,0 on the display p.write(padstring(dates,20)) # make sure to have a nice clean line by filling it all out if datetime.datetime.now().strftime("%M")[-1:] == "5": if rangthebell == 0: p.write("\xD2\xE1\xD1\xE4\xD2\xE1") # do an anoying beep at the minute mark rangthebell = 1 else: rangthebell = 0 time.sleep(1) def checktweet(): turl = 'http://api.twitter.com/1.1/search/tweets.json?q=' CONSUMER_KEY = 'xxx' CONSUMER_SECRET = 'xxx' OAUTH_TOKEN = 'XXX' OAUTH_SECRET = 'XXX' t = Twitter( auth=OAuth(OAUTH_TOKEN,OAUTH_SECRET,CONSUMER_KEY,CONSUMER_SECRET) ) prevtweet = "" while QUIT == 0: twitter_results = t.statuses.home_timeline() tweet = twitter_results[0]['text'].encode('ascii','ignore') # convert to ascii and ignore unicode conv. 
errors if prevtweet != tweet: # p.write("\xA8") # second line 0 position (line 3 on the display) p.write("\x94") # first line 0 position (line 2 on the display) p.write(padstring(tweet,60)) p.write("\xD2\xE7\xD1\xE1\xD2\xE5") print "-"*150 print "From: %s" % twitter_results[0]['user']['screen_name'] print tweet print "-"*150 prevtweet = tweet seconds = 0 while seconds < 180: time.sleep (1) seconds += 1 p.write("\xCD") p.write("%03d" % (180-seconds)) if QUIT: break p.write("\xD0\xE7\xE2\xE2") #time.sleep(60) if __name__ == "__main__": # open up the serial port p = serial.Serial("/dev/ttyAMA0", baudrate=19200, timeout=2) p.write("starting the clock!") # clear the screen and get ready to display with backlight on p.write("\x16") # turns it on with no cursor blink p.write("\x11") # turn on the back light p.write("\x0C") # clear the screen. Must wait 5ms before we move on t1 = threading.Thread(target = runtime, args=()) t2 = threading.Thread(target = checktweet, args=()) t1.start() t2.start() try: while 1: time.sleep(.1) except KeyboardInterrupt: print "Quiting" QUIT = 1 print "Exiting clock" t1.join() print "Exiting tweet" t2.join() print "Exits complete" p.write("\x15") # turns display off, but not backlight p.write("\x12") # turns backlight off<|fim▁hole|> pass print 'exiting'<|fim▁end|>
p.close() QUIT = 1
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! Tkacz, a library pub mod config; pub mod query; pub mod store; pub mod taxonomy; pub mod thing; pub mod types; pub use crate::query::{Query, QueryParser}; pub use crate::store::Store; pub use crate::taxonomy::{ContentSource, Taxon}; pub use crate::thing::Thing; pub use crate::types::{Class, Named, NamedType, NamedValue, Object, Type, Value}; <|fim▁hole|>pub const VERSION: &'static str = env!("CARGO_PKG_VERSION"); pub type Id = i64; pub const TKACZ_DIR_NAME: &str = "tkacz";<|fim▁end|>
<|file_name|>LibflacAudioRenderer.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License");<|fim▁hole|> * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.ext.flac; import android.os.Handler; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.audio.AudioProcessor; import com.google.android.exoplayer2.audio.AudioRendererEventListener; import com.google.android.exoplayer2.audio.SimpleDecoderAudioRenderer; import com.google.android.exoplayer2.drm.DrmSessionManager; import com.google.android.exoplayer2.drm.ExoMediaCrypto; import com.google.android.exoplayer2.util.MimeTypes; /** * Decodes and renders audio using the native Flac decoder. */ public class LibflacAudioRenderer extends SimpleDecoderAudioRenderer { private static final int NUM_BUFFERS = 16; public LibflacAudioRenderer() { this(null, null); } /** * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output. */ public LibflacAudioRenderer( Handler eventHandler, AudioRendererEventListener eventListener, AudioProcessor... 
audioProcessors) { super(eventHandler, eventListener, audioProcessors); } @Override protected int supportsFormatInternal(DrmSessionManager<ExoMediaCrypto> drmSessionManager, Format format) { if (!MimeTypes.AUDIO_FLAC.equalsIgnoreCase(format.sampleMimeType)) { return FORMAT_UNSUPPORTED_TYPE; } else if (!supportsOutput(format.channelCount, C.ENCODING_PCM_16BIT)) { return FORMAT_UNSUPPORTED_SUBTYPE; } else if (!supportsFormatDrm(drmSessionManager, format.drmInitData)) { return FORMAT_UNSUPPORTED_DRM; } else { return FORMAT_HANDLED; } } @Override protected FlacDecoder createDecoder(Format format, ExoMediaCrypto mediaCrypto) throws FlacDecoderException { return new FlacDecoder( NUM_BUFFERS, NUM_BUFFERS, format.maxInputSize, format.initializationData); } }<|fim▁end|>
<|file_name|>Slider.tsx<|end_file_name|><|fim▁begin|>import React, { FunctionComponent } from 'react'; import { Range, createSliderWithTooltip } from 'rc-slider'; import { cx, css } from 'emotion'; import { Global, css as cssCore } from '@emotion/core'; import { stylesFactory } from '../../themes'; import { GrafanaTheme } from '@savantly/sprout-api'; import { useTheme } from '../../themes/ThemeContext'; import { Orientation } from '../../types/orientation'; export interface Props { min: number; max: number; orientation?: Orientation; /** Set current positions of handle(s). If only 1 value supplied, only 1 handle displayed. */ value?: number[]; reverse?: boolean; step?: number; tooltipAlwaysVisible?: boolean; formatTooltipResult?: (value: number) => number | string; onChange?: (values: number[]) => void; onAfterChange?: (values: number[]) => void; } const getStyles = stylesFactory((theme: GrafanaTheme, isHorizontal: boolean) => { const trackColor = theme.isLight ? theme.palette.gray5 : theme.palette.dark6; const container = isHorizontal ? 
css` width: 100%; margin: ${theme.spacing.lg} ${theme.spacing.sm} ${theme.spacing.sm} ${theme.spacing.sm}; ` : css` height: 100%; margin: ${theme.spacing.sm} ${theme.spacing.lg} ${theme.spacing.sm} ${theme.spacing.sm}; `; return { container, slider: css` .rc-slider-vertical .rc-slider-handle { margin-top: -10px; } .rc-slider-handle { border: solid 2px ${theme.palette.blue77}; background-color: ${theme.palette.blue77}; } .rc-slider-handle:hover { border-color: ${theme.palette.blue77}; } .rc-slider-handle:focus { border-color: ${theme.palette.blue77}; box-shadow: none; } .rc-slider-handle:active { border-color: ${theme.palette.blue77}; box-shadow: none;<|fim▁hole|> } .rc-slider-handle-click-focused:focus { border-color: ${theme.palette.blue77}; } .rc-slider-dot-active { border-color: ${theme.palette.blue77}; } .rc-slider-track { background-color: ${theme.palette.blue77}; } .rc-slider-rail { background-color: ${trackColor}; border: 1px solid ${trackColor}; } `, /** Global component from @emotion/core doesn't accept computed classname string returned from css from emotion. 
* It accepts object containing the computed name and flattened styles returned from css from @emotion/core * */ tooltip: cssCore` body { .rc-slider-tooltip { cursor: grab; user-select: none; z-index: ${theme.zIndex.tooltip}; } .rc-slider-tooltip-inner { color: ${theme.colors.text}; background-color: transparent !important; border-radius: 0; box-shadow: none; } .rc-slider-tooltip-placement-top .rc-slider-tooltip-arrow { display: none; } .rc-slider-tooltip-placement-top { padding: 0; } } `, }; }); export const Slider: FunctionComponent<Props> = ({ min, max, onChange, onAfterChange, orientation = 'horizontal', reverse, step, formatTooltipResult, value, tooltipAlwaysVisible = true, }) => { const isHorizontal = orientation === 'horizontal'; const theme = useTheme(); const styles = getStyles(theme, isHorizontal); const RangeWithTooltip = createSliderWithTooltip(Range); return ( <div className={cx(styles.container, styles.slider)}> {/** Slider tooltip's parent component is body and therefore we need Global component to do css overrides for it. */} <Global styles={styles.tooltip} /> <RangeWithTooltip tipProps={{ visible: tooltipAlwaysVisible, placement: isHorizontal ? 'top' : 'right', }} min={min} max={max} step={step} defaultValue={value || [min, max]} tipFormatter={(value: number) => (formatTooltipResult ? formatTooltipResult(value) : value)} onChange={onChange} onAfterChange={onAfterChange} vertical={!isHorizontal} reverse={reverse} /> </div> ); }; Slider.displayName = 'Slider';<|fim▁end|>
<|file_name|>Wall.java<|end_file_name|><|fim▁begin|>package com.sadc.game.gameobject.trackobject; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.sadc.game.GameConstants; import com.sadc.game.gameobject.GameUtils; import com.sadc.game.gameobject.Player; /** * @author f536985 (Tom Farello) */ public class Wall extends TrackObject { public Wall(float distance, float angle) { setActive(true); setDistance(distance); setAngle(angle); setWidth(22); setTexture(new Texture("brickWall.png")); } @Override public void update(float delta, Player player) { if (collide(player)) { player.crash(); setActive(false); }<|fim▁hole|> } @Override public void draw(float delta, float playerDistance, SpriteBatch spriteBatch) { float drawDistance = (float)Math.pow(2 , playerDistance - (getDistance())); GameUtils.setColorByDrawDistance(drawDistance, spriteBatch); spriteBatch.draw(getTexture(), GameConstants.SCREEN_WIDTH / 2 - 50, 15, 50, GameConstants.SCREEN_HEIGHT / 2 - 15, 100, 70, drawDistance, drawDistance, getAngle(), 0, 0, 100, 70, false, false); } }<|fim▁end|>
<|file_name|>s3-object-lambda.rs<|end_file_name|><|fim▁begin|>/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ // snippet-start:[s3.rust.s3-object-lambda-packages] use aws_endpoint::partition; use aws_endpoint::partition::endpoint; use aws_endpoint::{CredentialScope, Partition, PartitionResolver}; use aws_sdk_s3 as s3; // snippet-end:[s3.rust.s3-object-lambda-packages] use aws_sdk_s3::{Client, Error, PKG_VERSION}; use structopt::StructOpt; #[derive(Debug, StructOpt)] struct Opt { /// Your account #. #[structopt(short, long)] account: String, /// The endpoint. #[structopt(short, long)] endpoint: String, /// Whether to display additional information. #[structopt(short, long)] verbose: bool, } // Shows your buckets in the endpoint. async fn show_buckets(client: &Client) -> Result<(), Error> { let resp = client.list_buckets().send().await?; let buckets = resp.buckets().unwrap_or_default(); let num_buckets = buckets.len(); for bucket in buckets { println!("{}", bucket.name().as_deref().unwrap_or_default()); } println!(); println!("Found {} buckets.", num_buckets); Ok(()) } // If you're using a FIPs region, add `-fips` after `s3-object-lambda`. async fn make_uri(endpoint: &str, account: &str) -> &'static str { let mut uri = endpoint.to_string(); uri.push('-'); uri.push_str(account); uri.push_str(".s3-object-lambda.{region}.amazonaws.com"); Box::leak(uri.into_boxed_str()) } /// Lists your Amazon S3 buckets in the specified endpoint. /// # Arguments /// /// * `-a ACCOUNT` - Your AWS account number. /// * `-e ENDPOINT` - The endpoint in which the client is created. /// * `[-v]` - Whether to display additional information. 
#[tokio::main] async fn main() -> Result<(), Error> { tracing_subscriber::fmt::init(); let Opt { account, endpoint, verbose, } = Opt::from_args(); println!(); if verbose { println!("S3 client version: {}", PKG_VERSION); println!("Account #: {}", &account); println!("Endpoint: {}", &endpoint); <|fim▁hole|> // snippet-start:[s3.rust.s3-object-lambda] // Create an endpoint resolver that creates S3 Object Lambda endpoints. let resolver = PartitionResolver::new( Partition::builder() .id("aws") // This regex captures the region prefix, such as the "us" in "us-east-1", // from the client's current region. This captured value is later fed into // the uri_template. // If your region isn't covered by the regex below, // you can find additional region capture regexes for other regions // at https://github.com/awslabs/aws-sdk-rust/blob/main/sdk/s3/src/aws_endpoint.rs. .region_regex(r#"^(us|eu|ap|sa|ca|me|af)\-\w+\-\d+$"#) .default_endpoint(endpoint::Metadata { uri_template: make_uri(&endpoint, &account).await, protocol: endpoint::Protocol::Https, signature_versions: endpoint::SignatureVersion::V4, // Important: The following overrides the credential scope so that request signing works. credential_scope: CredentialScope::builder() .service("s3-object-lambda") .build(), }) .regionalized(partition::Regionalized::Regionalized) .build() .expect("valid partition"), vec![], ); // Load configuration and credentials from the environment. let shared_config = aws_config::load_from_env().await; // Create an S3 config from the shared config and override the endpoint resolver. let s3_config = s3::config::Builder::from(&shared_config) .endpoint_resolver(resolver) .build(); // Create an S3 client to send requests to S3 Object Lambda. let client = s3::Client::from_conf(s3_config); // snippet-end:[s3.rust.s3-object-lambda] show_buckets(&client).await }<|fim▁end|>
println!(); }
<|file_name|>SNAClassNames.java<|end_file_name|><|fim▁begin|>/* * @author Flavio Keller * * Copyright 2014 University of Zurich * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software<|fim▁hole|> * */ package com.signalcollect.sna.constants; /** * Enumeration for the different classes that can occur * when running a SNA method algorithm */ public enum SNAClassNames { DEGREE("Degree"), PAGERANK("PageRank"), CLOSENESS("Closeness"), BETWEENNESS("Betweenness"), PATH("Path"), LOCALCLUSTERCOEFFICIENT( "LocalClusterCoefficient"), TRIADCENSUS("Triad Census"), LABELPROPAGATION( "Label Propagation") ; private final String className; SNAClassNames(String name) { this.className = name; } }<|fim▁end|>
* distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364) #![cfg_attr(stage0, feature(custom_attribute))] #![crate_name = "rustc_privacy"] #![unstable(feature = "rustc_private", issue = "27812")] #![staged_api] #![crate_type = "dylib"] #![crate_type = "rlib"] #![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "https://doc.rust-lang.org/favicon.ico", html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(rustc_diagnostic_macros)] #![feature(rustc_private)] #![feature(staged_api)] #[macro_use] extern crate log; #[macro_use] extern crate syntax; extern crate rustc; use self::PrivacyResult::*; use self::FieldName::*; use std::mem::replace; use rustc::ast_map; use rustc::middle::def; use rustc::middle::privacy::ImportUse::*; use rustc::middle::privacy::LastPrivate::*; use rustc::middle::privacy::PrivateDep::*; use rustc::middle::privacy::{ExternalExports, ExportedItems, PublicItems}; use rustc::middle::ty::{self, Ty}; use rustc::util::nodemap::{NodeMap, NodeSet}; use syntax::ast; use syntax::ast_util::{is_local, local_def}; use syntax::codemap::Span; use syntax::visit::{self, Visitor}; type Context<'a, 'tcx> = (&'a ty::MethodMap<'tcx>, &'a def::ExportMap); /// Result of a checking operation - None => no errors were found. Some => an /// error and contains the span and message for reporting that error and /// optionally the same for a note about the error. 
type CheckResult = Option<(Span, String, Option<(Span, String)>)>; //////////////////////////////////////////////////////////////////////////////// /// The parent visitor, used to determine what's the parent of what (node-wise) //////////////////////////////////////////////////////////////////////////////// struct ParentVisitor { parents: NodeMap<ast::NodeId>, curparent: ast::NodeId, } impl<'v> Visitor<'v> for ParentVisitor { fn visit_item(&mut self, item: &ast::Item) { self.parents.insert(item.id, self.curparent); let prev = self.curparent; match item.node { ast::ItemMod(..) => { self.curparent = item.id; } // Enum variants are parented to the enum definition itself because // they inherit privacy ast::ItemEnum(ref def, _) => { for variant in &def.variants { // The parent is considered the enclosing enum because the // enum will dictate the privacy visibility of this variant // instead. self.parents.insert(variant.node.id, item.id); } } // Trait methods are always considered "public", but if the trait is // private then we need some private item in the chain from the // method to the root. In this case, if the trait is private, then // parent all the methods to the trait to indicate that they're // private. ast::ItemTrait(_, _, _, ref trait_items) if item.vis != ast::Public => { for trait_item in trait_items { self.parents.insert(trait_item.id, item.id); } } _ => {} } visit::walk_item(self, item); self.curparent = prev; } fn visit_foreign_item(&mut self, a: &ast::ForeignItem) { self.parents.insert(a.id, self.curparent); visit::walk_foreign_item(self, a); } fn visit_fn(&mut self, a: visit::FnKind<'v>, b: &'v ast::FnDecl, c: &'v ast::Block, d: Span, id: ast::NodeId) { // We already took care of some trait methods above, otherwise things // like impl methods and pub trait methods are parented to the // containing module, not the containing trait. 
if !self.parents.contains_key(&id) { self.parents.insert(id, self.curparent); } visit::walk_fn(self, a, b, c, d); } fn visit_impl_item(&mut self, ii: &'v ast::ImplItem) { // visit_fn handles methods, but associated consts have to be handled // here. if !self.parents.contains_key(&ii.id) { self.parents.insert(ii.id, self.curparent); } visit::walk_impl_item(self, ii); } fn visit_struct_def(&mut self, s: &ast::StructDef, _: ast::Ident, _: &'v ast::Generics, n: ast::NodeId) { // Struct constructors are parented to their struct definitions because // they essentially are the struct definitions. match s.ctor_id { Some(id) => { self.parents.insert(id, n); } None => {} } // While we have the id of the struct definition, go ahead and parent // all the fields. for field in &s.fields { self.parents.insert(field.node.id, self.curparent); } visit::walk_struct_def(self, s) } } //////////////////////////////////////////////////////////////////////////////// /// The embargo visitor, used to determine the exports of the ast //////////////////////////////////////////////////////////////////////////////// struct EmbargoVisitor<'a, 'tcx: 'a> { tcx: &'a ty::ctxt<'tcx>, export_map: &'a def::ExportMap, // This flag is an indicator of whether the previous item in the // hierarchical chain was exported or not. This is the indicator of whether // children should be exported as well. Note that this can flip from false // to true if a reexported module is entered (or an action similar). prev_exported: bool, // This is a list of all exported items in the AST. An exported item is any // function/method/item which is usable by external crates. This essentially // means that the result is "public all the way down", but the "path down" // may jump across private boundaries through reexport statements. exported_items: ExportedItems, // This sets contains all the destination nodes which are publicly // re-exported. 
This is *not* a set of all reexported nodes, only a set of // all nodes which are reexported *and* reachable from external crates. This // means that the destination of the reexport is exported, and hence the // destination must also be exported. reexports: NodeSet, // These two fields are closely related to one another in that they are only // used for generation of the 'PublicItems' set, not for privacy checking at // all public_items: PublicItems, prev_public: bool, } impl<'a, 'tcx> EmbargoVisitor<'a, 'tcx> { // There are checks inside of privacy which depend on knowing whether a // trait should be exported or not. The two current consumers of this are: // // 1. Should default methods of a trait be exported? // 2. Should the methods of an implementation of a trait be exported? // // The answer to both of these questions partly rely on whether the trait // itself is exported or not. If the trait is somehow exported, then the // answers to both questions must be yes. Right now this question involves // more analysis than is currently done in rustc, so we conservatively // answer "yes" so that all traits need to be exported. fn exported_trait(&self, _id: ast::NodeId) -> bool { true } } impl<'a, 'tcx, 'v> Visitor<'v> for EmbargoVisitor<'a, 'tcx> { fn visit_item(&mut self, item: &ast::Item) { let orig_all_pub = self.prev_public; self.prev_public = orig_all_pub && item.vis == ast::Public; if self.prev_public { self.public_items.insert(item.id); } let orig_all_exported = self.prev_exported; match item.node { // impls/extern blocks do not break the "public chain" because they // cannot have visibility qualifiers on them anyway ast::ItemImpl(..) | ast::ItemDefaultImpl(..) | ast::ItemForeignMod(..) => {} // Traits are a little special in that even if they themselves are // not public they may still be exported. ast::ItemTrait(..) 
=> { self.prev_exported = self.exported_trait(item.id); } // Private by default, hence we only retain the "public chain" if // `pub` is explicitly listed. _ => { self.prev_exported = (orig_all_exported && item.vis == ast::Public) || self.reexports.contains(&item.id); } } let public_first = self.prev_exported && self.exported_items.insert(item.id); match item.node { // Enum variants inherit from their parent, so if the enum is // public all variants are public unless they're explicitly priv ast::ItemEnum(ref def, _) if public_first => { for variant in &def.variants { self.exported_items.insert(variant.node.id); self.public_items.insert(variant.node.id); } } // Implementations are a little tricky to determine what's exported // out of them. Here's a few cases which are currently defined: // // * Impls for private types do not need to export their methods // (either public or private methods) // // * Impls for public types only have public methods exported // // * Public trait impls for public types must have all methods // exported. // // * Private trait impls for public types can be ignored // // * Public trait impls for private types have their methods // exported. I'm not entirely certain that this is the correct // thing to do, but I have seen use cases of where this will cause // undefined symbols at linkage time if this case is not handled. // // * Private trait impls for private types can be completely ignored ast::ItemImpl(_, _, _, _, ref ty, ref impl_items) => { let public_ty = match ty.node { ast::TyPath(..) => { match self.tcx.def_map.borrow().get(&ty.id).unwrap().full_def() { def::DefPrimTy(..) 
=> true, def => { let did = def.def_id(); !is_local(did) || self.exported_items.contains(&did.node) } } } _ => true, }; let tr = self.tcx.impl_trait_ref(local_def(item.id)); let public_trait = tr.clone().map_or(false, |tr| { !is_local(tr.def_id) || self.exported_items.contains(&tr.def_id.node) }); if public_ty || public_trait { for impl_item in impl_items { match impl_item.node { ast::ConstImplItem(..) => { if (public_ty && impl_item.vis == ast::Public) || tr.is_some() { self.exported_items.insert(impl_item.id); } } ast::MethodImplItem(ref sig, _) => { let meth_public = match sig.explicit_self.node { ast::SelfStatic => public_ty, _ => true, } && impl_item.vis == ast::Public; if meth_public || tr.is_some() { self.exported_items.insert(impl_item.id); } } ast::TypeImplItem(_) | ast::MacImplItem(_) => {} } } } } // Default methods on traits are all public so long as the trait // is public ast::ItemTrait(_, _, _, ref trait_items) if public_first => { for trait_item in trait_items { debug!("trait item {}", trait_item.id); self.exported_items.insert(trait_item.id); } } // Struct constructors are public if the struct is all public. ast::ItemStruct(ref def, _) if public_first => { match def.ctor_id { Some(id) => { self.exported_items.insert(id); } None => {} } // fields can be public or private, so lets check for field in &def.fields { let vis = match field.node.kind { ast::NamedField(_, vis) | ast::UnnamedField(vis) => vis }; if vis == ast::Public { self.public_items.insert(field.node.id); } } } ast::ItemTy(ref ty, _) if public_first => { if let ast::TyPath(..) = ty.node { match self.tcx.def_map.borrow().get(&ty.id).unwrap().full_def() { def::DefPrimTy(..) | def::DefTyParam(..) 
=> {}, def => { let did = def.def_id(); if is_local(did) { self.exported_items.insert(did.node); } } } } } _ => {} } visit::walk_item(self, item); self.prev_exported = orig_all_exported; self.prev_public = orig_all_pub; } fn visit_foreign_item(&mut self, a: &ast::ForeignItem) { if (self.prev_exported && a.vis == ast::Public) || self.reexports.contains(&a.id) { self.exported_items.insert(a.id); } } fn visit_mod(&mut self, m: &ast::Mod, _sp: Span, id: ast::NodeId) { // This code is here instead of in visit_item so that the // crate module gets processed as well. if self.prev_exported { assert!(self.export_map.contains_key(&id), "wut {}", id); for export in self.export_map.get(&id).unwrap() { if is_local(export.def_id) { self.reexports.insert(export.def_id.node); } } } visit::walk_mod(self, m) } } //////////////////////////////////////////////////////////////////////////////// /// The privacy visitor, where privacy checks take place (violations reported) //////////////////////////////////////////////////////////////////////////////// struct PrivacyVisitor<'a, 'tcx: 'a> { tcx: &'a ty::ctxt<'tcx>, curitem: ast::NodeId, in_foreign: bool, parents: NodeMap<ast::NodeId>, external_exports: ExternalExports, } enum PrivacyResult { Allowable, ExternallyDenied, DisallowedBy(ast::NodeId), } enum FieldName { UnnamedField(usize), // index // (Name, not Ident, because struct fields are not macro-hygienic) NamedField(ast::Name), } impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { // used when debugging fn nodestr(&self, id: ast::NodeId) -> String { self.tcx.map.node_to_string(id).to_string() } // Determines whether the given definition is public from the point of view // of the current item. 
fn def_privacy(&self, did: ast::DefId) -> PrivacyResult { if !is_local(did) { if self.external_exports.contains(&did) { debug!("privacy - {:?} was externally exported", did); return Allowable; } debug!("privacy - is {:?} a public method", did); return match self.tcx.impl_or_trait_items.borrow().get(&did) { Some(&ty::ConstTraitItem(ref ac)) => { debug!("privacy - it's a const: {:?}", *ac); match ac.container { ty::TraitContainer(id) => { debug!("privacy - recursing on trait {:?}", id); self.def_privacy(id) } ty::ImplContainer(id) => { match self.tcx.impl_trait_ref(id) { Some(t) => { debug!("privacy - impl of trait {:?}", id); self.def_privacy(t.def_id) } None => { debug!("privacy - found inherent \ associated constant {:?}", ac.vis); if ac.vis == ast::Public { Allowable } else { ExternallyDenied } } } } } } Some(&ty::MethodTraitItem(ref meth)) => { debug!("privacy - well at least it's a method: {:?}", *meth); match meth.container { ty::TraitContainer(id) => { debug!("privacy - recursing on trait {:?}", id); self.def_privacy(id) } ty::ImplContainer(id) => { match self.tcx.impl_trait_ref(id) { Some(t) => { debug!("privacy - impl of trait {:?}", id); self.def_privacy(t.def_id) } None => { debug!("privacy - found a method {:?}", meth.vis); if meth.vis == ast::Public { Allowable } else { ExternallyDenied } } } } } } Some(&ty::TypeTraitItem(ref typedef)) => { match typedef.container { ty::TraitContainer(id) => { debug!("privacy - recursing on trait {:?}", id); self.def_privacy(id) } ty::ImplContainer(id) => { match self.tcx.impl_trait_ref(id) { Some(t) => { debug!("privacy - impl of trait {:?}", id); self.def_privacy(t.def_id) } None => { debug!("privacy - found a typedef {:?}", typedef.vis); if typedef.vis == ast::Public { Allowable } else { ExternallyDenied } } } } } } None => { debug!("privacy - nope, not even a method"); ExternallyDenied } }; } debug!("privacy - local {} not public all the way down", self.tcx.map.node_to_string(did.node)); // return quickly for things 
in the same module if self.parents.get(&did.node) == self.parents.get(&self.curitem) { debug!("privacy - same parent, we're done here"); return Allowable; } // We now know that there is at least one private member between the // destination and the root. let mut closest_private_id = did.node; loop { debug!("privacy - examining {}", self.nodestr(closest_private_id)); let vis = match self.tcx.map.find(closest_private_id) { // If this item is a method, then we know for sure that it's an // actual method and not a static method. The reason for this is // that these cases are only hit in the ExprMethodCall // expression, and ExprCall will have its path checked later // (the path of the trait/impl) if it's a static method. // // With this information, then we can completely ignore all // trait methods. The privacy violation would be if the trait // couldn't get imported, not if the method couldn't be used // (all trait methods are public). // // However, if this is an impl method, then we dictate this // decision solely based on the privacy of the method // invocation. // FIXME(#10573) is this the right behavior? Why not consider // where the method was defined? Some(ast_map::NodeImplItem(ii)) => { match ii.node { ast::ConstImplItem(..) | ast::MethodImplItem(..) => { let imp = self.tcx.map .get_parent_did(closest_private_id); match self.tcx.impl_trait_ref(imp) { Some(..) 
=> return Allowable, _ if ii.vis == ast::Public => { return Allowable } _ => ii.vis } } ast::TypeImplItem(_) | ast::MacImplItem(_) => return Allowable, } } Some(ast_map::NodeTraitItem(_)) => { return Allowable; } // This is not a method call, extract the visibility as one // would normally look at it Some(ast_map::NodeItem(it)) => it.vis, Some(ast_map::NodeForeignItem(_)) => { self.tcx.map.get_foreign_vis(closest_private_id) } Some(ast_map::NodeVariant(..)) => { ast::Public // need to move up a level (to the enum) } _ => ast::Public, }; if vis != ast::Public { break } // if we've reached the root, then everything was allowable and this // access is public. if closest_private_id == ast::CRATE_NODE_ID { return Allowable } closest_private_id = *self.parents.get(&closest_private_id).unwrap(); // If we reached the top, then we were public all the way down and // we can allow this access. if closest_private_id == ast::DUMMY_NODE_ID { return Allowable } } debug!("privacy - closest priv {}", self.nodestr(closest_private_id)); if self.private_accessible(closest_private_id) { Allowable } else { DisallowedBy(closest_private_id) } } /// For a local private node in the AST, this function will determine /// whether the node is accessible by the current module that iteration is /// inside. fn private_accessible(&self, id: ast::NodeId) -> bool { let parent = *self.parents.get(&id).unwrap(); debug!("privacy - accessible parent {}", self.nodestr(parent)); // After finding `did`'s closest private member, we roll ourselves back // to see if this private member's parent is anywhere in our ancestry. // By the privacy rules, we can access all of our ancestor's private // members, so that's why we test the parent, and not the did itself. 
let mut cur = self.curitem; loop { debug!("privacy - questioning {}, {}", self.nodestr(cur), cur); match cur { // If the relevant parent is in our history, then we're allowed // to look inside any of our ancestor's immediate private items, // so this access is valid. x if x == parent => return true, // If we've reached the root, then we couldn't access this item // in the first place ast::DUMMY_NODE_ID => return false, // Keep going up _ => {} } cur = *self.parents.get(&cur).unwrap(); } } fn report_error(&self, result: CheckResult) -> bool { match result { None => true, Some((span, msg, note)) => { self.tcx.sess.span_err(span, &msg[..]); match note { Some((span, msg)) => { self.tcx.sess.span_note(span, &msg[..]) } None => {}, } false }, } } /// Guarantee that a particular definition is public. Returns a CheckResult /// which contains any errors found. These can be reported using `report_error`. /// If the result is `None`, no errors were found. fn ensure_public(&self, span: Span, to_check: ast::DefId, source_did: Option<ast::DefId>, msg: &str) -> CheckResult { let id = match self.def_privacy(to_check) { ExternallyDenied => { return Some((span, format!("{} is private", msg), None)) } Allowable => return None, DisallowedBy(id) => id, }; // If we're disallowed by a particular id, then we attempt to give a // nice error message to say why it was disallowed. It was either // because the item itself is private or because its parent is private // and its parent isn't in our ancestry. let (err_span, err_msg) = if id == source_did.unwrap_or(to_check).node { return Some((span, format!("{} is private", msg), None)); } else { (span, format!("{} is inaccessible", msg)) }; let item = match self.tcx.map.find(id) { Some(ast_map::NodeItem(item)) => { match item.node { // If an impl disallowed this item, then this is resolve's // way of saying that a struct/enum's static method was // invoked, and the struct/enum itself is private. 
Crawl // back up the chains to find the relevant struct/enum that // was private. ast::ItemImpl(_, _, _, _, ref ty, _) => { match ty.node { ast::TyPath(..) => {} _ => return Some((err_span, err_msg, None)), }; let def = self.tcx.def_map.borrow().get(&ty.id).unwrap().full_def(); let did = def.def_id(); assert!(is_local(did)); match self.tcx.map.get(did.node) { ast_map::NodeItem(item) => item, _ => self.tcx.sess.span_bug(item.span, "path is not an item") } } _ => item } } Some(..) | None => return Some((err_span, err_msg, None)), }; let desc = match item.node { ast::ItemMod(..) => "module", ast::ItemTrait(..) => "trait", ast::ItemStruct(..) => "struct", ast::ItemEnum(..) => "enum", _ => return Some((err_span, err_msg, None)) }; let msg = format!("{} `{}` is private", desc, item.ident); Some((err_span, err_msg, Some((span, msg)))) } // Checks that a field is in scope. fn check_field(&mut self, span: Span, def: ty::AdtDef<'tcx>, v: ty::VariantDef<'tcx>, name: FieldName) { let field = match name { NamedField(f_name) => { debug!("privacy - check named field {} in struct {:?}", f_name, def); v.field_named(f_name) } UnnamedField(idx) => &v.fields[idx] }; if field.vis == ast::Public || (is_local(field.did) && self.private_accessible(field.did.node)) { return } let struct_desc = match def.adt_kind() { ty::AdtKind::Struct => format!("struct `{}`", self.tcx.item_path_str(def.did)), // struct variant fields have inherited visibility ty::AdtKind::Enum => return }; let msg = match name { NamedField(name) => format!("field `{}` of {} is private", name, struct_desc), UnnamedField(idx) => format!("field #{} of {} is private", idx + 1, struct_desc), }; self.tcx.sess.span_err(span, &msg[..]); } // Given the ID of a method, checks to ensure it's in scope. fn check_static_method(&mut self, span: Span, method_id: ast::DefId, name: ast::Name) { // If the method is a default method, we need to use the def_id of // the default implementation. 
let method_id = match self.tcx.impl_or_trait_item(method_id) { ty::MethodTraitItem(method_type) => { method_type.provided_source.unwrap_or(method_id) } _ => { self.tcx.sess .span_bug(span, "got non-method item in check_static_method") } }; self.report_error(self.ensure_public(span, method_id, None, &format!("method `{}`", name))); } // Checks that a path is in scope. fn check_path(&mut self, span: Span, path_id: ast::NodeId, last: ast::Name) { debug!("privacy - path {}", self.nodestr(path_id)); let path_res = *self.tcx.def_map.borrow().get(&path_id).unwrap(); let ck = |tyname: &str| { let ck_public = |def: ast::DefId| { debug!("privacy - ck_public {:?}", def); let origdid = path_res.def_id(); self.ensure_public(span, def, Some(origdid), &format!("{} `{}`", tyname, last)) }; match path_res.last_private { LastMod(AllPublic) => {}, LastMod(DependsOn(def)) => { self.report_error(ck_public(def)); }, LastImport { value_priv, value_used: check_value, type_priv, type_used: check_type } => { // This dance with found_error is because we don't want to // report a privacy error twice for the same directive. let found_error = match (type_priv, check_type) { (Some(DependsOn(def)), Used) => { !self.report_error(ck_public(def)) }, _ => false, }; if !found_error { match (value_priv, check_value) { (Some(DependsOn(def)), Used) => { self.report_error(ck_public(def)); }, _ => {}, } } // If an import is not used in either namespace, we still // want to check that it could be legal. Therefore we check // in both namespaces and only report an error if both would // be illegal. We only report one error, even if it is // illegal to import from both namespaces. 
match (value_priv, check_value, type_priv, check_type) { (Some(p), Unused, None, _) | (None, _, Some(p), Unused) => { let p = match p { AllPublic => None, DependsOn(def) => ck_public(def), }; if p.is_some() { self.report_error(p); } }, (Some(v), Unused, Some(t), Unused) => { let v = match v { AllPublic => None, DependsOn(def) => ck_public(def), }; let t = match t { AllPublic => None, DependsOn(def) => ck_public(def), }; if let (Some(_), Some(t)) = (v, t) { self.report_error(Some(t)); } }, _ => {}, } }, } }; // FIXME(#12334) Imports can refer to definitions in both the type and // value namespaces. The privacy information is aware of this, but the // def map is not. Therefore the names we work out below will not always // be accurate and we can get slightly wonky error messages (but type // checking is always correct). match path_res.full_def() { def::DefFn(..) => ck("function"), def::DefStatic(..) => ck("static"), def::DefConst(..) => ck("const"), def::DefAssociatedConst(..) => ck("associated const"), def::DefVariant(..) => ck("variant"), def::DefTy(_, false) => ck("type"), def::DefTy(_, true) => ck("enum"), def::DefTrait(..) => ck("trait"), def::DefStruct(..) => ck("struct"), def::DefMethod(..) => ck("method"), def::DefMod(..) => ck("module"), _ => {} } } // Checks that a method is in scope. fn check_method(&mut self, span: Span, method_def_id: ast::DefId, name: ast::Name) { match self.tcx.impl_or_trait_item(method_def_id).container() { ty::ImplContainer(_) => { self.check_static_method(span, method_def_id, name) } // Trait methods are always all public. The only controlling factor // is whether the trait itself is accessible or not. 
ty::TraitContainer(trait_def_id) => { self.report_error(self.ensure_public(span, trait_def_id, None, "source trait")); } } } } impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> { fn visit_item(&mut self, item: &ast::Item) { if let ast::ItemUse(ref vpath) = item.node { if let ast::ViewPathList(ref prefix, ref list) = vpath.node { for pid in list { match pid.node { ast::PathListIdent { id, name, .. } => { debug!("privacy - ident item {}", id); self.check_path(pid.span, id, name.name); } ast::PathListMod { id, .. } => { debug!("privacy - mod item {}", id); let name = prefix.segments.last().unwrap().identifier.name; self.check_path(pid.span, id, name); } } } } } let orig_curitem = replace(&mut self.curitem, item.id); visit::walk_item(self, item); self.curitem = orig_curitem; } fn visit_expr(&mut self, expr: &ast::Expr) { match expr.node { ast::ExprField(ref base, ident) => { if let ty::TyStruct(def, _) = self.tcx.expr_ty_adjusted(&**base).sty { self.check_field(expr.span, def, def.struct_variant(), NamedField(ident.node.name)); } } ast::ExprTupField(ref base, idx) => { if let ty::TyStruct(def, _) = self.tcx.expr_ty_adjusted(&**base).sty { self.check_field(expr.span, def, def.struct_variant(), UnnamedField(idx.node)); } } ast::ExprMethodCall(ident, _, _) => { let method_call = ty::MethodCall::expr(expr.id); let method = self.tcx.tables.borrow().method_map[&method_call]; debug!("(privacy checking) checking impl method"); self.check_method(expr.span, method.def_id, ident.node.name); } ast::ExprStruct(..) => { let adt = self.tcx.expr_ty(expr).ty_adt_def().unwrap(); let variant = adt.variant_of_def(self.tcx.resolve_expr(expr)); // RFC 736: ensure all unmentioned fields are visible. // Rather than computing the set of unmentioned fields // (i.e. `all_fields - fields`), just check them all. for field in &variant.fields { self.check_field(expr.span, adt, variant, NamedField(field.name)); } } ast::ExprPath(..) 
=> { if let def::DefStruct(_) = self.tcx.resolve_expr(expr) { let expr_ty = self.tcx.expr_ty(expr); let def = match expr_ty.sty { ty::TyBareFn(_, &ty::BareFnTy { sig: ty::Binder(ty::FnSig { output: ty::FnConverging(ty), .. }), ..}) => ty, _ => expr_ty }.ty_adt_def().unwrap(); let any_priv = def.struct_variant().fields.iter().any(|f| { f.vis != ast::Public && ( !is_local(f.did) || !self.private_accessible(f.did.node)) }); if any_priv { self.tcx.sess.span_err(expr.span, "cannot invoke tuple struct constructor \ with private fields"); } } } _ => {} } visit::walk_expr(self, expr); } fn visit_pat(&mut self, pattern: &ast::Pat) { // Foreign functions do not have their patterns mapped in the def_map, // and there's nothing really relevant there anyway, so don't bother // checking privacy. If you can name the type then you can pass it to an // external C function anyway. if self.in_foreign { return } match pattern.node { ast::PatStruct(_, ref fields, _) => { let adt = self.tcx.pat_ty(pattern).ty_adt_def().unwrap(); let def = self.tcx.def_map.borrow().get(&pattern.id).unwrap().full_def(); let variant = adt.variant_of_def(def); for field in fields { self.check_field(pattern.span, adt, variant, NamedField(field.node.ident.name)); } } // Patterns which bind no fields are allowable (the path is check // elsewhere). ast::PatEnum(_, Some(ref fields)) => { match self.tcx.pat_ty(pattern).sty { ty::TyStruct(def, _) => { for (i, field) in fields.iter().enumerate() { if let ast::PatWild(..) = field.node { continue } self.check_field(field.span, def, def.struct_variant(), UnnamedField(i)); } } ty::TyEnum(..) 
=> { // enum fields have no privacy at this time } _ => {} } } _ => {} } visit::walk_pat(self, pattern); } fn visit_foreign_item(&mut self, fi: &ast::ForeignItem) { self.in_foreign = true; visit::walk_foreign_item(self, fi); self.in_foreign = false; } fn visit_path(&mut self, path: &ast::Path, id: ast::NodeId) { self.check_path(path.span, id, path.segments.last().unwrap().identifier.name); visit::walk_path(self, path); } } //////////////////////////////////////////////////////////////////////////////// /// The privacy sanity check visitor, ensures unnecessary visibility isn't here //////////////////////////////////////////////////////////////////////////////// struct SanePrivacyVisitor<'a, 'tcx: 'a> { tcx: &'a ty::ctxt<'tcx>, in_fn: bool, } impl<'a, 'tcx, 'v> Visitor<'v> for SanePrivacyVisitor<'a, 'tcx> { fn visit_item(&mut self, item: &ast::Item) { if self.in_fn { self.check_all_inherited(item); } else { self.check_sane_privacy(item); } let in_fn = self.in_fn; let orig_in_fn = replace(&mut self.in_fn, match item.node { ast::ItemMod(..) => false, // modules turn privacy back on _ => in_fn, // otherwise we inherit }); visit::walk_item(self, item); self.in_fn = orig_in_fn; } fn visit_fn(&mut self, fk: visit::FnKind<'v>, fd: &'v ast::FnDecl, b: &'v ast::Block, s: Span, _: ast::NodeId) { // This catches both functions and methods let orig_in_fn = replace(&mut self.in_fn, true); visit::walk_fn(self, fk, fd, b, s); self.in_fn = orig_in_fn; } } impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { /// Validates all of the visibility qualifiers placed on the item given. This /// ensures that there are no extraneous qualifiers that don't actually do /// anything. In theory these qualifiers wouldn't parse, but that may happen /// later on down the road... 
fn check_sane_privacy(&self, item: &ast::Item) { let tcx = self.tcx; let check_inherited = |sp: Span, vis: ast::Visibility, note: &str| { if vis != ast::Inherited { tcx.sess.span_err(sp, "unnecessary visibility qualifier"); if !note.is_empty() { tcx.sess.span_note(sp, note); } } }; match item.node { // implementations of traits don't need visibility qualifiers because // that's controlled by having the trait in scope. ast::ItemImpl(_, _, _, Some(..), _, ref impl_items) => { check_inherited(item.span, item.vis, "visibility qualifiers have no effect on trait \ impls"); for impl_item in impl_items { check_inherited(impl_item.span, impl_item.vis, ""); } } ast::ItemImpl(..) => { check_inherited(item.span, item.vis, "place qualifiers on individual methods instead"); } ast::ItemForeignMod(..) => { check_inherited(item.span, item.vis, "place qualifiers on individual functions \ instead"); } ast::ItemEnum(ref def, _) => { for v in &def.variants { match v.node.vis { ast::Public => { if item.vis == ast::Public { tcx.sess.span_err(v.span, "unnecessary `pub` \ visibility"); } } ast::Inherited => {} } } } ast::ItemTrait(..) | ast::ItemDefaultImpl(..) | ast::ItemConst(..) | ast::ItemStatic(..) | ast::ItemStruct(..) | ast::ItemFn(..) | ast::ItemMod(..) | ast::ItemTy(..) | ast::ItemExternCrate(_) | ast::ItemUse(_) | ast::ItemMac(..) => {} } } /// When inside of something like a function or a method, visibility has no /// control over anything so this forbids any mention of any visibility fn check_all_inherited(&self, item: &ast::Item) { let tcx = self.tcx; fn check_inherited(tcx: &ty::ctxt, sp: Span, vis: ast::Visibility) { if vis != ast::Inherited { tcx.sess.span_err(sp, "visibility has no effect inside functions"); } } let check_struct = |def: &ast::StructDef| { for f in &def.fields { match f.node.kind { ast::NamedField(_, p) => check_inherited(tcx, f.span, p), ast::UnnamedField(..) 
=> {} } } }; check_inherited(tcx, item.span, item.vis); match item.node { ast::ItemImpl(_, _, _, _, _, ref impl_items) => { for impl_item in impl_items { match impl_item.node { ast::MethodImplItem(..) => { check_inherited(tcx, impl_item.span, impl_item.vis); } _ => {} } } } ast::ItemForeignMod(ref fm) => { for i in &fm.items { check_inherited(tcx, i.span, i.vis); } } ast::ItemEnum(ref def, _) => { for v in &def.variants { check_inherited(tcx, v.span, v.node.vis); } } ast::ItemStruct(ref def, _) => check_struct(&**def), ast::ItemExternCrate(_) | ast::ItemUse(_) | ast::ItemTrait(..) | ast::ItemDefaultImpl(..) | ast::ItemStatic(..) | ast::ItemConst(..) | ast::ItemFn(..) | ast::ItemMod(..) | ast::ItemTy(..) | ast::ItemMac(..) => {} } } } struct VisiblePrivateTypesVisitor<'a, 'tcx: 'a> { tcx: &'a ty::ctxt<'tcx>, exported_items: &'a ExportedItems, public_items: &'a PublicItems, in_variant: bool, } struct CheckTypeForPrivatenessVisitor<'a, 'b: 'a, 'tcx: 'b> { inner: &'a VisiblePrivateTypesVisitor<'b, 'tcx>, /// whether the type refers to private types. contains_private: bool, /// whether we've recurred at all (i.e. if we're pointing at the /// first type on which visit_ty was called). at_outer_type: bool, // whether that first type is a public path. outer_type_is_public_path: bool, } impl<'a, 'tcx> VisiblePrivateTypesVisitor<'a, 'tcx> { fn path_is_private_type(&self, path_id: ast::NodeId) -> bool { let did = match self.tcx.def_map.borrow().get(&path_id).map(|d| d.full_def()) { // `int` etc. (None doesn't seem to occur.) None | Some(def::DefPrimTy(..)) => return false, Some(def) => def.def_id(), }; // A path can only be private if: // it's in this crate... if !is_local(did) { return false } // .. 
and it corresponds to a private type in the AST (this returns // None for type parameters) match self.tcx.map.find(did.node) { Some(ast_map::NodeItem(ref item)) => item.vis != ast::Public, Some(_) | None => false, } } fn trait_is_public(&self, trait_id: ast::NodeId) -> bool { // FIXME: this would preferably be using `exported_items`, but all // traits are exported currently (see `EmbargoVisitor.exported_trait`) self.public_items.contains(&trait_id) } fn check_ty_param_bound(&self, ty_param_bound: &ast::TyParamBound) { if let ast::TraitTyParamBound(ref trait_ref, _) = *ty_param_bound { if !self.tcx.sess.features.borrow().visible_private_types && self.path_is_private_type(trait_ref.trait_ref.ref_id) { let span = trait_ref.trait_ref.path.span; self.tcx.sess.span_err(span, "private trait in exported type \ parameter bound"); } } } fn item_is_public(&self, id: &ast::NodeId, vis: ast::Visibility) -> bool { self.exported_items.contains(id) || vis == ast::Public } } impl<'a, 'b, 'tcx, 'v> Visitor<'v> for CheckTypeForPrivatenessVisitor<'a, 'b, 'tcx> { fn visit_ty(&mut self, ty: &ast::Ty) { if let ast::TyPath(..) = ty.node { if self.inner.path_is_private_type(ty.id) { self.contains_private = true; // found what we're looking for so let's stop // working. return } else if self.at_outer_type { self.outer_type_is_public_path = true; } } self.at_outer_type = false; visit::walk_ty(self, ty) } // don't want to recurse into [, .. expr] fn visit_expr(&mut self, _: &ast::Expr) {} } impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { fn visit_item(&mut self, item: &ast::Item) { match item.node { // contents of a private mod can be reexported, so we need // to check internals. ast::ItemMod(_) => {} // An `extern {}` doesn't introduce a new privacy // namespace (the contents have their own privacies). 
ast::ItemForeignMod(_) => {} ast::ItemTrait(_, _, ref bounds, _) => { if !self.trait_is_public(item.id) { return } for bound in bounds.iter() { self.check_ty_param_bound(bound) } } // impls need some special handling to try to offer useful // error messages without (too many) false positives // (i.e. we could just return here to not check them at // all, or some worse estimation of whether an impl is // publicly visible). ast::ItemImpl(_, _, ref g, ref trait_ref, ref self_, ref impl_items) => { // `impl [... for] Private` is never visible. let self_contains_private; // impl [... for] Public<...>, but not `impl [... for] // Vec<Public>` or `(Public,)` etc. let self_is_public_path; // check the properties of the Self type: { let mut visitor = CheckTypeForPrivatenessVisitor { inner: self, contains_private: false, at_outer_type: true, outer_type_is_public_path: false, }; visitor.visit_ty(&**self_); self_contains_private = visitor.contains_private; self_is_public_path = visitor.outer_type_is_public_path; } // miscellaneous info about the impl // `true` iff this is `impl Private for ...`. let not_private_trait = trait_ref.as_ref().map_or(true, // no trait counts as public trait |tr| { let did = self.tcx.trait_ref_to_def_id(tr); !is_local(did) || self.trait_is_public(did.node) }); // `true` iff this is a trait impl or at least one method is public. // // `impl Public { $( fn ...() {} )* }` is not visible. // // This is required over just using the methods' privacy // directly because we might have `impl<T: Foo<Private>> ...`, // and we shouldn't warn about the generics if all the methods // are private (because `T` won't be visible externally). let trait_or_some_public_method = trait_ref.is_some() || impl_items.iter() .any(|impl_item| { match impl_item.node { ast::ConstImplItem(..) | ast::MethodImplItem(..) 
=> { self.exported_items.contains(&impl_item.id) } ast::TypeImplItem(_) | ast::MacImplItem(_) => false, } }); if !self_contains_private && not_private_trait && trait_or_some_public_method { visit::walk_generics(self, g); match *trait_ref { None => { for impl_item in impl_items { // This is where we choose whether to walk down // further into the impl to check its items. We // should only walk into public items so that we // don't erroneously report errors for private // types in private items. match impl_item.node { ast::ConstImplItem(..) | ast::MethodImplItem(..) if self.item_is_public(&impl_item.id, impl_item.vis) => { visit::walk_impl_item(self, impl_item) } ast::TypeImplItem(..) => { visit::walk_impl_item(self, impl_item) } _ => {} } } } Some(ref tr) => { // Any private types in a trait impl fall into three // categories. // 1. mentioned in the trait definition // 2. mentioned in the type params/generics // 3. mentioned in the associated types of the impl // // Those in 1. can only occur if the trait is in // this crate and will've been warned about on the // trait definition (there's no need to warn twice // so we don't check the methods). // // Those in 2. are warned via walk_generics and this // call here. visit::walk_path(self, &tr.path); // Those in 3. are warned with this call. for impl_item in impl_items { if let ast::TypeImplItem(ref ty) = impl_item.node { self.visit_ty(ty); } } } } } else if trait_ref.is_none() && self_is_public_path { // impl Public<Private> { ... }. Any public static // methods will be visible as `Public::foo`. let mut found_pub_static = false; for impl_item in impl_items { match impl_item.node { ast::ConstImplItem(..) 
=> { if self.item_is_public(&impl_item.id, impl_item.vis) { found_pub_static = true; visit::walk_impl_item(self, impl_item); } } ast::MethodImplItem(ref sig, _) => { if sig.explicit_self.node == ast::SelfStatic && self.item_is_public(&impl_item.id, impl_item.vis) { found_pub_static = true; visit::walk_impl_item(self, impl_item); } } _ => {} } } if found_pub_static { visit::walk_generics(self, g) } } return } // `type ... = ...;` can contain private types, because // we're introducing a new name. ast::ItemTy(..) => return, // not at all public, so we don't care _ if !self.item_is_public(&item.id, item.vis) => { return; } _ => {} } // We've carefully constructed it so that if we're here, then // any `visit_ty`'s will be called on things that are in // public signatures, i.e. things that we're interested in for // this visitor. debug!("VisiblePrivateTypesVisitor entering item {:?}", item); visit::walk_item(self, item); } fn visit_generics(&mut self, generics: &ast::Generics) { for ty_param in generics.ty_params.iter() { for bound in ty_param.bounds.iter() { self.check_ty_param_bound(bound) } } for predicate in &generics.where_clause.predicates { match predicate { &ast::WherePredicate::BoundPredicate(ref bound_pred) => { for bound in bound_pred.bounds.iter() { self.check_ty_param_bound(bound) } } &ast::WherePredicate::RegionPredicate(_) => {} &ast::WherePredicate::EqPredicate(ref eq_pred) => { self.visit_ty(&*eq_pred.ty); } } } } fn visit_foreign_item(&mut self, item: &ast::ForeignItem) { if self.exported_items.contains(&item.id) { visit::walk_foreign_item(self, item) } } fn visit_ty(&mut self, t: &ast::Ty) { debug!("VisiblePrivateTypesVisitor checking ty {:?}", t); if let ast::TyPath(_, ref p) = t.node { if !self.tcx.sess.features.borrow().visible_private_types && self.path_is_private_type(t.id) { self.tcx.sess.span_err(p.span, "private type in exported type signature"); } } visit::walk_ty(self, t) } fn visit_variant(&mut self, v: &ast::Variant, g: &ast::Generics) { 
if self.exported_items.contains(&v.node.id) { self.in_variant = true; visit::walk_variant(self, v, g); self.in_variant = false; } } fn visit_struct_field(&mut self, s: &ast::StructField) { match s.node.kind { ast::NamedField(_, vis) if vis == ast::Public || self.in_variant => { visit::walk_struct_field(self, s); } _ => {} } }<|fim▁hole|> // (Making them no-ops stops us from traversing the whole AST without // having to be super careful about our `walk_...` calls above.) fn visit_block(&mut self, _: &ast::Block) {} fn visit_expr(&mut self, _: &ast::Expr) {} } pub fn check_crate(tcx: &ty::ctxt, export_map: &def::ExportMap, external_exports: ExternalExports) -> (ExportedItems, PublicItems) { let krate = tcx.map.krate(); // Figure out who everyone's parent is let mut visitor = ParentVisitor { parents: NodeMap(), curparent: ast::DUMMY_NODE_ID, }; visit::walk_crate(&mut visitor, krate); // Use the parent map to check the privacy of everything let mut visitor = PrivacyVisitor { curitem: ast::DUMMY_NODE_ID, in_foreign: false, tcx: tcx, parents: visitor.parents, external_exports: external_exports, }; visit::walk_crate(&mut visitor, krate); // Sanity check to make sure that all privacy usage and controls are // reasonable. let mut visitor = SanePrivacyVisitor { in_fn: false, tcx: tcx, }; visit::walk_crate(&mut visitor, krate); tcx.sess.abort_if_errors(); // Build up a set of all exported items in the AST. This is a set of all // items which are reachable from external crates based on visibility. let mut visitor = EmbargoVisitor { tcx: tcx, exported_items: NodeSet(), public_items: NodeSet(), reexports: NodeSet(), export_map: export_map, prev_exported: true, prev_public: true, }; loop { let before = visitor.exported_items.len(); visit::walk_crate(&mut visitor, krate); if before == visitor.exported_items.len() { break } } let EmbargoVisitor { exported_items, public_items, .. 
} = visitor; { let mut visitor = VisiblePrivateTypesVisitor { tcx: tcx, exported_items: &exported_items, public_items: &public_items, in_variant: false, }; visit::walk_crate(&mut visitor, krate); } return (exported_items, public_items); }<|fim▁end|>
// we don't need to introspect into these at all: an // expression/block context can't possibly contain exported things.
<|file_name|>run.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import json import sys import web from coloredcoinlib import BlockchainState, ColorDefinition blockchainstate = BlockchainState.from_url(None, True) urls = ( '/tx', 'Tx', '/prefetch', 'Prefetch', ) class ErrorThrowingRequestProcessor: def require(self, data, key, message): value = data.get(key) if not value: raise web.HTTPError("400 Bad request", <|fim▁hole|> {"content-type": "text/plain"}, message) class Tx(ErrorThrowingRequestProcessor): def POST(self): # data is sent in as json data = json.loads(web.input().keys()[0]) self.require(data, 'txhash', "TX requires txhash") txhash = data.get('txhash') return blockchainstate.get_raw(txhash) class Prefetch(ErrorThrowingRequestProcessor): def POST(self): # data is sent in as json data = json.loads(web.input().keys()[0]) self.require(data, 'txhash', "Prefetch requires txhash") self.require(data, 'output_set', "Prefetch requires output_set") self.require(data, 'color_desc', "Prefetch requires color_desc") txhash = data.get('txhash') output_set = data.get('output_set') color_desc = data.get('color_desc') limit = data.get('limit') color_def = ColorDefinition.from_color_desc(17, color_desc) tx_lookup = {} def process(current_txhash, current_outindex): """For any tx out, process the colorvalues of the affecting inputs first and then scan that tx. """ if limit and len(tx_lookup) > limit: return if tx_lookup.get(current_txhash): return current_tx = blockchainstate.get_tx(current_txhash) if not current_tx: return tx_lookup[current_txhash] = blockchainstate.get_raw(current_txhash) # note a genesis tx will simply have 0 affecting inputs inputs = set() inputs = inputs.union( color_def.get_affecting_inputs(current_tx, [current_outindex])) for i in inputs: process(i.prevout.hash, i.prevout.n) for oi in output_set: process(txhash, oi) return tx_lookup if __name__ == "__main__": app = web.application(urls, globals()) app.run()<|fim▁end|>
<|file_name|>lint-expr-stmt-attrs-for-early-lints.rs<|end_file_name|><|fim▁begin|>// run-pass #![feature(stmt_expr_attributes)] #![deny(unused_parens)] // Tests that lint attributes on statements/expressions are // correctly applied to non-builtin early (AST) lints <|fim▁hole|> } }<|fim▁end|>
fn main() { #[allow(unused_parens)] { let _ = (9);
<|file_name|>kartograph-script.py<|end_file_name|><|fim▁begin|>#!C:\Users\SeanSaito\Dev\aviato\flask\Scripts\python.exe<|fim▁hole|> if __name__ == '__main__': sys.exit( load_entry_point('kartograph.py==0.6.8', 'console_scripts', 'kartograph')() )<|fim▁end|>
# EASY-INSTALL-ENTRY-SCRIPT: 'kartograph.py==0.6.8','console_scripts','kartograph' __requires__ = 'kartograph.py==0.6.8' import sys from pkg_resources import load_entry_point
<|file_name|>jmg.js<|end_file_name|><|fim▁begin|>/* JMG (Javascript Mini GUI) (c) Roberto Lopez <jmg.contact.box *at* gmail.com> The MIT License (MIT) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ //------------------------------------------------------------------------------------------------------------------------ // public variables //------------------------------------------------------------------------------------------------------------------------ var WIDGETS = []; var CURRENTPAGE = ""; var STYLE='desktop'; var MAX_DIALOG_WIDTH = 700; //------------------------------------------------------------------------------------------------------------------------ // showPage( id ) //------------------------------------------------------------------------------------------------------------------------ function showPage( id ) { $( ":mobile-pagecontainer" ).pagecontainer( "change", "#" + id ); } //------------------------------------------------------------------------------------------------------------------------ // setPageStyle( style ) //------------------------------------------------------------------------------------------------------------------------ function setPageStyle( newStyle , width ) { STYLE = newStyle; if ( newStyle == "auto" || newStyle == "desktop" ) { if ( typeof(width) == "undefined" ) { MAX_DIALOG_WIDTH = 700; } else { MAX_DIALOG_WIDTH = width; } } } //------------------------------------------------------------------------------------------------------------------------ // doMethod //------------------------------------------------------------------------------------------------------------------------ function doMethod( id , method , value ) { var string = ''; var datarole = $('#' + id ).data('role'); if ( method == 'addRow' ) { string += '<tr style="height:1.5em;" >'; string += '<td class="column0" style="border: 1px solid rgb(192,192,192); border-collapse: collapse;"> ' + '' + '<input type="checkbox" onclick="changeColor(this)" >' + '' + ' </td>'; for( var n = 0; n < value.length; n++ ) { string += '<td style="border: 1px solid rgb(192,192,192); border-collapse: collapse;">' + value [n] + '</td>'; } string += '</tr>'; 
$('#'+id).append( string ); if ( datarole == 'table' ) { $("#"+id).table("refresh"); } } else if ( method == 'deleteRow' ) { document.getElementById( id ).deleteRow(value); } else if ( method == 'selectRow' ) { document.getElementById(id).rows[value].cells[0].getElementsByTagName("input")[0].checked = true ; } else if ( method == 'unSelectRow' ) { document.getElementById(id).rows[value].cells[0].getElementsByTagName("input")[0].checked = false ; } else if ( method == 'load' ) { doMethod(id,'deleteAllRows'); if ( typeof( value ) == 'string' ) { var jsonData = value; var arr = JSON.parse( jsonData ); } else if ( typeof( value ) == 'object' ) { var arr = value; } else { alert('Error (load method): Invalid Type'); } var l = arr[0].length; for( var i=0; i < arr.length; i ++ ) { var row = []; for ( var j=0 ; j<l ; j++ ) { row.push(arr[i][j]); } doMethod(id,'addRow', row ) } if ( datarole == 'table' ) { $("#"+id).table("refresh"); } } else if ( method == 'deleteAllRows' ) { while ( get(id,'rowCount') > 0 ) { doMethod( id, 'deleteRow' , 1 ); } } } //------------------------------------------------------------------------------------------------------------------------ // get //------------------------------------------------------------------------------------------------------------------------ function get( id , property , value1 , value2 ) { var datarole = $('#' + id ).data('role'); var retval; if ( datarole == 'flipswitch' || datarole == 'text' || datarole == 'textarea' || datarole == 'button' || datarole == 'number' || datarole == 'date' || datarole == 'select' ) { var string = 'document.getElementById("' + id + '").' 
+ property retval = eval(string) ; } else if ( datarole == 'std-table' || datarole == 'table' ) { if ( property == 'rowCount' ) { retval = document.getElementById(id).rows.length - 1; } else if ( property == 'selectedRowCount' ) { var i, n ; n = 0 ; for ( i=1 ; i < document.getElementById(id).rows.length ; i++ ) { if ( document.getElementById(id).rows[i].cells[0].getElementsByTagName("input")[0].checked == true ) { n++ ; } } retval = n; } else if ( property == 'selectedRows' ) { var selectedRows=new Array(); var i, code, first, last, n ; n = 0 ; for ( i=1 ; i < document.getElementById(id).rows.length ; i++ ) { if ( document.getElementById(id).rows[i].cells[0].getElementsByTagName("input")[0].checked == true ) { selectedRows[n] = i ; n++ ; } } retval = selectedRows; } else if ( property == 'cell' ) { retval = document.getElementById(id).rows[value1].cells[value2].childNodes[0].data; } else { alert('Property Not Supported'); } } else { if ( $('#' + id + '0' ).data('role') == 'radio' ) { var radios = document.getElementsByName(id); var radios_value; for(var i = 0; i < radios.length; i++) { if(radios[i].checked) { retval = i; break; } } } else { alert('error: widget not supported (01)'); } } return retval ; } //------------------------------------------------------------------------------------------------------------------------ // set( id , property , value , value2 , value3 ) //------------------------------------------------------------------------------------------------------------------------ function set( id , property , value , value2 , value3) { var datarole = $('#' + id ).data('role'); var string; if ( datarole == 'flipswitch' ) { if ( property == 'value' ) { $('#'+id ).val( value ).flipswitch( "refresh" ) ; } else { setStandard(id , property , value); } } else if ( datarole == 'button' ) { if ( property == 'value') { $("#"+id).val(value); $("#"+id).button("refresh"); } else { setStandard(id , property , value); } } else if ( datarole == 'text' || datarole 
== 'number' || datarole == 'date' || datarole == 'textarea' ) { setStandard(id , property , value); } else if ( datarole == 'select' ) { if ( property == 'value') { setStandard(id , 'selectedIndex' , value); $("#"+id).selectmenu('refresh'); } } else if ( datarole == 'std-table' || datarole == 'table' ) { if ( property == 'cell') { document.getElementById(id).rows[value].cells[value2].childNodes[0].data = value3 ; } } else { if ( $('#' + id + '0' ).data('role') == 'radio' ) { if ( property == 'value') { $( '#' + id + value ).prop("checked","checked").checkboxradio("refresh"); $("input[type='radio']").checkboxradio("refresh"); } } else { alert('error: widget not supported(02)'); } } } //------------------------------------------------------------------------------------------------------------------------ // beginGrid //------------------------------------------------------------------------------------------------------------------------ function beginGrid() { WIDGETS.push( [ "begingrid" ] ); } //------------------------------------------------------------------------------------------------------------------------ // endGrid //------------------------------------------------------------------------------------------------------------------------ function endGrid() { WIDGETS.push( [ "endgrid" ] ); } //------------------------------------------------------------------------------------------------------------------------ // beginBlock //------------------------------------------------------------------------------------------------------------------------ function beginBlock() { WIDGETS.push( [ "beginblock" ] ); } //------------------------------------------------------------------------------------------------------------------------ // endBlock //------------------------------------------------------------------------------------------------------------------------ function endBlock() { WIDGETS.push( [ "endblock" ] ); } <|fim▁hole|>function text( prop ) { 
checkDup(prop['id']); WIDGETS.push( [ "text" , prop['id' ] , prop['label'] , prop['default'] , prop['placeholder'] ] ); } //------------------------------------------------------------------------------------------------------------------------ // textArea //------------------------------------------------------------------------------------------------------------------------ function textArea( prop ) { checkDup(prop['id']); WIDGETS.push( [ "textarea" , prop['id' ] , prop['label'] , prop['default'] ] ); } //------------------------------------------------------------------------------------------------------------------------ // date //------------------------------------------------------------------------------------------------------------------------ function date( prop ) { checkDup(prop['id']); WIDGETS.push( [ "date" , prop['id' ] , prop['label'] , prop['default'] ] ); } //------------------------------------------------------------------------------------------------------------------------ // number //------------------------------------------------------------------------------------------------------------------------ function number( prop ) { checkDup(prop['id']); WIDGETS.push( [ "number" , prop['id' ] , prop['label'] , prop['default'] , prop['placeholder'] ] ); } //------------------------------------------------------------------------------------------------------------------------ // table //------------------------------------------------------------------------------------------------------------------------ function table( prop ) { checkDup(prop['id']); WIDGETS.push( [ "table" , prop['id' ] , prop['headers'] , prop['style'] , prop['height'] ] ); } //------------------------------------------------------------------------------------------------------------------------ // button //------------------------------------------------------------------------------------------------------------------------ function button( prop ) { checkDup(prop['id']); 
WIDGETS.push( [ "button" , prop["id"] , prop["caption"] , prop["action"] , prop["inline"] , prop["iconType"] , prop["iconPos"] ] ); } //------------------------------------------------------------------------------------------------------------------------ // flipswitch //------------------------------------------------------------------------------------------------------------------------ function flipswitch ( prop ) { checkDup(prop['id']); WIDGETS.push( ["flipswitch" , prop['id'] , prop['label'] , prop['onValue'] , prop['offValue'] , prop['default'] ] ); } //------------------------------------------------------------------------------------------------------------------------ // radio //------------------------------------------------------------------------------------------------------------------------ function radio ( prop ) { checkDup(prop['id']); WIDGETS.push( ["radio" , prop['id'] , prop['options'] , prop['default'] , prop['label'] ] ); } //------------------------------------------------------------------------------------------------------------------------ // select //------------------------------------------------------------------------------------------------------------------------ function select ( prop ) { checkDup(prop['id']); WIDGETS.push( ["select" , prop['id'] , prop['options'] , prop['label'] , prop['default'] ] ); } //------------------------------------------------------------------------------------------------------------------------ // header //------------------------------------------------------------------------------------------------------------------------ function header( prop ) { checkDup(prop['id']); WIDGETS.push( [ "header" , prop['id'] , prop['caption'] ] ); } //------------------------------------------------------------------------------------------------------------------------ // footer //------------------------------------------------------------------------------------------------------------------------ function 
footer( prop ) { checkDup(prop['id']); WIDGETS.push( [ "header" , prop['id'] , prop['caption'] ] ); } //------------------------------------------------------------------------------------------------------------------------ // beginPage //------------------------------------------------------------------------------------------------------------------------ function beginPage( id ) { checkDup(id); CURRENTPAGE = id; } //------------------------------------------------------------------------------------------------------------------------ // endPage //------------------------------------------------------------------------------------------------------------------------ function endPage() { var currentRole = ''; var blockCount = 0; var closedBlocks = 0; //---------------------------------------------------------------------- // Page Style Parser //---------------------------------------------------------------------- if ( STYLE == 'desktop' ) { var styleString = 'data-dialog="true" '; currentRole = 'dialog'; } else if ( STYLE == 'mobile' ) { var styleString = 'data-dialog="false"'; currentRole = 'page'; } else if ( STYLE == 'auto' ) { if (window.screen.availWidth > MAX_DIALOG_WIDTH ) { var styleString = 'data-dialog="true"'; currentRole = 'dialog'; } else { var styleString = 'data-dialog="false"'; currentRole = 'page'; } } else { alert('error: Invalid Style'); return; } //---------------------------------------------------------------------- // Widget Parser //---------------------------------------------------------------------- var contentString = ''; for ( var i = 0 ; i < WIDGETS.length ; i++ ) { //---------------------------- // flipswitch //---------------------------- // 0 -> Widget Type ('flipswitch') // 1 -> id // 2 -> Label // 3 -> On Value // 4 -> Off Value // 5 -> Default //---------------------------- if ( WIDGETS[i][0] == 'flipswitch' ) { var selected1 = ''; var selected2 = ''; if ( WIDGETS[i][4] == WIDGETS[i][5] ) { selected1 = 'selected'; } else if ( 
WIDGETS[i][3] == WIDGETS[i][5] ) { selected2 = 'selected'; } else { if ( typeof( WIDGETS[i][5] ) == 'string' ) { alert('Error (FlipSwitch): Invalid default value'); return; } else { selected1 = 'selected'; } } if ( typeof( WIDGETS[i][2] ) == 'string' ) { contentString += '<label for="' + WIDGETS[i][1] + '">' + WIDGETS[i][2] + '</label>'; } contentString += '<select id="' + WIDGETS[i][1] +'" data-role="flipswitch">'; contentString += '<option ' + selected1 + ' value="' + WIDGETS[i][4] + '">' + WIDGETS[i][4] + '</option>'; contentString += '<option ' + selected2 + ' value="' + WIDGETS[i][3] + '">' + WIDGETS[i][3] + '</option>'; contentString += '</select>'; } //---------------------------- // button //---------------------------- // 0 -> Widget Type ('button') // 1 -> id // 2 -> Caption // 3 -> Action // 4 -> Inline // 5 -> Icon Type // 6 -> Icon Position //---------------------------- else if ( WIDGETS[i][0] == 'button' ) { if ( WIDGETS[i][4] == true ) { var inline = ' data-inline="true" '; } else { var inline = ''; } if ( typeof(WIDGETS[i][5]) == 'string' ) { var iconType = ' data-icon="' + WIDGETS[i][5] + '"' ; } else { var iconType = ''; } if ( typeof(WIDGETS[i][6]) == 'string' ) { var iconPos = ' data-iconpos="' + WIDGETS[i][6] + '"' ; } else { var iconPos = ''; } contentString += '<input type="button" data-role="button" ' + iconType + iconPos + ' value="' + WIDGETS[i][2] + '" onclick="' + WIDGETS[i][3] + '" id="' + WIDGETS[i][1] + '"' + inline + '>' ; } //---------------------------- // beginGrid //---------------------------- // 0 -> Widget Type ('begingrid') //---------------------------- else if ( WIDGETS[i][0] == 'begingrid' ) { contentString += '_JMG_BEGIN_GRID_PLACEHOLDER_' ; } //---------------------------- // endGrid //---------------------------- // 0 -> Widget Type ('endgrid') //---------------------------- else if ( WIDGETS[i][0] == 'endgrid' ) { contentString += '</div>' ; if ( blockCount == 1 ) { contentString = contentString.replace( 
'_JMG_BEGIN_GRID_PLACEHOLDER_' , '<div class="ui-grid-solo ui-responsive">' ); } else if ( blockCount == 2 ) { contentString = contentString.replace( '_JMG_BEGIN_GRID_PLACEHOLDER_' , '<div class="ui-grid-a ui-responsive">' ); } else if ( blockCount == 3 ) { contentString = contentString.replace( '_JMG_BEGIN_GRID_PLACEHOLDER_' , '<div class="ui-grid-b ui-responsive">' ); } else if ( blockCount == 4 ) { contentString = contentString.replace( '_JMG_BEGIN_GRID_PLACEHOLDER_' , '<div class="ui-grid-c ui-responsive">' ); } else if ( blockCount == 5 ) { contentString = contentString.replace( '_JMG_BEGIN_GRID_PLACEHOLDER_' , '<div class="ui-grid-d ui-responsive">' ); } else { alert('ERROR: Maximun number of blocks!'); return; } } //---------------------------- // beginBlock //---------------------------- // 0 -> Widget Type ('beginblock') //---------------------------- else if ( WIDGETS[i][0] == 'beginblock' ) { blockCount++; if ( blockCount == 1 ) { contentString += '<div class="ui-block-a">' ; } else if ( blockCount == 2 ) { contentString += '<div class="ui-block-b">' ; } else if ( blockCount == 3 ) { contentString += '<div class="ui-block-c">' ; } else if ( blockCount == 4 ) { contentString += '<div class="ui-block-d">' ; } else if ( blockCount == 5 ) { contentString += '<div class="ui-block-e">' ; } else { alert('ERROR: Maximun number of blocks!'); return; } } //---------------------------- // endBlock //---------------------------- // 0 -> Widget Type ('endblock') //---------------------------- else if ( WIDGETS[i][0] == 'endblock' ) { contentString += '</div>' ; } //---------------------------- // header //---------------------------- // 0 -> Widget Type ('header') // 1 -> id // 2 -> Caption //---------------------------- else if ( WIDGETS[i][0] == 'header' ) { contentString += '<div data-role="header" id="' + WIDGETS[i][1] + '"> ' ; contentString += '<h1>' + WIDGETS[i][2] + '</h1>'; contentString += '</div>' ; } //---------------------------- // footer 
//---------------------------- // 0 -> Widget Type ('footer') // 1 -> id // 2 -> Caption //---------------------------- else if ( WIDGETS[i][0] == 'footer' ) { contentString += '<div data-role="footer" id="' + WIDGETS[i][1] + '"> ' ; contentString += '<h1>' + WIDGETS[i][2] + '</h1>'; contentString += '</div>' ; } //---------------------------- // text //---------------------------- // 0 -> Widget Type ('text') // 1 -> id // 2 -> Label // 3 -> Default // 4 -> Placeholder //---------------------------- else if ( WIDGETS[i][0] == 'text' ) { if ( typeof( WIDGETS[i][3]) == 'string' ) { var value = 'value="' + WIDGETS[i][3] + '"'; } else { var value = ''; } if ( typeof( WIDGETS[i][4]) == 'string' ) { var placeholder = 'placeholder="' + WIDGETS[i][4] + '"'; } else { var placeholder = ''; } if ( typeof( WIDGETS[i][2]) == 'string' ) { contentString += '<label for="' + WIDGETS[i][1] + '">' + WIDGETS[i][2] + '</label>' ; } contentString += '<input type="text" data-role="text" data-clear-btn="true" ' + placeholder + ' id="' + WIDGETS[i][1] + '" ' + value + ' >'; } //---------------------------- // textArea //---------------------------- // 0 -> Widget Type ('textarea') // 1 -> id // 2 -> Label // 3 -> Default //---------------------------- else if ( WIDGETS[i][0] == 'textarea' ) { if ( typeof( WIDGETS[i][2]) == 'string' ) { contentString += '<label for="' + WIDGETS[i][1] + '">' + WIDGETS[i][2] + '</label>' ; } contentString += '<textarea data-role="textarea" id="' + WIDGETS[i][1] + '" >'; if ( typeof( WIDGETS[i][3]) == 'string' ) { contentString += WIDGETS[i][3]; } contentString += '</textarea>'; } //---------------------------- // number //---------------------------- // 0 -> Widget Type ('number') // 1 -> id // 2 -> Label // 3 -> default // 4 -> placeholder //---------------------------- else if ( WIDGETS[i][0] == 'number' ) { if ( typeof( WIDGETS[i][3]) == 'string' || typeof( WIDGETS[i][3]) == 'number' ) { var value = 'value="' + WIDGETS[i][3] + '"'; } else { var value = 
''; } if ( typeof( WIDGETS[i][4]) == 'string' ) { var placeholder = 'placeholder="' + WIDGETS[i][4] + '"'; } else { var placeholder = ''; } if ( typeof( WIDGETS[i][2]) == 'string' ) { contentString += '<label for="' + WIDGETS[i][1] + '">' + WIDGETS[i][2] + '</label>' ; } contentString += '<input type="number" data-clear-btn="true" data-role="number" ' + placeholder + ' id="' + WIDGETS[i][1] + '" ' + value + ' >'; } //---------------------------- // date //---------------------------- // 0 -> Widget Type ('date') // 1 -> id // 2 -> Label // 3 -> Default //---------------------------- else if ( WIDGETS[i][0] == 'date' ) { if ( typeof( WIDGETS[i][2]) == 'string' ) { contentString += '<label for="' + WIDGETS[i][1] + '">' + WIDGETS[i][2] + '</label>' ; } if ( typeof( WIDGETS[i][3]) == 'string' ) { var value = ' value="' + WIDGETS[i][3] + '" ' ; } else { var value = '' ; } contentString += '<input type="date" data-clear-btn="true" data-role="date" id="' + WIDGETS[i][1] + '" ' + value + ' >'; } //---------------------------- // radio //---------------------------- // 0 -> Widget Type ('radio') // 1 -> id // 2 -> Options Array // 3 -> Default // 4 -> Label //---------------------------- else if ( WIDGETS[i][0] == 'radio' ) { var checked = ''; var options = WIDGETS[i][2]; if ( typeof( WIDGETS[i][4]) == 'string' ) { contentString += '<label for="' + WIDGETS[i][1]+'L' + '">' + WIDGETS[i][4] + '</label>'; } contentString += '<fieldset data-role="controlgroup" id="' + WIDGETS[i][1]+'L' + '" >'; for( var n = 0; n < options.length; n++ ) { if ( WIDGETS[i][3] == n ) { checked = 'checked="checked"'; } else { checked = ''; } contentString += '<input type="radio" value="' + options[n] + '" name="' + WIDGETS[i][1] + '" id="' + WIDGETS[i][1]+n + '" data-role="radio" ' + checked + ' >'; contentString += '<label for="' + WIDGETS[i][1]+n + '">' + options[n] + '</label>'; } contentString += '</fieldset>'; } //---------------------------- // select //---------------------------- // 0 -> 
Widget Type ('select') // 1 -> id // 2 -> Options Array // 3 -> Label // 4 -> Default //---------------------------- else if ( WIDGETS[i][0] == 'select' ) { var selected = ''; var options = WIDGETS[i][2]; if ( typeof( WIDGETS[i][3]) == 'string' ) { contentString += '<label for="' + WIDGETS[i][1] + '" class="select">' + WIDGETS[i][3] + '</label>'; } contentString += '<select name="' + WIDGETS[i][1] + '" id="' + WIDGETS[i][1] + '"' + ' data-role="select" ' + '>'; for( var n = 0; n < options.length; n++ ) { if ( WIDGETS[i][4] == n ) { selected = 'selected'; } else { selected = ''; } contentString += '<option ' + selected + ' value="' + n + '">' + options[n] + '</option>'; } contentString += '</select>'; } //---------------------------- // table //---------------------------- // 0 -> Widget Type ('table') // 1 -> id // 2 -> headers // 3 -> style // 4 -> height //---------------------------- else if ( WIDGETS[i][0] == 'table' ) { if ( typeof( WIDGETS[i][3]) == 'string' ) { if ( WIDGETS[i][3] == 'standard' ) { var rolemode = ' data-role="std-table" '; } else if ( WIDGETS[i][3] == 'columntoggle' ) { var rolemode = ' data-role="table" data-mode="columntoggle" class="ui-responsive" '; } else if ( WIDGETS[i][3] == 'reflow' ) { var rolemode = ' data-role="table" '; } else { alert('error: Invalid table style'); return; } } else { var rolemode = ' data-role="std-table" '; } if ( typeof( WIDGETS[i][4]) == 'string' ) { var tableHeight = 'height: ' + WIDGETS[i][4] + ';' } else { var tableHeight = 'height: 16em;' } var headers = WIDGETS[i][2]; var tableStyle = 'style="' + tableHeight + 'overflow-y:scroll;border:1px solid rgb(192,192,192);border: 1px solid rgb(192,192,192); border-collapse: collapse;"'; contentString += '<div ' + tableStyle + ' >'; contentString += '<table ' + ' style="border: 1px solid rgb(192,192,192);" ' + rolemode + ' width="100%" id="' + WIDGETS[i][1] + '" >' ; contentString += '<thead style="height:2em;">'; contentString += '<tr>'; contentString += '<th 
class="column0" style="border: 1px solid rgb(192,192,192); border-collapse: collapse;" >' + '' + '</th>'; for( var n = 0; n < headers.length; n++ ) { contentString += '<th data-priority="' + n+1 + '" style="border: 1px solid rgb(192,192,192); border-collapse: collapse;" >' + headers [n] + '</th>'; } contentString += '</tr>'; contentString += '</thead>'; contentString += '<tbody>'; contentString += '</tbody>'; contentString += '</table>' ; contentString += '</div>' } } //---------------------------------------------------------------------- // add dialog width css to header //---------------------------------------------------------------------- var dialogStyle = ''; dialogStyle += '<style>'; dialogStyle += '.ui-dialog-contain {'; dialogStyle += 'width: 92.5%;'; dialogStyle += 'max-width: ' + MAX_DIALOG_WIDTH.toString() + 'px;' ; dialogStyle += 'margin: 10% auto 15px auto;'; dialogStyle += 'padding: 0;'; dialogStyle += 'position: relative;'; dialogStyle += 'top: -50px;'; dialogStyle += '}'; dialogStyle += 'input[type="checkbox"] {' dialogStyle += ' width: 1.5em;' dialogStyle += ' height:1.5em;' dialogStyle += ' padding: 0.5em;' dialogStyle += ' border: 1px solid #369;' dialogStyle += '}' dialogStyle += '.column0 {' dialogStyle += 'width: 2em;' dialogStyle += '}' dialogStyle += '.grid tr.selected' dialogStyle += '{' dialogStyle += ' color: rgb(255,255,255);' dialogStyle += ' background-color: rgb(100,100,100);' dialogStyle += '}' dialogStyle += '.grid tr.unselected' dialogStyle += '{' dialogStyle += ' color: rgb(0,0,0);' dialogStyle += ' background-color: rgb(255,255,255);' dialogStyle += '}' $('head').append( dialogStyle ); //---------------------------------------------------------------------- // add page to body //---------------------------------------------------------------------- $('#main_body').append('<div data-role="page" id="' + CURRENTPAGE + '" ' + styleString + '><div data-role="content">' + contentString + '</div></div>'); CURRENTPAGE = ""; WIDGETS = 
[]; } ////////////////////////////////////////////////////////////////////////////////// // INTERNAL FUNCTIONS ////////////////////////////////////////////////////////////////////////////////// function setStandard(id , property , value) { if ( typeof(value) == "string" ) { var string = 'document.getElementById("' + id + '").' + property + ' = ' + '"' + value + '"' } else { var string = 'document.getElementById("' + id + '").' + property + ' = ' + value } eval(string); } //------------------------------------------------------------------------------------------------------------------------ function checkDup( id ) { var l = len = WIDGETS.length; for ( var i = 0, l ; i < len ; i++ ) { if ( WIDGETS[i][1] === id ) { alert('warning: id duplicated (' + id + ')' ); return; } } } //------------------------------------------------------------------------------------------------------------------------ function changeColor(o) { if( o.checked ) { o.parentNode.parentNode.style.backgroundColor='darkgray'; } else { o.parentNode.parentNode.style.backgroundColor='transparent'; } }<|fim▁end|>
//------------------------------------------------------------------------------------------------------------------------ // text //------------------------------------------------------------------------------------------------------------------------
<|file_name|>kinova_test.py<|end_file_name|><|fim▁begin|># Copyright 2019 The dm_control Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """Tests for the Jaco arm class.""" import itertools import unittest from absl.testing import absltest from absl.testing import parameterized from dm_control import composer from dm_control import mjcf from dm_control.entities.manipulators import kinova from dm_control.entities.manipulators.kinova import jaco_arm from dm_control.mujoco.wrapper import mjbindings import numpy as np mjlib = mjbindings.mjlib class JacoArmTest(parameterized.TestCase): def test_can_compile_and_step_model(self): arm = kinova.JacoArm() physics = mjcf.Physics.from_mjcf_model(arm.mjcf_model) physics.step() def test_can_attach_hand(self): arm = kinova.JacoArm() hand = kinova.JacoHand() arm.attach(hand) physics = mjcf.Physics.from_mjcf_model(arm.mjcf_model) physics.step() # TODO(b/159974149): Investigate why the mass does not match the datasheet. 
@unittest.expectedFailure def test_mass(self): arm = kinova.JacoArm() physics = mjcf.Physics.from_mjcf_model(arm.mjcf_model) mass = physics.bind(arm.mjcf_model.worldbody).subtreemass expected_mass = 4.4 self.assertAlmostEqual(mass, expected_mass) @parameterized.parameters([ dict(actuator_index=0, control_input=0, expected_velocity=0.), dict(actuator_index=0, control_input=jaco_arm._LARGE_JOINT_MAX_VELOCITY, expected_velocity=jaco_arm._LARGE_JOINT_MAX_VELOCITY), dict(actuator_index=4, control_input=jaco_arm._SMALL_JOINT_MAX_VELOCITY, expected_velocity=jaco_arm._SMALL_JOINT_MAX_VELOCITY), dict(actuator_index=0, control_input=-jaco_arm._LARGE_JOINT_MAX_VELOCITY, expected_velocity=-jaco_arm._LARGE_JOINT_MAX_VELOCITY), dict(actuator_index=0, control_input=2*jaco_arm._LARGE_JOINT_MAX_VELOCITY, # Test clipping expected_velocity=jaco_arm._LARGE_JOINT_MAX_VELOCITY), ]) def test_velocity_actuation( self, actuator_index, control_input, expected_velocity): arm = kinova.JacoArm() physics = mjcf.Physics.from_mjcf_model(arm.mjcf_model) actuator = arm.actuators[actuator_index] bound_actuator = physics.bind(actuator) bound_joint = physics.bind(actuator.joint) acceleration_threshold = 1e-6 with physics.model.disable('contact', 'gravity'): bound_actuator.ctrl = control_input # Step until the joint has stopped accelerating. while abs(bound_joint.qacc) > acceleration_threshold: physics.step() self.assertAlmostEqual(bound_joint.qvel[0], expected_velocity, delta=0.01) @parameterized.parameters([ dict(joint_index=0, min_expected_torque=1.7, max_expected_torque=5.2), dict(joint_index=5, min_expected_torque=0.8, max_expected_torque=7.0)]) def test_backdriving_torque( self, joint_index, min_expected_torque, max_expected_torque): arm = kinova.JacoArm() physics = mjcf.Physics.from_mjcf_model(arm.mjcf_model) bound_joint = physics.bind(arm.joints[joint_index]) torque = min_expected_torque * 0.8 velocity_threshold = 0.1*2*np.pi/60. 
# 0.1 RPM torque_increment = 0.01 seconds_per_torque_increment = 1. max_torque = max_expected_torque * 1.1 while torque < max_torque: # Ensure that no other forces are acting on the arm. with physics.model.disable('gravity', 'contact', 'actuation'): # Reset the simulation so that the initial velocity is zero. physics.reset() bound_joint.qfrc_applied = torque while physics.time() < seconds_per_torque_increment: physics.step() if bound_joint.qvel[0] >= velocity_threshold: self.assertBetween(torque, min_expected_torque, max_expected_torque) return # If we failed to accelerate the joint to the target velocity within the # time limit we'll reset the simulation and increase the torque. torque += torque_increment self.fail('Torque of {} Nm insufficient to backdrive joint.'.format(torque)) @parameterized.parameters([ dict(joint_pos=0., expected_obs=[0., 1.]), dict(joint_pos=-0.5*np.pi, expected_obs=[-1., 0.]), dict(joint_pos=np.pi, expected_obs=[0., -1.]), dict(joint_pos=10*np.pi, expected_obs=[0., 1.])]) def test_joints_pos_observables(self, joint_pos, expected_obs): joint_index = 0 arm = kinova.JacoArm() physics = mjcf.Physics.from_mjcf_model(arm.mjcf_model) physics.bind(arm.joints).qpos[joint_index] = joint_pos actual_obs = arm.observables.joints_pos(physics)[joint_index] np.testing.assert_array_almost_equal(expected_obs, actual_obs) @parameterized.parameters( dict(joint_index=idx, applied_torque=t) for idx, t in itertools.product([0, 2, 4], [0., -6.8, 30.5])) def test_joints_torque_observables(self, joint_index, applied_torque): arm = kinova.JacoArm() joint = arm.joints[joint_index] physics = mjcf.Physics.from_mjcf_model(arm.mjcf_model) with physics.model.disable('gravity', 'limit', 'contact', 'actuation'): # Apply a cartesian torque to the body containing the joint. We use # `xfrc_applied` rather than `qfrc_applied` because forces in # `qfrc_applied` are not measured by the torque sensor). 
physics.bind(joint.parent).xfrc_applied[3:] = ( applied_torque * physics.bind(joint).xaxis) observed_torque = arm.observables.joints_torque(physics)[joint_index] # Note the change in sign, since the sensor measures torques in the # child->parent direction. self.assertAlmostEqual(observed_torque, -applied_torque, delta=0.1) class JacoHandTest(parameterized.TestCase): def test_can_compile_and_step_model(self): hand = kinova.JacoHand() physics = mjcf.Physics.from_mjcf_model(hand.mjcf_model) physics.step() # TODO(b/159974149): Investigate why the mass does not match the datasheet. @unittest.expectedFailure def test_hand_mass(self): hand = kinova.JacoHand()<|fim▁hole|> mass = physics.bind(hand.mjcf_model.worldbody).subtreemass expected_mass = 0.727 self.assertAlmostEqual(mass, expected_mass) def test_grip_force(self): arena = composer.Arena() hand = kinova.JacoHand() arena.attach(hand) # A sphere with a touch sensor for measuring grip force. prop_model = mjcf.RootElement(model='grip_target') prop_model.worldbody.add('geom', type='sphere', size=[0.02]) touch_site = prop_model.worldbody.add('site', type='sphere', size=[0.025]) touch_sensor = prop_model.sensor.add('touch', site=touch_site) prop = composer.ModelWrapperEntity(prop_model) # Add some slide joints to allow movement of the target in the XY plane. # This helps the contact solver to converge more reliably. prop_frame = arena.attach(prop) prop_frame.add('joint', name='slide_x', type='slide', axis=(1, 0, 0)) prop_frame.add('joint', name='slide_y', type='slide', axis=(0, 1, 0)) physics = mjcf.Physics.from_mjcf_model(arena.mjcf_model) bound_pinch_site = physics.bind(hand.pinch_site) bound_actuators = physics.bind(hand.actuators) bound_joints = physics.bind(hand.joints) bound_touch = physics.bind(touch_sensor) # Position the grip target at the pinch site. prop.set_pose(physics, position=bound_pinch_site.xpos) # Close the fingers with as much force as the actuators will allow. 
bound_actuators.ctrl = bound_actuators.ctrlrange[:, 1] # Run the simulation forward until the joints stop moving. physics.step() qvel_thresh = 1e-3 # radians / s while max(abs(bound_joints.qvel)) > qvel_thresh: physics.step() expected_min_grip_force = 20. expected_max_grip_force = 30. grip_force = bound_touch.sensordata self.assertBetween( grip_force, expected_min_grip_force, expected_max_grip_force, msg='Expected grip force to be between {} and {} N, got {} N.'.format( expected_min_grip_force, expected_max_grip_force, grip_force)) @parameterized.parameters([dict(opening=True), dict(opening=False)]) def test_finger_travel_time(self, opening): hand = kinova.JacoHand() physics = mjcf.Physics.from_mjcf_model(hand.mjcf_model) bound_actuators = physics.bind(hand.actuators) bound_joints = physics.bind(hand.joints) min_ctrl, max_ctrl = bound_actuators.ctrlrange.T min_qpos, max_qpos = bound_joints.range.T # Measure the time taken for the finger joints to traverse 99.9% of their # total range. qpos_tol = 1e-3 * (max_qpos - min_qpos) if opening: hand.set_grasp(physics=physics, close_factors=1.) # Fully closed. np.testing.assert_array_almost_equal(bound_joints.qpos, max_qpos) target_pos = min_qpos # Fully open. ctrl = min_ctrl # Open the fingers as fast as the actuators will allow. else: hand.set_grasp(physics=physics, close_factors=0.) # Fully open. np.testing.assert_array_almost_equal(bound_joints.qpos, min_qpos) target_pos = max_qpos # Fully closed. ctrl = max_ctrl # Close the fingers as fast as the actuators will allow. # Run the simulation until all joints have reached their target positions. bound_actuators.ctrl = ctrl while np.any(abs(bound_joints.qpos - target_pos) > qpos_tol): with physics.model.disable('gravity'): physics.step() expected_travel_time = 1.2 # Seconds. 
self.assertAlmostEqual(physics.time(), expected_travel_time, delta=0.1) @parameterized.parameters([ dict(pos=np.r_[0., 0., 0.3], quat=np.r_[0., 1., 0., 1.]), dict(pos=np.r_[0., -0.1, 0.5], quat=np.r_[1., 1., 0., 0.]), ]) def test_pinch_site_observables(self, pos, quat): arm = kinova.JacoArm() hand = kinova.JacoHand() arena = composer.Arena() arm.attach(hand) arena.attach(arm) physics = mjcf.Physics.from_mjcf_model(arena.mjcf_model) # Normalize the quaternion. quat /= np.linalg.norm(quat) # Drive the arm so that the pinch site is at the desired position and # orientation. success = arm.set_site_to_xpos( physics=physics, random_state=np.random.RandomState(0), site=hand.pinch_site, target_pos=pos, target_quat=quat) self.assertTrue(success) # Check that the observations are as expected. observed_pos = hand.observables.pinch_site_pos(physics) np.testing.assert_allclose(observed_pos, pos, atol=1e-3) observed_rmat = hand.observables.pinch_site_rmat(physics).reshape(3, 3) expected_rmat = np.empty((3, 3), np.double) mjlib.mju_quat2Mat(expected_rmat.ravel(), quat) difference_rmat = observed_rmat.dot(expected_rmat.T) # `difference_rmat` might not be perfectly orthonormal, which could lead to # an invalid value being passed to arccos. u, _, vt = np.linalg.svd(difference_rmat, full_matrices=False) ortho_difference_rmat = u.dot(vt) angular_difference = np.arccos((np.trace(ortho_difference_rmat) - 1) / 2) self.assertLess(angular_difference, 1e-3) if __name__ == '__main__': absltest.main()<|fim▁end|>
physics = mjcf.Physics.from_mjcf_model(hand.mjcf_model)
<|file_name|>octave.js<|end_file_name|><|fim▁begin|><|fim▁hole|>Simpla CMS 2.3.8 = 1040075c69dc0e56580b73f479381087<|fim▁end|>
<|file_name|>TimeModel.py<|end_file_name|><|fim▁begin|>from time import time from gi.repository import GLib, GObject from pychess.Utils.const import WHITE, BLACK from pychess.System.Log import log class TimeModel(GObject.GObject): __gsignals__ = { "player_changed": (GObject.SignalFlags.RUN_FIRST, None, ()), "time_changed": (GObject.SignalFlags.RUN_FIRST, None, ()), "zero_reached": (GObject.SignalFlags.RUN_FIRST, None, (int, )), "pause_changed": (GObject.SignalFlags.RUN_FIRST, None, (bool, )) } ############################################################################ # Initing # ############################################################################ def __init__(self, secs=0, gain=0, bsecs=-1, minutes=-1): GObject.GObject.__init__(self) if bsecs < 0: bsecs = secs if minutes < 0: minutes = secs / 60 self.minutes = minutes # The number of minutes for the original starting # time control (not necessarily where the game was resumed, # i.e. self.intervals[0][0]) self.intervals = [[secs], [bsecs]] self.gain = gain self.secs = secs # in FICS games we don't count gain self.handle_gain = True self.paused = False # The left number of secconds at the time pause was turned on self.pauseInterval = 0 self.counter = None self.started = False self.ended = False self.movingColor = WHITE self.connect('time_changed', self.__zerolistener, 'time_changed') self.connect('player_changed', self.__zerolistener, 'player_changed') self.connect('pause_changed', self.__zerolistener, 'pause_changed') self.zero_listener_id = None self.zero_listener_time = 0 self.zero_listener_source = None def __repr__(self): text = "<TimeModel object at %s (White: %s Black: %s ended=%s)>" % \ (id(self), str(self.getPlayerTime(WHITE)), str(self.getPlayerTime(BLACK)), self.ended) return text def __zerolistener(self, *args): if self.ended: return False cur_time = time() whites_time = cur_time + self.getPlayerTime(WHITE) blacks_time = cur_time + self.getPlayerTime(BLACK) if whites_time <= blacks_time: 
the_time = whites_time color = WHITE else: the_time = blacks_time color = BLACK remaining_time = the_time - cur_time + 0.01 if remaining_time > 0 and remaining_time != self.zero_listener_time: if (self.zero_listener_id is not None) and \ (self.zero_listener_source is not None) and \ not self.zero_listener_source.is_destroyed(): GLib.source_remove(self.zero_listener_id) self.zero_listener_time = remaining_time self.zero_listener_id = GLib.timeout_add(10, self.__checkzero, color) default_context = GLib.main_context_get_thread_default( ) or GLib.main_context_default() if hasattr(default_context, "find_source_by_id"): self.zero_listener_source = default_context.find_source_by_id( self.zero_listener_id) def __checkzero(self, color): if self.getPlayerTime(color) <= 0 and self.started: self.emit('zero_reached', color) return False return True ############################################################################ # Interacting # ############################################################################ def setMovingColor(self, movingColor): self.movingColor = movingColor self.emit("player_changed") def tap(self): if self.paused: return gain = self.gain if self.handle_gain else 0 ticker = self.intervals[self.movingColor][-1] + gain if self.started: if self.counter is not None: ticker -= time() - self.counter else: # FICS rule if self.ply >= 1: self.started = True self.intervals[self.movingColor].append(ticker) self.movingColor = 1 - self.movingColor if self.started: self.counter = time() self.emit("time_changed") self.emit("player_changed") def start(self): if self.started: return self.counter = time() self.emit("time_changed") def end(self): log.debug("TimeModel.end: self=%s" % self) self.pause() self.ended = True if (self.zero_listener_id is not None) and \ (self.zero_listener_source is not None) and \ not self.zero_listener_source.is_destroyed(): GLib.source_remove(self.zero_listener_id) def pause(self): log.debug("TimeModel.pause: self=%s" % self) if 
self.paused: return self.paused = True if self.counter is not None: self.pauseInterval = time() - self.counter self.counter = None self.emit("time_changed") self.emit("pause_changed", True) def resume(self): log.debug("TimeModel.resume: self=%s" % self) if not self.paused: return self.paused = False self.counter = time() - self.pauseInterval self.emit("pause_changed", False) ############################################################################ # Undo and redo in TimeModel # ############################################################################ def undoMoves(self, moves): """ Sets time and color to move, to the values they were having in the beginning of the ply before the current. his move. Example: White intervals (is thinking): [120, 130, ...] Black intervals: [120, 115] Is undoed to: White intervals: [120, 130] Black intervals (is thinking): [120, ...] """ if not self.started: self.start() for move in range(moves): self.movingColor = 1 - self.movingColor del self.intervals[self.movingColor][-1] if len(self.intervals[0]) + len(self.intervals[1]) >= 4: self.counter = time() else: self.started = False self.counter = None self.emit("time_changed") self.emit("player_changed") ############################################################################ # Updating # ############################################################################ def updatePlayer(self, color, secs): self.intervals[color][-1] = secs if color == self.movingColor and self.started: self.counter = secs + time() - self.intervals[color][-1] self.emit("time_changed") ############################################################################ # Info # ############################################################################ def getPlayerTime(self, color, movecount=-1):<|fim▁hole|> return self.intervals[color][movecount] - (time() - self.counter) return self.intervals[color][movecount] def getInitialTime(self): return self.intervals[WHITE][0] def getElapsedMoveTime(self, ply): 
movecount, color = divmod(ply + 1, 2) gain = self.gain if ply > 2 else 0 if len(self.intervals[color]) > movecount: return self.intervals[color][movecount - 1] - self.intervals[ color][movecount] + gain if movecount > 1 else 0 else: return 0 @property def display_text(self): text = ("%d " % self.minutes) + _("min") if self.gain != 0: text += (" + %d " % self.gain) + _("sec") return text @property def hasTimes(self): return len(self.intervals[0]) > 1 @property def ply(self): return len(self.intervals[BLACK]) + len(self.intervals[WHITE]) - 2 def hasBWTimes(self, bmovecount, wmovecount): return len(self.intervals[BLACK]) > bmovecount and len(self.intervals[ WHITE]) > wmovecount<|fim▁end|>
if color == self.movingColor and self.started and movecount == -1: if self.paused: return self.intervals[color][movecount] - self.pauseInterval elif self.counter:
<|file_name|>calibrate_head_s2r2.py<|end_file_name|><|fim▁begin|>#Copyright 2008, Meka Robotics #All rights reserved. #http://mekabot.com #Redistribution and use in source and binary forms, with or without #modification, are permitted. #THIS SOFTWARE IS PROVIDED BY THE Copyright HOLDERS AND CONTRIBUTORS #"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE #Copyright OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, #INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES INCLUDING, #BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; #LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER #CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT #LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN #ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #POSSIBILITY OF SUCH DAMAGE. 
import time import numpy.numarray as na #import Numeric as nu import math import os import sys import yaml import m3.unit_conversion as m3u from m3qa.calibrate import * from m3qa.calibrate_sensors import * from m3qa.calibrate_actuator_ec_r2 import * import m3.actuator_ec_pb2 as aec import m3qa.config_head_s2r2 as s2r2 # ######################################## Default S2 ############################################################ config_default_s2_j0={ 'calib':s2r2.config_head_s2r2_actuator_j0['calib'], 'param':s2r2.config_head_s2r2_actuator_j0['param'], 'param_internal': { 'joint_limits': [-36.0,19.0], } } config_default_s2_j1={ 'calib':s2r2.config_head_s2r2_actuator_j1['calib'], 'param':s2r2.config_head_s2r2_actuator_j1['param'], 'param_internal': { 'joint_limits': [-60.0,60.0], } } config_default_s2_j2={ 'calib':s2r2.config_head_s2r2_actuator_j2['calib'], 'param':s2r2.config_head_s2r2_actuator_j2['param'], 'param_internal': { 'joint_limits': [-20.0,20.0], } } config_default_s2_j3={ 'calib':s2r2.config_head_s2r2_actuator_j3['calib'], 'param':s2r2.config_head_s2r2_actuator_j3['param'], 'param_internal': { 'joint_limits': [-7.5,13.0], } } config_default_s2_j4={ 'calib':s2r2.config_head_s2r2_actuator_j4['calib'], 'param':s2r2.config_head_s2r2_actuator_j4['param'], 'param_internal': { 'joint_limits': [-65.0,65.0], } } config_default_s2_j5={ 'calib':s2r2.config_head_s2r2_actuator_j5['calib'], 'param':s2r2.config_head_s2r2_actuator_j5['param'], 'param_internal': { 'joint_limits': [-36.0,36.0], } } config_default_s2_j6={ 'calib':s2r2.config_head_s2r2_actuator_j6['calib'], 'param':s2r2.config_head_s2r2_actuator_j6['param'], 'param_internal': { 'joint_limits': [-40.0,32.0], } } # ######################################## ENS S2 ############################################################ config_default_s2_j7={ 'calib':s2r2.config_head_s2r2_actuator_j7_ens_eyelids['calib'], 'param':s2r2.config_head_s2r2_actuator_j7_ens_eyelids['param'], 'param_internal': { 'joint_limits': 
[0.0,191.0], 'pwm_theta': [-800,800] } } # ########################################################################### class M3Calibrate_Head_S2R2(M3CalibrateActuatorEcR2): def __init__(self): M3CalibrateActuatorEcR2.__init__(self) def do_task(self,ct): if ct=='tt': self.reset_sensor('theta') if self.jid>=7: self.calibrate_theta(use_pwm=True) else: self.calibrate_theta() self.write_config() return True if M3CalibrateActuatorEcR2.do_task(self,ct): return True return False def print_tasks(self): M3Calibrate.print_tasks(self)<|fim▁hole|> print 'et: ext_temp' print 'at: amp_temp' print 'sa: sensor analyze' print 'tt: calibrate theta' print 'zt: zero theta' def start(self,ctype): self.joint_names=['Neck Tilt J0', 'Neck Pan J1', 'Head Roll J2', 'Head Tilt J3', 'Eye Tilt J4', 'Eye Pan Right J5', 'Eye Pan Left J6', 'Eyelids J7'] self.config_default=[ config_default_s2_j0, config_default_s2_j1, config_default_s2_j2, config_default_s2_j3, config_default_s2_j4, config_default_s2_j5, config_default_s2_j6, config_default_s2_j7] if not M3CalibrateActuatorEcR2.start(self,ctype): return False self.jid=int(self.comp_ec.name[self.comp_ec.name.find('_j')+2:]) self.calib_default=self.config_default[self.jid]['calib'] self.param_default=self.config_default[self.jid]['param'] self.param_internal=self.config_default[self.jid]['param_internal'] print 'Calibrating joint',self.joint_names[self.jid] return True<|fim▁end|>
<|file_name|>test_project_duplicate_subtask.py<|end_file_name|><|fim▁begin|># Copyright (C) 2021 ForgeFlow S.L. # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html) from odoo.tests.common import TransactionCase class TestProjectDuplicateSubtask(TransactionCase): def setUp(self):<|fim▁hole|> super().setUp() self.project1 = self.env["project.project"].create({"name": "Project 1"}) self.task1 = self.env["project.task"].create( {"name": "name1", "project_id": self.project1.id} ) self.subtask1 = self.env["project.task"].create( {"name": "2", "project_id": self.project1.id, "parent_id": self.task1.id} ) self.subtask2 = self.env["project.task"].create( {"name": "3", "project_id": self.project1.id, "parent_id": self.task1.id} ) def test_check_subtasks(self): self.task1.action_duplicate_subtasks() new_task = self.env["project.task"].search( [("name", "ilike", self.task1.name), ("name", "ilike", "copy")] ) self.assertEqual( len(new_task.child_ids), 2, "Two subtasks should have been created" )<|fim▁end|>
<|file_name|>vxworks_dassemble.py<|end_file_name|><|fim▁begin|>import os import sys import string filenames = os.listdir(os.getcwd()) for file in filenames: if os.path.splitext(file)[1] == ".o" or os.path.splitext(file)[1] == ".elf" : print "objdumparm.exe -D "+file os.system("C:/WindRiver/gnu/4.1.2-vxworks-6.8/x86-win32/bin/objdumparm.exe -D "+file +" > " +file + ".txt") <|fim▁hole|><|fim▁end|>
os.system("pause")
<|file_name|>sboxes.rs<|end_file_name|><|fim▁begin|>/// Tiger s-boxes pub const SBOXES: [[u64; 256]; 4] = [[0x02AAB17CF7E90C5E, 0xAC424B03E243A8EC, 0x72CD5BE30DD5FCD3, 0x6D019B93F6F97F3A, 0xCD9978FFD21F9193, 0x7573A1C9708029E2, 0xB164326B922A83C3, 0x46883EEE04915870, 0xEAACE3057103ECE6, 0xC54169B808A3535C, 0x4CE754918DDEC47C, 0x0AA2F4DFDC0DF40C, 0x10B76F18A74DBEFA, 0xC6CCB6235AD1AB6A, 0x13726121572FE2FF, 0x1A488C6F199D921E, 0x4BC9F9F4DA0007CA, 0x26F5E6F6E85241C7, 0x859079DBEA5947B6, 0x4F1885C5C99E8C92, 0xD78E761EA96F864B, 0x8E36428C52B5C17D, 0x69CF6827373063C1, 0xB607C93D9BB4C56E, 0x7D820E760E76B5EA, 0x645C9CC6F07FDC42, 0xBF38A078243342E0, 0x5F6B343C9D2E7D04, 0xF2C28AEB600B0EC6, 0x6C0ED85F7254BCAC, 0x71592281A4DB4FE5, 0x1967FA69CE0FED9F, 0xFD5293F8B96545DB, 0xC879E9D7F2A7600B, 0x860248920193194E, 0xA4F9533B2D9CC0B3, 0x9053836C15957613, 0xDB6DCF8AFC357BF1, 0x18BEEA7A7A370F57, 0x037117CA50B99066, 0x6AB30A9774424A35, 0xF4E92F02E325249B, 0x7739DB07061CCAE1, 0xD8F3B49CECA42A05, 0xBD56BE3F51382F73, 0x45FAED5843B0BB28, 0x1C813D5C11BF1F83, 0x8AF0E4B6D75FA169, 0x33EE18A487AD9999, 0x3C26E8EAB1C94410, 0xB510102BC0A822F9, 0x141EEF310CE6123B, 0xFC65B90059DDB154, 0xE0158640C5E0E607, 0x884E079826C3A3CF, 0x930D0D9523C535FD, 0x35638D754E9A2B00, 0x4085FCCF40469DD5, 0xC4B17AD28BE23A4C, 0xCAB2F0FC6A3E6A2E, 0x2860971A6B943FCD, 0x3DDE6EE212E30446, 0x6222F32AE01765AE, 0x5D550BB5478308FE, 0xA9EFA98DA0EDA22A, 0xC351A71686C40DA7, 0x1105586D9C867C84, 0xDCFFEE85FDA22853, 0xCCFBD0262C5EEF76, 0xBAF294CB8990D201, 0xE69464F52AFAD975, 0x94B013AFDF133E14, 0x06A7D1A32823C958, 0x6F95FE5130F61119, 0xD92AB34E462C06C0, 0xED7BDE33887C71D2, 0x79746D6E6518393E, 0x5BA419385D713329, 0x7C1BA6B948A97564, 0x31987C197BFDAC67, 0xDE6C23C44B053D02, 0x581C49FED002D64D, 0xDD474D6338261571, 0xAA4546C3E473D062, 0x928FCE349455F860, 0x48161BBACAAB94D9, 0x63912430770E6F68, 0x6EC8A5E602C6641C, 0x87282515337DDD2B, 0x2CDA6B42034B701B, 0xB03D37C181CB096D, 0xE108438266C71C6F, 0x2B3180C7EB51B255, 0xDF92B82F96C08BBC, 
0x5C68C8C0A632F3BA, 0x5504CC861C3D0556, 0xABBFA4E55FB26B8F, 0x41848B0AB3BACEB4, 0xB334A273AA445D32, 0xBCA696F0A85AD881, 0x24F6EC65B528D56C, 0x0CE1512E90F4524A, 0x4E9DD79D5506D35A, 0x258905FAC6CE9779, 0x2019295B3E109B33, 0xF8A9478B73A054CC, 0x2924F2F934417EB0, 0x3993357D536D1BC4, 0x38A81AC21DB6FF8B, 0x47C4FBF17D6016BF, 0x1E0FAADD7667E3F5, 0x7ABCFF62938BEB96, 0xA78DAD948FC179C9, 0x8F1F98B72911E50D, 0x61E48EAE27121A91, 0x4D62F7AD31859808, 0xECEBA345EF5CEAEB, 0xF5CEB25EBC9684CE, 0xF633E20CB7F76221, 0xA32CDF06AB8293E4, 0x985A202CA5EE2CA4, 0xCF0B8447CC8A8FB1, 0x9F765244979859A3, 0xA8D516B1A1240017, 0x0BD7BA3EBB5DC726, 0xE54BCA55B86ADB39, 0x1D7A3AFD6C478063, 0x519EC608E7669EDD, 0x0E5715A2D149AA23, 0x177D4571848FF194, 0xEEB55F3241014C22, 0x0F5E5CA13A6E2EC2, 0x8029927B75F5C361, 0xAD139FABC3D6E436, 0x0D5DF1A94CCF402F, 0x3E8BD948BEA5DFC8, 0xA5A0D357BD3FF77E, 0xA2D12E251F74F645, 0x66FD9E525E81A082, 0x2E0C90CE7F687A49, 0xC2E8BCBEBA973BC5, 0x000001BCE509745F, 0x423777BBE6DAB3D6, 0xD1661C7EAEF06EB5, 0xA1781F354DAACFD8, 0x2D11284A2B16AFFC, 0xF1FC4F67FA891D1F, 0x73ECC25DCB920ADA, 0xAE610C22C2A12651, 0x96E0A810D356B78A, 0x5A9A381F2FE7870F, 0xD5AD62EDE94E5530, 0xD225E5E8368D1427, 0x65977B70C7AF4631, 0x99F889B2DE39D74F, 0x233F30BF54E1D143, 0x9A9675D3D9A63C97, 0x5470554FF334F9A8, 0x166ACB744A4F5688, 0x70C74CAAB2E4AEAD, 0xF0D091646F294D12, 0x57B82A89684031D1, 0xEFD95A5A61BE0B6B, 0x2FBD12E969F2F29A, 0x9BD37013FEFF9FE8, 0x3F9B0404D6085A06, 0x4940C1F3166CFE15, 0x09542C4DCDF3DEFB, 0xB4C5218385CD5CE3, 0xC935B7DC4462A641, 0x3417F8A68ED3B63F, 0xB80959295B215B40, 0xF99CDAEF3B8C8572, 0x018C0614F8FCB95D, 0x1B14ACCD1A3ACDF3, 0x84D471F200BB732D, 0xC1A3110E95E8DA16, 0x430A7220BF1A82B8, 0xB77E090D39DF210E, 0x5EF4BD9F3CD05E9D, 0x9D4FF6DA7E57A444, 0xDA1D60E183D4A5F8, 0xB287C38417998E47, 0xFE3EDC121BB31886, 0xC7FE3CCC980CCBEF, 0xE46FB590189BFD03, 0x3732FD469A4C57DC, 0x7EF700A07CF1AD65, 0x59C64468A31D8859, 0x762FB0B4D45B61F6, 0x155BAED099047718, 0x68755E4C3D50BAA6, 0xE9214E7F22D8B4DF, 0x2ADDBF532EAC95F4, 
0x32AE3909B4BD0109, 0x834DF537B08E3450, 0xFA209DA84220728D, 0x9E691D9B9EFE23F7, 0x0446D288C4AE8D7F, 0x7B4CC524E169785B, 0x21D87F0135CA1385, 0xCEBB400F137B8AA5, 0x272E2B66580796BE, 0x3612264125C2B0DE, 0x057702BDAD1EFBB2, 0xD4BABB8EACF84BE9, 0x91583139641BC67B, 0x8BDC2DE08036E024, 0x603C8156F49F68ED, 0xF7D236F7DBEF5111, 0x9727C4598AD21E80, 0xA08A0896670A5FD7, 0xCB4A8F4309EBA9CB, 0x81AF564B0F7036A1, 0xC0B99AA778199ABD, 0x959F1EC83FC8E952, 0x8C505077794A81B9, 0x3ACAAF8F056338F0, 0x07B43F50627A6778, 0x4A44AB49F5ECCC77, 0x3BC3D6E4B679EE98, 0x9CC0D4D1CF14108C, 0x4406C00B206BC8A0, 0x82A18854C8D72D89, 0x67E366B35C3C432C, 0xB923DD61102B37F2, 0x56AB2779D884271D, 0xBE83E1B0FF1525AF, 0xFB7C65D4217E49A9, 0x6BDBE0E76D48E7D4, 0x08DF828745D9179E, 0x22EA6A9ADD53BD34, 0xE36E141C5622200A, 0x7F805D1B8CB750EE, 0xAFE5C7A59F58E837, 0xE27F996A4FB1C23C, 0xD3867DFB0775F0D0, 0xD0E673DE6E88891A, 0x123AEB9EAFB86C25, 0x30F1D5D5C145B895, 0xBB434A2DEE7269E7, 0x78CB67ECF931FA38, 0xF33B0372323BBF9C, 0x52D66336FB279C74, 0x505F33AC0AFB4EAA, 0xE8A5CD99A2CCE187, 0x534974801E2D30BB, 0x8D2D5711D5876D90, 0x1F1A412891BC038E, 0xD6E2E71D82E56648, 0x74036C3A497732B7, 0x89B67ED96361F5AB, 0xFFED95D8F1EA02A2, 0xE72B3BD61464D43D, 0xA6300F170BDC4820, 0xEBC18760ED78A77A], [0xE6A6BE5A05A12138, 0xB5A122A5B4F87C98, 0x563C6089140B6990, 0x4C46CB2E391F5DD5, 0xD932ADDBC9B79434, 0x08EA70E42015AFF5, 0xD765A6673E478CF1, 0xC4FB757EAB278D99, 0xDF11C6862D6E0692, 0xDDEB84F10D7F3B16, 0x6F2EF604A665EA04, 0x4A8E0F0FF0E0DFB3, 0xA5EDEEF83DBCBA51, 0xFC4F0A2A0EA4371E, 0xE83E1DA85CB38429, 0xDC8FF882BA1B1CE2, 0xCD45505E8353E80D, 0x18D19A00D4DB0717, 0x34A0CFEDA5F38101, 0x0BE77E518887CAF2, 0x1E341438B3C45136, 0xE05797F49089CCF9, 0xFFD23F9DF2591D14, 0x543DDA228595C5CD, 0x661F81FD99052A33, 0x8736E641DB0F7B76, 0x15227725418E5307, 0xE25F7F46162EB2FA, 0x48A8B2126C13D9FE, 0xAFDC541792E76EEA, 0x03D912BFC6D1898F, 0x31B1AAFA1B83F51B, 0xF1AC2796E42AB7D9, 0x40A3A7D7FCD2EBAC, 0x1056136D0AFBBCC5, 0x7889E1DD9A6D0C85, 0xD33525782A7974AA, 
0xA7E25D09078AC09B, 0xBD4138B3EAC6EDD0, 0x920ABFBE71EB9E70, 0xA2A5D0F54FC2625C, 0xC054E36B0B1290A3, 0xF6DD59FF62FE932B, 0x3537354511A8AC7D, 0xCA845E9172FADCD4, 0x84F82B60329D20DC, 0x79C62CE1CD672F18, 0x8B09A2ADD124642C, 0xD0C1E96A19D9E726, 0x5A786A9B4BA9500C, 0x0E020336634C43F3, 0xC17B474AEB66D822, 0x6A731AE3EC9BAAC2, 0x8226667AE0840258, 0x67D4567691CAECA5, 0x1D94155C4875ADB5, 0x6D00FD985B813FDF, 0x51286EFCB774CD06, 0x5E8834471FA744AF, 0xF72CA0AEE761AE2E, 0xBE40E4CDAEE8E09A, 0xE9970BBB5118F665, 0x726E4BEB33DF1964, 0x703B000729199762, 0x4631D816F5EF30A7, 0xB880B5B51504A6BE, 0x641793C37ED84B6C, 0x7B21ED77F6E97D96, 0x776306312EF96B73, 0xAE528948E86FF3F4, 0x53DBD7F286A3F8F8, 0x16CADCE74CFC1063, 0x005C19BDFA52C6DD, 0x68868F5D64D46AD3, 0x3A9D512CCF1E186A, 0x367E62C2385660AE, 0xE359E7EA77DCB1D7, 0x526C0773749ABE6E, 0x735AE5F9D09F734B, 0x493FC7CC8A558BA8, 0xB0B9C1533041AB45, 0x321958BA470A59BD, 0x852DB00B5F46C393, 0x91209B2BD336B0E5, 0x6E604F7D659EF19F, 0xB99A8AE2782CCB24, 0xCCF52AB6C814C4C7, 0x4727D9AFBE11727B, 0x7E950D0C0121B34D, 0x756F435670AD471F, 0xF5ADD442615A6849, 0x4E87E09980B9957A, 0x2ACFA1DF50AEE355, 0xD898263AFD2FD556, 0xC8F4924DD80C8FD6, 0xCF99CA3D754A173A, 0xFE477BACAF91BF3C, 0xED5371F6D690C12D, 0x831A5C285E687094, 0xC5D3C90A3708A0A4, 0x0F7F903717D06580, 0x19F9BB13B8FDF27F, 0xB1BD6F1B4D502843, 0x1C761BA38FFF4012, 0x0D1530C4E2E21F3B, 0x8943CE69A7372C8A, 0xE5184E11FEB5CE66, 0x618BDB80BD736621, 0x7D29BAD68B574D0B, 0x81BB613E25E6FE5B, 0x071C9C10BC07913F, 0xC7BEEB7909AC2D97, 0xC3E58D353BC5D757, 0xEB017892F38F61E8, 0xD4EFFB9C9B1CC21A, 0x99727D26F494F7AB, 0xA3E063A2956B3E03, 0x9D4A8B9A4AA09C30, 0x3F6AB7D500090FB4, 0x9CC0F2A057268AC0, 0x3DEE9D2DEDBF42D1, 0x330F49C87960A972, 0xC6B2720287421B41, 0x0AC59EC07C00369C, 0xEF4EAC49CB353425, 0xF450244EEF0129D8, 0x8ACC46E5CAF4DEB6, 0x2FFEAB63989263F7, 0x8F7CB9FE5D7A4578, 0x5BD8F7644E634635, 0x427A7315BF2DC900, 0x17D0C4AA2125261C, 0x3992486C93518E50, 0xB4CBFEE0A2D7D4C3, 0x7C75D6202C5DDD8D, 0xDBC295D8E35B6C61, 0x60B369D302032B19, 
0xCE42685FDCE44132, 0x06F3DDB9DDF65610, 0x8EA4D21DB5E148F0, 0x20B0FCE62FCD496F, 0x2C1B912358B0EE31, 0xB28317B818F5A308, 0xA89C1E189CA6D2CF, 0x0C6B18576AAADBC8, 0xB65DEAA91299FAE3, 0xFB2B794B7F1027E7, 0x04E4317F443B5BEB, 0x4B852D325939D0A6, 0xD5AE6BEEFB207FFC, 0x309682B281C7D374, 0xBAE309A194C3B475, 0x8CC3F97B13B49F05, 0x98A9422FF8293967, 0x244B16B01076FF7C, 0xF8BF571C663D67EE, 0x1F0D6758EEE30DA1, 0xC9B611D97ADEB9B7, 0xB7AFD5887B6C57A2, 0x6290AE846B984FE1, 0x94DF4CDEACC1A5FD, 0x058A5BD1C5483AFF, 0x63166CC142BA3C37, 0x8DB8526EB2F76F40, 0xE10880036F0D6D4E, 0x9E0523C9971D311D, 0x45EC2824CC7CD691, 0x575B8359E62382C9, 0xFA9E400DC4889995, 0xD1823ECB45721568, 0xDAFD983B8206082F, 0xAA7D29082386A8CB, 0x269FCD4403B87588, 0x1B91F5F728BDD1E0, 0xE4669F39040201F6, 0x7A1D7C218CF04ADE, 0x65623C29D79CE5CE, 0x2368449096C00BB1, 0xAB9BF1879DA503BA, 0xBC23ECB1A458058E, 0x9A58DF01BB401ECC, 0xA070E868A85F143D, 0x4FF188307DF2239E, 0x14D565B41A641183, 0xEE13337452701602, 0x950E3DCF3F285E09, 0x59930254B9C80953, 0x3BF299408930DA6D, 0xA955943F53691387, 0xA15EDECAA9CB8784, 0x29142127352BE9A0, 0x76F0371FFF4E7AFB, 0x0239F450274F2228, 0xBB073AF01D5E868B, 0xBFC80571C10E96C1, 0xD267088568222E23, 0x9671A3D48E80B5B0, 0x55B5D38AE193BB81, 0x693AE2D0A18B04B8, 0x5C48B4ECADD5335F, 0xFD743B194916A1CA, 0x2577018134BE98C4, 0xE77987E83C54A4AD, 0x28E11014DA33E1B9, 0x270CC59E226AA213, 0x71495F756D1A5F60, 0x9BE853FB60AFEF77, 0xADC786A7F7443DBF, 0x0904456173B29A82, 0x58BC7A66C232BD5E, 0xF306558C673AC8B2, 0x41F639C6B6C9772A, 0x216DEFE99FDA35DA, 0x11640CC71C7BE615, 0x93C43694565C5527, 0xEA038E6246777839, 0xF9ABF3CE5A3E2469, 0x741E768D0FD312D2, 0x0144B883CED652C6, 0xC20B5A5BA33F8552, 0x1AE69633C3435A9D, 0x97A28CA4088CFDEC, 0x8824A43C1E96F420, 0x37612FA66EEEA746, 0x6B4CB165F9CF0E5A, 0x43AA1C06A0ABFB4A, 0x7F4DC26FF162796B, 0x6CBACC8E54ED9B0F, 0xA6B7FFEFD2BB253E, 0x2E25BC95B0A29D4F, 0x86D6A58BDEF1388C, 0xDED74AC576B6F054, 0x8030BDBC2B45805D, 0x3C81AF70E94D9289, 0x3EFF6DDA9E3100DB, 0xB38DC39FDFCC8847, 0x123885528D17B87E, 
0xF2DA0ED240B1B642, 0x44CEFADCD54BF9A9, 0x1312200E433C7EE6, 0x9FFCC84F3A78C748, 0xF0CD1F72248576BB, 0xEC6974053638CFE4, 0x2BA7B67C0CEC4E4C, 0xAC2F4DF3E5CE32ED, 0xCB33D14326EA4C11, 0xA4E9044CC77E58BC, 0x5F513293D934FCEF, 0x5DC9645506E55444, 0x50DE418F317DE40A, 0x388CB31A69DDE259, 0x2DB4A83455820A86, 0x9010A91E84711AE9, 0x4DF7F0B7B1498371, 0xD62A2EABC0977179, 0x22FAC097AA8D5C0E], [0xF49FCC2FF1DAF39B, 0x487FD5C66FF29281, 0xE8A30667FCDCA83F, 0x2C9B4BE3D2FCCE63, 0xDA3FF74B93FBBBC2, 0x2FA165D2FE70BA66, 0xA103E279970E93D4, 0xBECDEC77B0E45E71, 0xCFB41E723985E497, 0xB70AAA025EF75017, 0xD42309F03840B8E0, 0x8EFC1AD035898579, 0x96C6920BE2B2ABC5, 0x66AF4163375A9172, 0x2174ABDCCA7127FB, 0xB33CCEA64A72FF41, 0xF04A4933083066A5, 0x8D970ACDD7289AF5, 0x8F96E8E031C8C25E, 0xF3FEC02276875D47, 0xEC7BF310056190DD, 0xF5ADB0AEBB0F1491, 0x9B50F8850FD58892, 0x4975488358B74DE8, 0xA3354FF691531C61, 0x0702BBE481D2C6EE, 0x89FB24057DEDED98, 0xAC3075138596E902, 0x1D2D3580172772ED, 0xEB738FC28E6BC30D, 0x5854EF8F63044326, 0x9E5C52325ADD3BBE, 0x90AA53CF325C4623, 0xC1D24D51349DD067, 0x2051CFEEA69EA624, 0x13220F0A862E7E4F, 0xCE39399404E04864, 0xD9C42CA47086FCB7, 0x685AD2238A03E7CC, 0x066484B2AB2FF1DB, 0xFE9D5D70EFBF79EC, 0x5B13B9DD9C481854, 0x15F0D475ED1509AD, 0x0BEBCD060EC79851, 0xD58C6791183AB7F8, 0xD1187C5052F3EEE4, 0xC95D1192E54E82FF, 0x86EEA14CB9AC6CA2, 0x3485BEB153677D5D, 0xDD191D781F8C492A, 0xF60866BAA784EBF9, 0x518F643BA2D08C74, 0x8852E956E1087C22, 0xA768CB8DC410AE8D, 0x38047726BFEC8E1A, 0xA67738B4CD3B45AA, 0xAD16691CEC0DDE19, 0xC6D4319380462E07, 0xC5A5876D0BA61938, 0x16B9FA1FA58FD840, 0x188AB1173CA74F18, 0xABDA2F98C99C021F, 0x3E0580AB134AE816, 0x5F3B05B773645ABB, 0x2501A2BE5575F2F6, 0x1B2F74004E7E8BA9, 0x1CD7580371E8D953, 0x7F6ED89562764E30, 0xB15926FF596F003D, 0x9F65293DA8C5D6B9, 0x6ECEF04DD690F84C, 0x4782275FFF33AF88, 0xE41433083F820801, 0xFD0DFE409A1AF9B5, 0x4325A3342CDB396B, 0x8AE77E62B301B252, 0xC36F9E9F6655615A, 0x85455A2D92D32C09, 0xF2C7DEA949477485, 0x63CFB4C133A39EBA, 
0x83B040CC6EBC5462, 0x3B9454C8FDB326B0, 0x56F56A9E87FFD78C, 0x2DC2940D99F42BC6, 0x98F7DF096B096E2D, 0x19A6E01E3AD852BF, 0x42A99CCBDBD4B40B, 0xA59998AF45E9C559, 0x366295E807D93186, 0x6B48181BFAA1F773, 0x1FEC57E2157A0A1D, 0x4667446AF6201AD5, 0xE615EBCACFB0F075, 0xB8F31F4F68290778, 0x22713ED6CE22D11E, 0x3057C1A72EC3C93B, 0xCB46ACC37C3F1F2F, 0xDBB893FD02AAF50E, 0x331FD92E600B9FCF, 0xA498F96148EA3AD6, 0xA8D8426E8B6A83EA, 0xA089B274B7735CDC, 0x87F6B3731E524A11, 0x118808E5CBC96749, 0x9906E4C7B19BD394, 0xAFED7F7E9B24A20C, 0x6509EADEEB3644A7, 0x6C1EF1D3E8EF0EDE, 0xB9C97D43E9798FB4, 0xA2F2D784740C28A3, 0x7B8496476197566F, 0x7A5BE3E6B65F069D, 0xF96330ED78BE6F10, 0xEEE60DE77A076A15, 0x2B4BEE4AA08B9BD0, 0x6A56A63EC7B8894E, 0x02121359BA34FEF4, 0x4CBF99F8283703FC, 0x398071350CAF30C8, 0xD0A77A89F017687A, 0xF1C1A9EB9E423569, 0x8C7976282DEE8199, 0x5D1737A5DD1F7ABD, 0x4F53433C09A9FA80, 0xFA8B0C53DF7CA1D9, 0x3FD9DCBC886CCB77, 0xC040917CA91B4720, 0x7DD00142F9D1DCDF, 0x8476FC1D4F387B58, 0x23F8E7C5F3316503, 0x032A2244E7E37339, 0x5C87A5D750F5A74B, 0x082B4CC43698992E, 0xDF917BECB858F63C, 0x3270B8FC5BF86DDA, 0x10AE72BB29B5DD76, 0x576AC94E7700362B, 0x1AD112DAC61EFB8F, 0x691BC30EC5FAA427, 0xFF246311CC327143, 0x3142368E30E53206, 0x71380E31E02CA396, 0x958D5C960AAD76F1, 0xF8D6F430C16DA536, 0xC8FFD13F1BE7E1D2, 0x7578AE66004DDBE1, 0x05833F01067BE646, 0xBB34B5AD3BFE586D, 0x095F34C9A12B97F0, 0x247AB64525D60CA8, 0xDCDBC6F3017477D1, 0x4A2E14D4DECAD24D, 0xBDB5E6D9BE0A1EEB, 0x2A7E70F7794301AB, 0xDEF42D8A270540FD, 0x01078EC0A34C22C1, 0xE5DE511AF4C16387, 0x7EBB3A52BD9A330A, 0x77697857AA7D6435, 0x004E831603AE4C32, 0xE7A21020AD78E312, 0x9D41A70C6AB420F2, 0x28E06C18EA1141E6, 0xD2B28CBD984F6B28, 0x26B75F6C446E9D83, 0xBA47568C4D418D7F, 0xD80BADBFE6183D8E, 0x0E206D7F5F166044, 0xE258A43911CBCA3E, 0x723A1746B21DC0BC, 0xC7CAA854F5D7CDD3, 0x7CAC32883D261D9C, 0x7690C26423BA942C, 0x17E55524478042B8, 0xE0BE477656A2389F, 0x4D289B5E67AB2DA0, 0x44862B9C8FBBFD31, 0xB47CC8049D141365, 0x822C1B362B91C793, 0x4EB14655FB13DFD8, 
0x1ECBBA0714E2A97B, 0x6143459D5CDE5F14, 0x53A8FBF1D5F0AC89, 0x97EA04D81C5E5B00, 0x622181A8D4FDB3F3, 0xE9BCD341572A1208, 0x1411258643CCE58A, 0x9144C5FEA4C6E0A4, 0x0D33D06565CF620F, 0x54A48D489F219CA1, 0xC43E5EAC6D63C821, 0xA9728B3A72770DAF, 0xD7934E7B20DF87EF, 0xE35503B61A3E86E5, 0xCAE321FBC819D504, 0x129A50B3AC60BFA6, 0xCD5E68EA7E9FB6C3, 0xB01C90199483B1C7, 0x3DE93CD5C295376C, 0xAED52EDF2AB9AD13, 0x2E60F512C0A07884, 0xBC3D86A3E36210C9, 0x35269D9B163951CE, 0x0C7D6E2AD0CDB5FA, 0x59E86297D87F5733, 0x298EF221898DB0E7, 0x55000029D1A5AA7E, 0x8BC08AE1B5061B45, 0xC2C31C2B6C92703A, 0x94CC596BAF25EF42, 0x0A1D73DB22540456, 0x04B6A0F9D9C4179A, 0xEFFDAFA2AE3D3C60, 0xF7C8075BB49496C4, 0x9CC5C7141D1CD4E3, 0x78BD1638218E5534, 0xB2F11568F850246A, 0xEDFABCFA9502BC29, 0x796CE5F2DA23051B, 0xAAE128B0DC93537C, 0x3A493DA0EE4B29AE, 0xB5DF6B2C416895D7, 0xFCABBD25122D7F37, 0x70810B58105DC4B1, 0xE10FDD37F7882A90, 0x524DCAB5518A3F5C, 0x3C9E85878451255B, 0x4029828119BD34E2, 0x74A05B6F5D3CECCB, 0xB610021542E13ECA, 0x0FF979D12F59E2AC, 0x6037DA27E4F9CC50, 0x5E92975A0DF1847D, 0xD66DE190D3E623FE, 0x5032D6B87B568048, 0x9A36B7CE8235216E, 0x80272A7A24F64B4A, 0x93EFED8B8C6916F7, 0x37DDBFF44CCE1555, 0x4B95DB5D4B99BD25, 0x92D3FDA169812FC0, 0xFB1A4A9A90660BB6, 0x730C196946A4B9B2, 0x81E289AA7F49DA68, 0x64669A0F83B1A05F, 0x27B3FF7D9644F48B, 0xCC6B615C8DB675B3, 0x674F20B9BCEBBE95, 0x6F31238275655982, 0x5AE488713E45CF05, 0xBF619F9954C21157, 0xEABAC46040A8EAE9, 0x454C6FE9F2C0C1CD, 0x419CF6496412691C, 0xD3DC3BEF265B0F70, 0x6D0E60F5C3578A9E], [0x5B0E608526323C55, 0x1A46C1A9FA1B59F5, 0xA9E245A17C4C8FFA, 0x65CA5159DB2955D7, 0x05DB0A76CE35AFC2, 0x81EAC77EA9113D45, 0x528EF88AB6AC0A0D, 0xA09EA253597BE3FF, 0x430DDFB3AC48CD56, 0xC4B3A67AF45CE46F, 0x4ECECFD8FBE2D05E, 0x3EF56F10B39935F0, 0x0B22D6829CD619C6, 0x17FD460A74DF2069, 0x6CF8CC8E8510ED40, 0xD6C824BF3A6ECAA7, 0x61243D581A817049, 0x048BACB6BBC163A2, 0xD9A38AC27D44CC32, 0x7FDDFF5BAAF410AB, 0xAD6D495AA804824B, 0xE1A6A74F2D8C9F94, 0xD4F7851235DEE8E3, 
0xFD4B7F886540D893, 0x247C20042AA4BFDA, 0x096EA1C517D1327C, 0xD56966B4361A6685, 0x277DA5C31221057D, 0x94D59893A43ACFF7, 0x64F0C51CCDC02281, 0x3D33BCC4FF6189DB, 0xE005CB184CE66AF1, 0xFF5CCD1D1DB99BEA, 0xB0B854A7FE42980F, 0x7BD46A6A718D4B9F, 0xD10FA8CC22A5FD8C, 0xD31484952BE4BD31, 0xC7FA975FCB243847, 0x4886ED1E5846C407, 0x28CDDB791EB70B04, 0xC2B00BE2F573417F, 0x5C9590452180F877, 0x7A6BDDFFF370EB00, 0xCE509E38D6D9D6A4, 0xEBEB0F00647FA702, 0x1DCC06CF76606F06, 0xE4D9F28BA286FF0A, 0xD85A305DC918C262, 0x475B1D8732225F54, 0x2D4FB51668CCB5FE, 0xA679B9D9D72BBA20, 0x53841C0D912D43A5, 0x3B7EAA48BF12A4E8, 0x781E0E47F22F1DDF, 0xEFF20CE60AB50973, 0x20D261D19DFFB742, 0x16A12B03062A2E39, 0x1960EB2239650495, 0x251C16FED50EB8B8, 0x9AC0C330F826016E, 0xED152665953E7671, 0x02D63194A6369570, 0x5074F08394B1C987, 0x70BA598C90B25CE1, 0x794A15810B9742F6, 0x0D5925E9FCAF8C6C, 0x3067716CD868744E, 0x910AB077E8D7731B, 0x6A61BBDB5AC42F61, 0x93513EFBF0851567, 0xF494724B9E83E9D5, 0xE887E1985C09648D, 0x34B1D3C675370CFD, 0xDC35E433BC0D255D, 0xD0AAB84234131BE0, 0x08042A50B48B7EAF, 0x9997C4EE44A3AB35, 0x829A7B49201799D0, 0x263B8307B7C54441, 0x752F95F4FD6A6CA6, 0x927217402C08C6E5, 0x2A8AB754A795D9EE, 0xA442F7552F72943D, 0x2C31334E19781208, 0x4FA98D7CEAEE6291, 0x55C3862F665DB309, 0xBD0610175D53B1F3, 0x46FE6CB840413F27, 0x3FE03792DF0CFA59, 0xCFE700372EB85E8F, 0xA7BE29E7ADBCE118, 0xE544EE5CDE8431DD, 0x8A781B1B41F1873E, 0xA5C94C78A0D2F0E7, 0x39412E2877B60728, 0xA1265EF3AFC9A62C, 0xBCC2770C6A2506C5, 0x3AB66DD5DCE1CE12, 0xE65499D04A675B37, 0x7D8F523481BFD216, 0x0F6F64FCEC15F389, 0x74EFBE618B5B13C8, 0xACDC82B714273E1D, 0xDD40BFE003199D17, 0x37E99257E7E061F8, 0xFA52626904775AAA, 0x8BBBF63A463D56F9, 0xF0013F1543A26E64, 0xA8307E9F879EC898, 0xCC4C27A4150177CC, 0x1B432F2CCA1D3348, 0xDE1D1F8F9F6FA013, 0x606602A047A7DDD6, 0xD237AB64CC1CB2C7, 0x9B938E7225FCD1D3, 0xEC4E03708E0FF476, 0xFEB2FBDA3D03C12D, 0xAE0BCED2EE43889A, 0x22CB8923EBFB4F43, 0x69360D013CF7396D, 0x855E3602D2D4E022, 0x073805BAD01F784C, 0x33E17A133852F546, 
0xDF4874058AC7B638, 0xBA92B29C678AA14A, 0x0CE89FC76CFAADCD, 0x5F9D4E0908339E34, 0xF1AFE9291F5923B9, 0x6E3480F60F4A265F, 0xEEBF3A2AB29B841C, 0xE21938A88F91B4AD, 0x57DFEFF845C6D3C3, 0x2F006B0BF62CAAF2, 0x62F479EF6F75EE78, 0x11A55AD41C8916A9, 0xF229D29084FED453, 0x42F1C27B16B000E6, 0x2B1F76749823C074, 0x4B76ECA3C2745360, 0x8C98F463B91691BD, 0x14BCC93CF1ADE66A, 0x8885213E6D458397, 0x8E177DF0274D4711, 0xB49B73B5503F2951, 0x10168168C3F96B6B, 0x0E3D963B63CAB0AE, 0x8DFC4B5655A1DB14, 0xF789F1356E14DE5C, 0x683E68AF4E51DAC1, 0xC9A84F9D8D4B0FD9, 0x3691E03F52A0F9D1, 0x5ED86E46E1878E80, 0x3C711A0E99D07150, 0x5A0865B20C4E9310, 0x56FBFC1FE4F0682E, 0xEA8D5DE3105EDF9B, 0x71ABFDB12379187A, 0x2EB99DE1BEE77B9C, 0x21ECC0EA33CF4523, 0x59A4D7521805C7A1, 0x3896F5EB56AE7C72, 0xAA638F3DB18F75DC, 0x9F39358DABE9808E, 0xB7DEFA91C00B72AC, 0x6B5541FD62492D92, 0x6DC6DEE8F92E4D5B, 0x353F57ABC4BEEA7E, 0x735769D6DA5690CE, 0x0A234AA642391484, 0xF6F9508028F80D9D, 0xB8E319A27AB3F215, 0x31AD9C1151341A4D, 0x773C22A57BEF5805, 0x45C7561A07968633, 0xF913DA9E249DBE36, 0xDA652D9B78A64C68, 0x4C27A97F3BC334EF, 0x76621220E66B17F4, 0x967743899ACD7D0B, 0xF3EE5BCAE0ED6782, 0x409F753600C879FC, 0x06D09A39B5926DB6, 0x6F83AEB0317AC588, 0x01E6CA4A86381F21, 0x66FF3462D19F3025, 0x72207C24DDFD3BFB, 0x4AF6B6D3E2ECE2EB, 0x9C994DBEC7EA08DE, 0x49ACE597B09A8BC4, 0xB38C4766CF0797BA, 0x131B9373C57C2A75, 0xB1822CCE61931E58, 0x9D7555B909BA1C0C, 0x127FAFDD937D11D2, 0x29DA3BADC66D92E4, 0xA2C1D57154C2ECBC, 0x58C5134D82F6FE24, 0x1C3AE3515B62274F, 0xE907C82E01CB8126, 0xF8ED091913E37FCB,<|fim▁hole|> 0x5103F3F76BD52457, 0x15B7E6F5AE47F7A8, 0xDBD7C6DED47E9CCF, 0x44E55C410228BB1A, 0xB647D4255EDB4E99, 0x5D11882BB8AAFC30, 0xF5098BBB29D3212A, 0x8FB5EA14E90296B3, 0x677B942157DD025A, 0xFB58E7C0A390ACB5, 0x89D3674C83BD4A01, 0x9E2DA4DF4BF3B93B, 0xFCC41E328CAB4829, 0x03F38C96BA582C52, 0xCAD1BDBD7FD85DB2, 0xBBB442C16082AE83, 0xB95FE86BA5DA9AB0, 0xB22E04673771A93F, 0x845358C9493152D8, 0xBE2A488697B4541E, 0x95A2DC2DD38E6966, 0xC02C11AC923C852B, 
0x2388B1990DF2A87B, 0x7C8008FA1B4F37BE, 0x1F70D0C84D54E503, 0x5490ADEC7ECE57D4, 0x002B3C27D9063A3A, 0x7EAEA3848030A2BF, 0xC602326DED2003C0, 0x83A7287D69A94086, 0xC57A5FCB30F57A8A, 0xB56844E479EBE779, 0xA373B40F05DCBCE9, 0xD71A786E88570EE2, 0x879CBACDBDE8F6A0, 0x976AD1BCC164A32F, 0xAB21E25E9666D78B, 0x901063AAE5E5C33C, 0x9818B34448698D90, 0xE36487AE3E1E8ABB, 0xAFBDF931893BDCB4, 0x6345A0DC5FBBD519, 0x8628FE269B9465CA, 0x1E5D01603F9C51EC, 0x4DE44006A15049B7, 0xBF6C70E5F776CBB1, 0x411218F2EF552BED, 0xCB0C0708705A36A3, 0xE74D14754F986044, 0xCD56D9430EA8280E, 0xC12591D7535F5065, 0xC83223F1720AEF96, 0xC3A0396F7363A51F]];<|fim▁end|>
0x3249D8F9C80046C9, 0x80CF9BEDE388FB63, 0x1881539A116CF19E,
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Gfx-rs Developers. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. extern crate cgmath; #[macro_use] extern crate gfx; extern crate gfx_app; extern crate rand; extern crate genmesh; extern crate noise; extern crate image; use std::io::Cursor; //use std::collections::HashMap; //use glutin::{PollEventsIterator, Event, VirtualKeyCode, ElementState}; pub use gfx_app::{ColorFormat, DepthFormat}; use gfx::traits::{FactoryExt}; use cgmath::{SquareMatrix, Matrix4, AffineMatrix3}; use cgmath::{Point3, Vector3}; use cgmath::{Transform}; use genmesh::{Vertices, Triangulate};<|fim▁hole|> // this is a value based on a max buffer size (and hence tilemap size) of 64x64 // I imagine you would have a max buffer length, with multiple TileMap instances // of varying sizes based on current screen resolution pub const TILEMAP_BUF_LENGTH: usize = 4096; // texture loading boilerplate pub fn load_texture<R, F>(factory: &mut F, data: &[u8]) -> Result<gfx::handle::ShaderResourceView<R, [f32; 4]>, String> where R: gfx::Resources, F: gfx::Factory<R> { use gfx::format::Rgba8; use gfx::texture as t; let img = image::load(Cursor::new(data), image::PNG).unwrap().to_rgba(); let (width, height) = img.dimensions(); let kind = t::Kind::D2(width as t::Size, height as t::Size, t::AaMode::Single); let (_, view) = factory.create_texture_immutable_u8::<Rgba8>(kind, &[&img]).unwrap(); Ok(view) } // this abstraction is provided to get a 
slightly better API around // input handling /* TODO: input, blocked by `winit` pub struct InputHandler { key_map: HashMap<VirtualKeyCode, bool>, key_list: Vec<VirtualKeyCode> } impl InputHandler { pub fn new() -> InputHandler { InputHandler { key_map: HashMap::new(), key_list: Vec::new() } } pub fn update(& mut self, events: PollEventsIterator) { for event in events { match event { Event::KeyboardInput(ElementState::Pressed, _, key_opt) => { let pressed_key = key_opt.unwrap(); if self.key_map.contains_key(&pressed_key) { self.key_map.insert(pressed_key, true); } else { println!("unknown key {:?} pressed", key_opt); } }, Event::KeyboardInput(ElementState::Released, _, key_opt) => { let released_key = key_opt.unwrap(); if self.key_map.contains_key(&released_key) { self.key_map.insert(released_key, false); } }, _ => {} } } } pub fn watch(&mut self, key: VirtualKeyCode) { if self.key_map.contains_key(&key) { panic!("watching key that is already tracked"); } self.key_map.insert(key, false); self.key_list.push(key); } pub fn is_pressed(&self, key: VirtualKeyCode) -> bool { if self.key_map.contains_key(&key) == false { panic!("checking keydown for key that isn't being tracked"); } *self.key_map.get(&key).unwrap() } }*/ // Actual tilemap data that makes up the elements of the UBO. // NOTE: It may be a bug, but it appears that // [f32;2] won't work as UBO data. 
Possibly an issue with // binding generation gfx_defines!{ constant TileMapData { data: [f32; 4] = "data", } constant ProjectionStuff { model: [[f32; 4]; 4] = "u_Model", view: [[f32; 4]; 4] = "u_View", proj: [[f32; 4]; 4] = "u_Proj", } constant TilemapStuff { world_size: [f32; 4] = "u_WorldSize", tilesheet_size: [f32; 4] = "u_TilesheetSize", offsets: [f32; 2] = "u_TileOffsets", } vertex VertexData { pos: [f32; 3] = "a_Pos", buf_pos: [f32; 2] = "a_BufPos", } pipeline pipe { vbuf: gfx::VertexBuffer<VertexData> = (), projection_cb: gfx::ConstantBuffer<ProjectionStuff> = "b_VsLocals", // tilemap stuff tilemap: gfx::ConstantBuffer<TileMapData> = "b_TileMap", tilemap_cb: gfx::ConstantBuffer<TilemapStuff> = "b_PsLocals", tilesheet: gfx::TextureSampler<[f32; 4]> = "t_TileSheet", // output out_color: gfx::RenderTarget<ColorFormat> = "Target0", out_depth: gfx::DepthTarget<DepthFormat> = gfx::preset::depth::LESS_EQUAL_WRITE, } } impl TileMapData { pub fn new_empty() -> TileMapData { TileMapData { data: [0.0, 0.0, 0.0, 0.0] } } pub fn new(data: [f32; 4]) -> TileMapData { TileMapData { data: data } } } // Abstracts the plane mesh and uniform data // Also holds a Vec<TileMapData> as a working data // set for consumers pub struct TileMapPlane<R> where R: gfx::Resources { pub params: pipe::Data<R>, pub slice: gfx::Slice<R>, proj_stuff: ProjectionStuff, proj_dirty: bool, tm_stuff: TilemapStuff, tm_dirty: bool, pub data: Vec<TileMapData>, } impl<R> TileMapPlane<R> where R: gfx::Resources { pub fn new<F>(factory: &mut F, width: usize, height: usize, tile_size: usize, main_color: gfx::handle::RenderTargetView<R, ColorFormat>, main_depth: gfx::handle::DepthStencilView<R, DepthFormat>, aspect_ratio: f32) -> TileMapPlane<R> where F: gfx::Factory<R> { // charmap info let half_width = (tile_size * width) / 2; let half_height = (tile_size * height) / 2; let total_size = width*height; // tilesheet info let tilesheet_bytes = &include_bytes!("scifitiles-sheet_0.png")[..]; let tilesheet_width = 
14; let tilesheet_height = 9; let tilesheet_tilesize = 32; let tilesheet_total_width = tilesheet_width * tilesheet_tilesize; let tilesheet_total_height = tilesheet_height * tilesheet_tilesize; // set up vertex data let plane = Plane::subdivide(width, width); // law out the vertices of the plane slice based on the configured tile size information, // setting the a_BufPos vertex data for the vertex shader (that ultimate gets passed through // to the frag shader as a varying, used to determine the "current tile" and the frag's offset, // which is used to calculate the displayed frag color) let vertex_data: Vec<VertexData> = plane.shared_vertex_iter() .map(|(raw_x, raw_y)| { let vertex_x = half_width as f32 * raw_x; let vertex_y = half_height as f32 * raw_y; let u_pos = (1.0 + raw_x) / 2.0; let v_pos = (1.0 + raw_y) / 2.0; let tilemap_x = (u_pos * width as f32).floor(); let tilemap_y = (v_pos * height as f32).floor(); VertexData { pos: [vertex_x, vertex_y, 0.0], buf_pos: [tilemap_x as f32, tilemap_y as f32] } }) .collect(); let index_data: Vec<u32> = plane.indexed_polygon_iter() .triangulate() .vertices() .map(|i| i as u32) .collect(); let (vbuf, slice) = factory.create_vertex_buffer_with_slice(&vertex_data, &index_data[..]); let tile_texture = load_texture(factory, tilesheet_bytes).unwrap(); let params = pipe::Data { vbuf: vbuf, projection_cb: factory.create_constant_buffer(1), tilemap: factory.create_constant_buffer(TILEMAP_BUF_LENGTH), tilemap_cb: factory.create_constant_buffer(1), tilesheet: (tile_texture, factory.create_sampler_linear()), out_color: main_color, out_depth: main_depth, }; let mut charmap_data = Vec::with_capacity(total_size); for _ in 0..total_size { charmap_data.push(TileMapData::new_empty()); } let view: AffineMatrix3<f32> = Transform::look_at( Point3::new(0.0, 0.0, 800.0), Point3::new(0.0, 0.0, 0.0), Vector3::unit_y(), ); TileMapPlane { slice: slice, params: params, proj_stuff: ProjectionStuff { model: Matrix4::identity().into(), view: 
view.mat.into(), proj: cgmath::perspective(cgmath::deg(60.0f32), aspect_ratio, 0.1, 4000.0).into(), }, proj_dirty: true, tm_stuff: TilemapStuff { world_size: [width as f32, height as f32, tile_size as f32, 0.0], tilesheet_size: [tilesheet_width as f32, tilesheet_height as f32, tilesheet_total_width as f32, tilesheet_total_height as f32], offsets: [0.0, 0.0], }, tm_dirty: true, data: charmap_data, } } fn prepare_buffers<C>(&mut self, encoder: &mut gfx::Encoder<R, C>, update_data: bool) where C: gfx::CommandBuffer<R> { if update_data { encoder.update_buffer(&self.params.tilemap, &self.data, 0).unwrap(); } if self.proj_dirty { encoder.update_constant_buffer(&self.params.projection_cb, &self.proj_stuff); self.proj_dirty = false; } if self.tm_dirty { encoder.update_constant_buffer(&self.params.tilemap_cb, &self.tm_stuff); self.tm_dirty = false; } } fn clear<C>(&self, encoder: &mut gfx::Encoder<R, C>) where C: gfx::CommandBuffer<R> { encoder.clear(&self.params.out_color, [16.0 / 256.0, 14.0 / 256.0, 22.0 / 256.0, 1.0]); encoder.clear_depth(&self.params.out_depth, 1.0); } pub fn update_view(&mut self, view: &AffineMatrix3<f32>) { self.proj_stuff.view = view.mat.into(); self.proj_dirty = true; } pub fn update_x_offset(&mut self, amt: f32) { self.tm_stuff.offsets[0] = amt; self.tm_dirty = true; } pub fn update_y_offset(&mut self, amt: f32) { self.tm_stuff.offsets[1] = amt; self.tm_dirty = true; } } // Encapsulates the TileMapPlane and holds state for the current // visible set of tiles. 
Is responsible for updating the UBO // within the TileMapData when the visible set of tiles changes pub struct TileMap<R> where R: gfx::Resources { pub tiles: Vec<TileMapData>, pso: gfx::PipelineState<R, pipe::Meta>, tilemap_plane: TileMapPlane<R>, tile_size: f32, tilemap_size: [usize; 2], charmap_size: [usize; 2], limit_coords: [usize; 2], focus_coords: [usize; 2], focus_dirty: bool, } impl<R: gfx::Resources> TileMap<R> { pub fn set_focus(&mut self, focus: [usize; 2]) { if focus[0] <= self.limit_coords[0] && focus[1] <= self.limit_coords[1] { self.focus_coords = focus; let mut charmap_ypos = 0; for ypos in self.focus_coords[1] .. self.focus_coords[1]+self.charmap_size[1] { let mut charmap_xpos = 0; for xpos in self.focus_coords[0] .. self.focus_coords[0]+self.charmap_size[0] { let tile_idx = (ypos * self.tilemap_size[0]) + xpos; let charmap_idx = (charmap_ypos * self.charmap_size[0]) + charmap_xpos; self.tilemap_plane.data[charmap_idx] = self.tiles[tile_idx]; charmap_xpos += 1; } charmap_ypos += 1; } self.focus_dirty = true; } else { panic!("tried to set focus to {:?} with tilemap_size of {:?}", focus, self.tilemap_size); } } pub fn apply_x_offset(&mut self, offset_amt: f32) { let mut new_offset = self.tilemap_plane.tm_stuff.offsets[0] + offset_amt; let curr_focus = self.focus_coords; let new_x = if new_offset < 0.0 { // move down if self.focus_coords[0] == 0 { new_offset = 0.0; 0 } else { new_offset = self.tile_size + new_offset as f32; self.focus_coords[0] - 1 } } else if self.focus_coords[0] == self.limit_coords[0] { // at top, no more offset new_offset = 0.0; self.focus_coords[0] } else if new_offset >= self.tile_size { new_offset = new_offset - self.tile_size as f32; self.focus_coords[0] + 1 } else { // no move self.focus_coords[0] }; if new_x != self.focus_coords[0] { self.set_focus([new_x, curr_focus[1]]); } self.tilemap_plane.update_x_offset(new_offset); } pub fn apply_y_offset(&mut self, offset_amt: f32) { let mut new_offset = 
self.tilemap_plane.tm_stuff.offsets[1] + offset_amt; let curr_focus = self.focus_coords; let new_y = if new_offset < 0.0 { // move down if self.focus_coords[1] == 0 { new_offset = 0.0; 0 } else { new_offset = self.tile_size + new_offset as f32; self.focus_coords[1] - 1 } } else if self.focus_coords[1] == (self.tilemap_size[1] - self.charmap_size[1]) { // at top, no more offset new_offset = 0.0; self.focus_coords[1] } else if new_offset >= self.tile_size { new_offset = new_offset - self.tile_size as f32; self.focus_coords[1] + 1 } else { // no move self.focus_coords[1] }; if new_y != self.focus_coords[1] { self.set_focus([curr_focus[0], new_y]); } self.tilemap_plane.update_y_offset(new_offset); } fn calc_idx(&self, xpos: usize, ypos: usize) -> usize { (ypos * self.tilemap_size[0]) + xpos } pub fn set_tile(&mut self, xpos: usize, ypos: usize, data: [f32; 4]) { let idx = self.calc_idx(xpos, ypos); self.tiles[idx] = TileMapData::new(data); } } fn populate_tilemap<R>(tilemap: &mut TileMap<R>, tilemap_size: [usize; 2]) where R: gfx::Resources { // paper in with dummy data for ypos in 0 .. tilemap_size[1] { for xpos in 0 .. 
tilemap_size[0] { tilemap.set_tile(xpos, ypos, [1.0, 7.0, 0.0, 0.0]); } } tilemap.set_tile(1,3,[5.0, 0.0, 0.0, 0.0]); tilemap.set_tile(2,3,[6.0, 0.0, 0.0, 0.0]); tilemap.set_tile(3,3,[7.0, 0.0, 0.0, 0.0]); tilemap.set_tile(1,2,[5.0, 1.0, 0.0, 0.0]); tilemap.set_tile(2,2,[4.0, 0.0, 0.0, 0.0]); tilemap.set_tile(3,2,[11.0, 2.0, 0.0, 0.0]); tilemap.set_tile(1,1,[5.0, 2.0, 0.0, 0.0]); tilemap.set_tile(2,1,[6.0, 2.0, 0.0, 0.0]); tilemap.set_tile(3,1,[7.0, 2.0, 0.0, 0.0]); tilemap.set_tile(1,0,[4.0, 7.0, 0.0, 0.0]); tilemap.set_tile(2,0,[4.0, 7.0, 0.0, 0.0]); tilemap.set_tile(3,0,[4.0, 7.0, 0.0, 0.0]); tilemap.set_tile(4,2,[4.0, 2.0, 0.0, 0.0]); tilemap.set_tile(5,2,[4.0, 2.0, 0.0, 0.0]); tilemap.set_tile(6,2,[11.0, 1.0, 0.0, 0.0]); tilemap.set_tile(4,1,[4.0, 7.0, 0.0, 0.0]); tilemap.set_tile(5,1,[4.0, 7.0, 0.0, 0.0]); tilemap.set_tile(6,1,[4.0, 7.0, 0.0, 0.0]); tilemap.set_tile(6,3,[4.0, 1.0, 0.0, 0.0]); tilemap.set_tile(6,4,[4.0, 1.0, 0.0, 0.0]); tilemap.set_tile(6,5,[4.0, 1.0, 0.0, 0.0]); tilemap.set_tile(6,6,[4.0, 1.0, 0.0, 0.0]); tilemap.set_tile(6,7,[4.0, 1.0, 0.0, 0.0]); tilemap.set_tile(5,10,[5.0, 0.0, 0.0, 0.0]); tilemap.set_tile(7,10,[7.0, 0.0, 0.0, 0.0]); tilemap.set_tile(5,9,[5.0, 1.0, 0.0, 0.0]); tilemap.set_tile(6,9,[6.0, 1.0, 0.0, 0.0]); tilemap.set_tile(7,9,[7.0, 1.0, 0.0, 0.0]); tilemap.set_tile(5,8,[5.0, 2.0, 0.0, 0.0]); tilemap.set_tile(6,8,[8.0, 2.0, 0.0, 0.0]); tilemap.set_tile(7,8,[7.0, 2.0, 0.0, 0.0]); tilemap.set_tile(5,7,[2.0, 1.0, 0.0, 0.0]); tilemap.set_tile(7,7,[2.0, 1.0, 0.0, 0.0]); tilemap.set_tile(6,10,[2.0, 3.0, 0.0, 0.0]); tilemap.set_tile(6,11,[2.0, 2.0, 0.0, 0.0]); } impl<R: gfx::Resources> gfx_app::Application<R> for TileMap<R> { fn new<F: gfx::Factory<R>>(mut factory: F, init: gfx_app::Init<R>) -> Self { use gfx::traits::FactoryExt; let vs = gfx_app::shade::Source { glsl_150: include_bytes!("shader/tilemap_150.glslv"), hlsl_40: include_bytes!("data/vertex.fx"), .. 
gfx_app::shade::Source::empty() }; let ps = gfx_app::shade::Source { glsl_150: include_bytes!("shader/tilemap_150.glslf"), hlsl_40: include_bytes!("data/pixel.fx"), .. gfx_app::shade::Source::empty() }; // set up charmap plane and configure its tiles let tilemap_size = [24, 24]; let charmap_size = [16, 16]; let tile_size = 32; let mut tiles = Vec::new(); for _ in 0 .. tilemap_size[0]*tilemap_size[1] { tiles.push(TileMapData::new_empty()); } // TODO: should probably check that charmap is smaller than tilemap let mut tm = TileMap { tiles: tiles, pso: factory.create_pipeline_simple( vs.select(init.backend).unwrap(), ps.select(init.backend).unwrap(), pipe::new() ).unwrap(), tilemap_plane: TileMapPlane::new(&mut factory, charmap_size[0], charmap_size[1], tile_size, init.color, init.depth, init.aspect_ratio), tile_size: tile_size as f32, tilemap_size: tilemap_size, charmap_size: charmap_size, limit_coords: [tilemap_size[0] - charmap_size[0], tilemap_size[1] - charmap_size[1]], focus_coords: [0, 0], focus_dirty: false, }; populate_tilemap(&mut tm, tilemap_size); tm.set_focus([0, 0]); tm } fn render<C: gfx::CommandBuffer<R>>(&mut self, encoder: &mut gfx::Encoder<R, C>) { //self.tilemap_plane.update_view(view); self.tilemap_plane.prepare_buffers(encoder, self.focus_dirty); self.focus_dirty = false; self.tilemap_plane.clear(encoder); encoder.draw(&self.tilemap_plane.slice, &self.pso, &self.tilemap_plane.params); } } pub fn main() { use gfx_app::Application; TileMap::launch_default("Tilemap example"); } /*pub fn main() { // reusable variables for camera position let mut distance = 800.0; let mut x_pos = 0.0; let mut y_pos = 0.0; let move_amt = 10.0; let offset_amt = 1.0; // input handling let mut handler = InputHandler::new(); handler.watch(glutin::VirtualKeyCode::Escape); handler.watch(glutin::VirtualKeyCode::Up); handler.watch(glutin::VirtualKeyCode::Down); handler.watch(glutin::VirtualKeyCode::Left); handler.watch(glutin::VirtualKeyCode::Right); 
handler.watch(glutin::VirtualKeyCode::Equals); handler.watch(glutin::VirtualKeyCode::Minus); handler.watch(glutin::VirtualKeyCode::W); handler.watch(glutin::VirtualKeyCode::S); handler.watch(glutin::VirtualKeyCode::A); handler.watch(glutin::VirtualKeyCode::D); 'main: loop { // input handler handler.update(window.poll_events()); // quit when Esc is pressed. if handler.is_pressed(glutin::VirtualKeyCode::Escape) { break 'main; } // zooming in/out if handler.is_pressed(glutin::VirtualKeyCode::Equals) { distance -= move_amt; } if handler.is_pressed(glutin::VirtualKeyCode::Minus) { distance += move_amt; } // panning around if handler.is_pressed(glutin::VirtualKeyCode::Up) { y_pos -= move_amt; } if handler.is_pressed(glutin::VirtualKeyCode::Down) { y_pos += move_amt; } if handler.is_pressed(glutin::VirtualKeyCode::Left) { x_pos -= move_amt; } if handler.is_pressed(glutin::VirtualKeyCode::Right) { x_pos += move_amt; } if handler.is_pressed(glutin::VirtualKeyCode::W) { tilemap.apply_y_offset(&mut factory, offset_amt); } if handler.is_pressed(glutin::VirtualKeyCode::S) { tilemap.apply_y_offset(&mut factory, -offset_amt); } if handler.is_pressed(glutin::VirtualKeyCode::D) { tilemap.apply_x_offset(&mut factory, offset_amt); } if handler.is_pressed(glutin::VirtualKeyCode::A) { tilemap.apply_x_offset(&mut factory, -offset_amt); } // view configuration based on current position let view: AffineMatrix3<f32> = Transform::look_at( Point3::new(x_pos, -y_pos, distance), Point3::new(x_pos, -y_pos, 0.0), Vector3::unit_y(), ); */<|fim▁end|>
use genmesh::generators::{Plane, SharedVertex, IndexedPolygon};
<|file_name|>groupbylazy.rs<|end_file_name|><|fim▁begin|>use std::cell::{Cell, RefCell}; use std::vec; /// A trait to unify FnMut for GroupBy with the chunk key in IntoChunks trait KeyFunction<A> { type Key; fn call_mut(&mut self, arg: A) -> Self::Key; } impl<'a, A, K, F: ?Sized> KeyFunction<A> for F where F: FnMut(A) -> K { type Key = K; #[inline] fn call_mut(&mut self, arg: A) -> Self::Key { (*self)(arg) } } /// ChunkIndex acts like the grouping key function for IntoChunks struct ChunkIndex { size: usize, index: usize, key: usize, } impl ChunkIndex { #[inline(always)] fn new(size: usize) -> Self { ChunkIndex { size: size, index: 0, key: 0, } } } impl<'a, A> KeyFunction<A> for ChunkIndex { type Key = usize; #[inline(always)] fn call_mut(&mut self, _arg: A) -> Self::Key { if self.index == self.size { self.key += 1; self.index = 0; } self.index += 1; self.key } } struct GroupInner<K, I, F> where I: Iterator { key: F, iter: I, current_key: Option<K>, current_elt: Option<I::Item>, /// flag set if iterator is exhausted done: bool, /// Index of group we are currently buffering or visiting top_group: usize, /// Least index for which we still have elements buffered oldest_buffered_group: usize, /// Group index for `buffer[0]` -- the slots /// bottom_group..oldest_buffered_group are unused and will be erased when /// that range is large enough. bottom_group: usize, /// Buffered groups, from `bottom_group` (index 0) to `top_group`. 
buffer: Vec<vec::IntoIter<I::Item>>, /// index of last group iter that was dropped, usize::MAX == none dropped_group: usize, } impl<K, I, F> GroupInner<K, I, F> where I: Iterator, F: for<'a> KeyFunction<&'a I::Item, Key=K>, K: PartialEq, { /// `client`: Index of group that requests next element #[inline(always)] fn step(&mut self, client: usize) -> Option<I::Item> { /* println!("client={}, bottom_group={}, oldest_buffered_group={}, top_group={}, buffers=[{}]", client, self.bottom_group, self.oldest_buffered_group, self.top_group, self.buffer.iter().map(|elt| elt.len()).format(", ")); */ if client < self.oldest_buffered_group { None } else if client < self.top_group || (client == self.top_group && self.buffer.len() > self.top_group - self.bottom_group) { self.lookup_buffer(client) } else if self.done { None } else if self.top_group == client { self.step_current() } else { self.step_buffering(client) } } #[inline(never)] fn lookup_buffer(&mut self, client: usize) -> Option<I::Item> { // if `bufidx` doesn't exist in self.buffer, it might be empty let bufidx = client - self.bottom_group; if client < self.oldest_buffered_group { return None; } let elt = self.buffer.get_mut(bufidx).and_then(|queue| queue.next()); if elt.is_none() && client == self.oldest_buffered_group { // FIXME: VecDeque is unfortunately not zero allocation when empty, // so we do this job manually. // `bottom_group..oldest_buffered_group` is unused, and if it's large enough, erase it. 
self.oldest_buffered_group += 1; // skip forward further empty queues too while self.buffer.get(self.oldest_buffered_group - self.bottom_group) .map_or(false, |buf| buf.len() == 0) { self.oldest_buffered_group += 1; } let nclear = self.oldest_buffered_group - self.bottom_group; if nclear > 0 && nclear >= self.buffer.len() / 2 { let mut i = 0; self.buffer.retain(|buf| { i += 1; debug_assert!(buf.len() == 0 || i > nclear); i > nclear }); self.bottom_group = self.oldest_buffered_group; } } elt } /// Take the next element from the iterator, and set the done /// flag if exhausted. Must not be called after done. #[inline(always)] fn next_element(&mut self) -> Option<I::Item> { debug_assert!(!self.done); match self.iter.next() { None => { self.done = true; None } otherwise => otherwise, } } #[inline(never)] fn step_buffering(&mut self, client: usize) -> Option<I::Item> { // requested a later group -- walk through the current group up to // the requested group index, and buffer the elements (unless // the group is marked as dropped). // Because the `Groups` iterator is always the first to request // each group index, client is the next index efter top_group. 
debug_assert!(self.top_group + 1 == client); let mut group = Vec::new(); if let Some(elt) = self.current_elt.take() { if self.top_group != self.dropped_group { group.push(elt); } } let mut first_elt = None; // first element of the next group while let Some(elt) = self.next_element() { let key = self.key.call_mut(&elt); match self.current_key.take() { None => {} Some(old_key) => if old_key != key { self.current_key = Some(key); first_elt = Some(elt); break; }, } self.current_key = Some(key); if self.top_group != self.dropped_group { group.push(elt); } } if self.top_group != self.dropped_group { self.push_next_group(group); } if first_elt.is_some() { self.top_group += 1; debug_assert!(self.top_group == client); } first_elt } fn push_next_group(&mut self, group: Vec<I::Item>) { // When we add a new buffered group, fill up slots between oldest_buffered_group and top_group while self.top_group - self.bottom_group > self.buffer.len() { if self.buffer.is_empty() { self.bottom_group += 1; self.oldest_buffered_group += 1; } else { self.buffer.push(Vec::new().into_iter()); } } self.buffer.push(group.into_iter()); debug_assert!(self.top_group + 1 - self.bottom_group == self.buffer.len()); } /// This is the immediate case, where we use no buffering #[inline] fn step_current(&mut self) -> Option<I::Item> { debug_assert!(!self.done); if let elt @ Some(..) = self.current_elt.take() { return elt; } match self.next_element() { None => None, Some(elt) => { let key = self.key.call_mut(&elt); match self.current_key.take() { None => {} Some(old_key) => if old_key != key { self.current_key = Some(key); self.current_elt = Some(elt); self.top_group += 1; return None; }, } self.current_key = Some(key); Some(elt) } } } /// Request the just started groups' key. /// /// `client`: Index of group /// /// **Panics** if no group key is available. fn group_key(&mut self, client: usize) -> K { // This can only be called after we have just returned the first // element of a group. 
// Perform this by simply buffering one more element, grabbing the // next key. debug_assert!(!self.done); debug_assert!(client == self.top_group); debug_assert!(self.current_key.is_some()); debug_assert!(self.current_elt.is_none()); let old_key = self.current_key.take().unwrap(); if let Some(elt) = self.next_element() { let key = self.key.call_mut(&elt); if old_key != key { self.top_group += 1; } self.current_key = Some(key); self.current_elt = Some(elt); } old_key } } impl<K, I, F> GroupInner<K, I, F> where I: Iterator, { /// Called when a group is dropped fn drop_group(&mut self, client: usize) { // It's only useful to track the maximal index if self.dropped_group == !0 || client > self.dropped_group { self.dropped_group = client; } } } /// `GroupBy` is the storage for the lazy grouping operation. /// /// If the groups are consumed in their original order, or if each /// group is dropped without keeping it around, then `GroupBy` uses /// no allocations. It needs allocations only if several group iterators /// are alive at the same time. /// /// This type implements `IntoIterator` (it is **not** an iterator /// itself), because the group iterators need to borrow from this /// value. It should be stored in a local variable or temporary and /// iterated. /// /// See [`.group_by()`](../trait.Itertools.html#method.group_by) for more information. pub struct GroupBy<K, I, F> where I: Iterator, { inner: RefCell<GroupInner<K, I, F>>, // the group iterator's current index. Keep this in the main value // so that simultaneous iterators all use the same state. 
index: Cell<usize>, } /// Create a new pub fn new<K, J, F>(iter: J, f: F) -> GroupBy<K, J::IntoIter, F> where J: IntoIterator, F: FnMut(&J::Item) -> K, { GroupBy { inner: RefCell::new(GroupInner { key: f, iter: iter.into_iter(), current_key: None, current_elt: None, done: false, top_group: 0, oldest_buffered_group: 0, bottom_group: 0, buffer: Vec::new(), dropped_group: !0, }), index: Cell::new(0), } } impl<K, I, F> GroupBy<K, I, F> where I: Iterator, { /// `client`: Index of group that requests next element fn step(&self, client: usize) -> Option<I::Item> where F: FnMut(&I::Item) -> K, K: PartialEq, { self.inner.borrow_mut().step(client) } /// `client`: Index of group fn drop_group(&self, client: usize) { self.inner.borrow_mut().drop_group(client) } } impl<'a, K, I, F> IntoIterator for &'a GroupBy<K, I, F> where I: Iterator, I::Item: 'a, F: FnMut(&I::Item) -> K, K: PartialEq { type Item = (K, Group<'a, K, I, F>); type IntoIter = Groups<'a, K, I, F>; fn into_iter(self) -> Self::IntoIter { Groups { parent: self } } } /// An iterator that yields the Group iterators. /// /// Iterator element type is `(K, Group)`: /// the group's key `K` and the group's iterator. /// /// See [`.group_by()`](../trait.Itertools.html#method.group_by) for more information. pub struct Groups<'a, K: 'a, I: 'a, F: 'a> where I: Iterator, I::Item: 'a { parent: &'a GroupBy<K, I, F>, } impl<'a, K, I, F> Iterator for Groups<'a, K, I, F> where I: Iterator, I::Item: 'a, F: FnMut(&I::Item) -> K, K: PartialEq { type Item = (K, Group<'a, K, I, F>); #[inline] fn next(&mut self) -> Option<Self::Item> { let index = self.parent.index.get(); self.parent.index.set(index + 1); let inner = &mut *self.parent.inner.borrow_mut(); inner.step(index).map(|elt| { let key = inner.group_key(index); (key, Group { parent: self.parent, index: index, first: Some(elt), }) }) } } /// An iterator for the elements in a single group. /// /// Iterator element type is `I::Item`. 
pub struct Group<'a, K: 'a, I: 'a, F: 'a> where I: Iterator, I::Item: 'a, { parent: &'a GroupBy<K, I, F>, index: usize, first: Option<I::Item>, } impl<'a, K, I, F> Drop for Group<'a, K, I, F> where I: Iterator, I::Item: 'a, { fn drop(&mut self) { self.parent.drop_group(self.index); } } impl<'a, K, I, F> Iterator for Group<'a, K, I, F> where I: Iterator, I::Item: 'a, F: FnMut(&I::Item) -> K, K: PartialEq, { type Item = I::Item; #[inline] fn next(&mut self) -> Option<Self::Item> { if let elt @ Some(..) = self.first.take() { return elt; } self.parent.step(self.index) } } ///// IntoChunks ///// /// Create a new pub fn new_chunks<J>(iter: J, size: usize) -> IntoChunks<J::IntoIter> where J: IntoIterator, { IntoChunks { inner: RefCell::new(GroupInner { key: ChunkIndex::new(size), iter: iter.into_iter(), current_key: None, current_elt: None, done: false, top_group: 0, oldest_buffered_group: 0, bottom_group: 0, buffer: Vec::new(), dropped_group: !0, }), index: Cell::new(0), } } /// `ChunkLazy` is the storage for a lazy chunking operation. /// /// `IntoChunks` behaves just like `GroupBy`: it is iterable, and /// it only buffers if several chunk iterators are alive at the same time. /// /// This type implements `IntoIterator` (it is **not** an iterator /// itself), because the chunk iterators need to borrow from this /// value. It should be stored in a local variable or temporary and /// iterated. /// /// Iterator element type is `Chunk`, each chunk's iterator. /// /// See [`.chunks()`](../trait.Itertools.html#method.chunks) for more information. pub struct IntoChunks<I> where I: Iterator, { inner: RefCell<GroupInner<usize, I, ChunkIndex>>, // the chunk iterator's current index. Keep this in the main value // so that simultaneous iterators all use the same state. 
index: Cell<usize>, } impl<I> IntoChunks<I> where I: Iterator, { /// `client`: Index of chunk that requests next element fn step(&self, client: usize) -> Option<I::Item> { self.inner.borrow_mut().step(client) } /// `client`: Index of chunk fn drop_group(&self, client: usize) { self.inner.borrow_mut().drop_group(client) } } impl<'a, I> IntoIterator for &'a IntoChunks<I> where I: Iterator, I::Item: 'a, { type Item = Chunk<'a, I>; type IntoIter = Chunks<'a, I>; fn into_iter(self) -> Self::IntoIter { Chunks { parent: self, } } } /// An iterator that yields the Chunk iterators. /// /// Iterator element type is `Chunk`. /// /// See [`.chunks()`](../trait.Itertools.html#method.chunks) for more information. pub struct Chunks<'a, I: 'a> where I: Iterator, I::Item: 'a, { parent: &'a IntoChunks<I>, } impl<'a, I> Iterator for Chunks<'a, I> where I: Iterator, I::Item: 'a, { type Item = Chunk<'a, I>; #[inline] fn next(&mut self) -> Option<Self::Item> { let index = self.parent.index.get(); self.parent.index.set(index + 1); let inner = &mut *self.parent.inner.borrow_mut(); inner.step(index).map(|elt| { Chunk { parent: self.parent, index: index,<|fim▁hole|> first: Some(elt), } }) } } /// An iterator for the elements in a single chunk. /// /// Iterator element type is `I::Item`. pub struct Chunk<'a, I: 'a> where I: Iterator, I::Item: 'a, { parent: &'a IntoChunks<I>, index: usize, first: Option<I::Item>, } impl<'a, I> Drop for Chunk<'a, I> where I: Iterator, I::Item: 'a, { fn drop(&mut self) { self.parent.drop_group(self.index); } } impl<'a, I> Iterator for Chunk<'a, I> where I: Iterator, I::Item: 'a, { type Item = I::Item; #[inline] fn next(&mut self) -> Option<Self::Item> { if let elt @ Some(..) = self.first.take() { return elt; } self.parent.step(self.index) } }<|fim▁end|>
<|file_name|>AtomicVolatilePartitionCounterStateConsistencyTest.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.transactions; import java.util.Collection; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCheckedException; import org.junit.Ignore; /** */ public class AtomicVolatilePartitionCounterStateConsistencyTest extends AtomicPartitionCounterStateConsistencyTest { /** {@inheritDoc} */ @Override protected boolean persistenceEnabled() { return false; } /** {@inheritDoc} */ @Override protected int partitions() { return 1024; } /** {@inheritDoc} */ @Ignore @Override public void testSingleThreadedUpdateOrder() throws Exception {<|fim▁hole|> // Not applicable for volatile mode. } /** {@inheritDoc} */ @Ignore @Override public void testPartitionConsistencyCancelledRebalanceCoordinatorIsDemander() throws Exception { // Not applicable for volatile mode. } /** {@inheritDoc} */ @Ignore @Override public void testLateAffinityChangeDuringExchange() throws Exception { // Not applicable for volatile mode. } /** {@inheritDoc} */ @Override protected void forceCheckpoint(Collection<Ignite> nodes) throws IgniteCheckedException { // No-op. } }<|fim▁end|>
<|file_name|>outlet.ts<|end_file_name|><|fim▁begin|>import { Simple } from '@glimmer/interfaces'; import { DirtyableTag, Tag, TagWrapper, VersionedPathReference } from '@glimmer/reference'; import { Opaque, Option } from '@glimmer/util'; import { environment } from 'ember-environment'; import { run } from 'ember-metal'; import { assign, OWNER } from 'ember-utils'; import { Renderer } from '../renderer'; import { Container, OwnedTemplate } from '../template'; export class RootOutletStateReference implements VersionedPathReference<Option<OutletState>> { tag: Tag; constructor(public outletView: OutletView) { this.tag = outletView._tag; } get(key: string): VersionedPathReference<any> { return new ChildOutletStateReference(this, key); } value(): Option<OutletState> { return this.outletView.outletState; } getOrphan(name: string): VersionedPathReference<Option<OutletState>> { return new OrphanedOutletStateReference(this, name); } update(state: OutletState) { this.outletView.setOutletState(state); } } // So this is a relic of the past that SHOULD go away // in 3.0. Preferably it is deprecated in the release that // follows the Glimmer release. 
class OrphanedOutletStateReference extends RootOutletStateReference { public root: any; public name: string; constructor(root: RootOutletStateReference, name: string) { super(root.outletView); this.root = root; this.name = name; }<|fim▁hole|> value(): Option<OutletState> { let rootState = this.root.value(); let orphans = rootState.outlets.main.outlets.__ember_orphans__; if (!orphans) { return null; } let matched = orphans.outlets[this.name]; if (!matched) { return null; } let state = Object.create(null); state[matched.render.outlet] = matched; matched.wasUsed = true; return { outlets: state, render: undefined }; } } class ChildOutletStateReference implements VersionedPathReference<any> { public parent: VersionedPathReference<any>; public key: string; public tag: Tag; constructor(parent: VersionedPathReference<any>, key: string) { this.parent = parent; this.key = key; this.tag = parent.tag; } get(key: string): VersionedPathReference<any> { return new ChildOutletStateReference(this, key); } value(): any { let parent = this.parent.value(); return parent && parent[this.key]; } } export interface RenderState { owner: Container | undefined; into: string | undefined; outlet: string; name: string; controller: Opaque; template: OwnedTemplate | undefined; } export interface OutletState { outlets: { [name: string]: OutletState | undefined; }; render: RenderState | undefined; } export interface BootEnvironment { hasDOM: boolean; isInteractive: boolean; options: any; } export default class OutletView { private _environment: BootEnvironment; public renderer: Renderer; public owner: Container; public template: OwnedTemplate; public outletState: Option<OutletState>; public _tag: TagWrapper<DirtyableTag>; static extend(injections: any) { return class extends OutletView { static create(options: any) { if (options) { return super.create(assign({}, injections, options)); } else { return super.create(injections); } } }; } static reopenClass(injections: any) { assign(this, injections); 
} static create(options: any) { let { _environment, renderer, template } = options; let owner = options[OWNER]; return new OutletView(_environment, renderer, owner, template); } constructor(_environment: BootEnvironment, renderer: Renderer, owner: Container, template: OwnedTemplate) { this._environment = _environment; this.renderer = renderer; this.owner = owner; this.template = template; this.outletState = null; this._tag = DirtyableTag.create(); } appendTo(selector: string | Simple.Element) { let env = this._environment || environment; let target; if (env.hasDOM) { target = typeof selector === 'string' ? document.querySelector(selector) : selector; } else { target = selector; } run.schedule('render', this.renderer, 'appendOutletView', this, target); } rerender() { /**/ } setOutletState(state: OutletState) { this.outletState = { outlets: { main: state, }, render: { owner: undefined, into: undefined, outlet: 'main', name: '-top-level', controller: undefined, template: undefined, }, }; this._tag.inner.dirty(); } toReference() { return new RootOutletStateReference(this); } destroy() { /**/ } }<|fim▁end|>
<|file_name|>juce_MemoryInputStream.cpp<|end_file_name|><|fim▁begin|>/* ============================================================================== This file is part of the JUCE library. Copyright (c) 2017 - ROLI Ltd. JUCE is an open source library subject to commercial or open-source licensing. The code included in this file is provided under the terms of the ISC license http://www.isc.org/downloads/software-support-policy/isc-license. Permission To use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted provided that the above copyright notice and this permission notice appear in all copies. JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE DISCLAIMED. ============================================================================== */ namespace juce { MemoryInputStream::MemoryInputStream (const void* sourceData, size_t sourceDataSize, bool keepCopy) : data (sourceData), dataSize (sourceDataSize) { if (keepCopy) { internalCopy = MemoryBlock (sourceData, sourceDataSize); data = internalCopy.getData(); } } MemoryInputStream::MemoryInputStream (const MemoryBlock& sourceData, bool keepCopy) : data (sourceData.getData()), dataSize (sourceData.getSize()) { if (keepCopy) { internalCopy = sourceData; data = internalCopy.getData(); } } MemoryInputStream::MemoryInputStream (MemoryBlock&& source) : internalCopy (std::move (source)) { data = internalCopy.getData(); } MemoryInputStream::~MemoryInputStream() { } int64 MemoryInputStream::getTotalLength() { return (int64) dataSize; } int MemoryInputStream::read (void* buffer, int howMany) { jassert (buffer != nullptr && howMany >= 0); if (howMany <= 0 || position >= dataSize) return 0; auto num = jmin ((size_t) howMany, dataSize - position); if (num > 0) { memcpy (buffer, addBytesToPointer (data, position), num); position += num; } return (int) num; } bool 
MemoryInputStream::isExhausted() { return position >= dataSize; } bool MemoryInputStream::setPosition (const int64 pos) { position = (size_t) jlimit ((int64) 0, (int64) dataSize, pos); return true; } int64 MemoryInputStream::getPosition() { return (int64) position; } void MemoryInputStream::skipNextBytes (int64 numBytesToSkip) { if (numBytesToSkip > 0) setPosition (getPosition() + numBytesToSkip); } //============================================================================== //============================================================================== #if JUCE_UNIT_TESTS class MemoryStreamTests : public UnitTest { public: MemoryStreamTests() : UnitTest ("MemoryInputStream & MemoryOutputStream", UnitTestCategories::streams) {} void runTest() override { beginTest ("Basics"); Random r = getRandom(); int randomInt = r.nextInt(); int64 randomInt64 = r.nextInt64(); double randomDouble = r.nextDouble(); String randomString (createRandomWideCharString (r)); MemoryOutputStream mo; mo.writeInt (randomInt); mo.writeIntBigEndian (randomInt); mo.writeCompressedInt (randomInt); mo.writeString (randomString); mo.writeInt64 (randomInt64); mo.writeInt64BigEndian (randomInt64); mo.writeDouble (randomDouble); mo.writeDoubleBigEndian (randomDouble); MemoryInputStream mi (mo.getData(), mo.getDataSize(), false); expect (mi.readInt() == randomInt); expect (mi.readIntBigEndian() == randomInt); expect (mi.readCompressedInt() == randomInt); expectEquals (mi.readString(), randomString); expect (mi.readInt64() == randomInt64); expect (mi.readInt64BigEndian() == randomInt64); expect (mi.readDouble() == randomDouble); expect (mi.readDoubleBigEndian() == randomDouble); const MemoryBlock data ("abcdefghijklmnopqrstuvwxyz", 26); MemoryInputStream stream (data, true); beginTest ("Read"); expectEquals (stream.getPosition(), (int64) 0); expectEquals (stream.getTotalLength(), (int64) data.getSize()); expectEquals (stream.getNumBytesRemaining(), stream.getTotalLength()); expect (! 
stream.isExhausted()); size_t numBytesRead = 0; MemoryBlock readBuffer (data.getSize()); <|fim▁hole|> numBytesRead += (size_t) stream.read (&readBuffer[numBytesRead], 3); expectEquals (stream.getPosition(), (int64) numBytesRead); expectEquals (stream.getNumBytesRemaining(), (int64) (data.getSize() - numBytesRead)); expect (stream.isExhausted() == (numBytesRead == data.getSize())); } expectEquals (stream.getPosition(), (int64) data.getSize()); expectEquals (stream.getNumBytesRemaining(), (int64) 0); expect (stream.isExhausted()); expect (readBuffer == data); beginTest ("Skip"); stream.setPosition (0); expectEquals (stream.getPosition(), (int64) 0); expectEquals (stream.getTotalLength(), (int64) data.getSize()); expectEquals (stream.getNumBytesRemaining(), stream.getTotalLength()); expect (! stream.isExhausted()); numBytesRead = 0; const int numBytesToSkip = 5; while (numBytesRead < data.getSize()) { stream.skipNextBytes (numBytesToSkip); numBytesRead += numBytesToSkip; numBytesRead = std::min (numBytesRead, data.getSize()); expectEquals (stream.getPosition(), (int64) numBytesRead); expectEquals (stream.getNumBytesRemaining(), (int64) (data.getSize() - numBytesRead)); expect (stream.isExhausted() == (numBytesRead == data.getSize())); } expectEquals (stream.getPosition(), (int64) data.getSize()); expectEquals (stream.getNumBytesRemaining(), (int64) 0); expect (stream.isExhausted()); } static String createRandomWideCharString (Random& r) { juce_wchar buffer [50] = { 0 }; for (int i = 0; i < numElementsInArray (buffer) - 1; ++i) { if (r.nextBool()) { do { buffer[i] = (juce_wchar) (1 + r.nextInt (0x10ffff - 1)); } while (! CharPointer_UTF16::canRepresent (buffer[i])); } else buffer[i] = (juce_wchar) (1 + r.nextInt (0xff)); } return CharPointer_UTF32 (buffer); } }; static MemoryStreamTests memoryInputStreamUnitTests; #endif } // namespace juce<|fim▁end|>
while (numBytesRead < data.getSize()) {
<|file_name|>MotivationBtnView.js<|end_file_name|><|fim▁begin|>define([ 'backbone', 'metro', 'util' ], function(Backbone, Metro, Util) { var MotivationBtnView = Backbone.View.extend({ className: 'motivation-btn-view menu-btn', events: { 'click': 'toggle', 'mouseover': 'over', 'mouseout': 'out', }, initialize: function(){ //ensure correct scope _.bindAll(this, 'render', 'unrender', 'toggle', 'over', 'out'); //initial param this.motivationView = new MotivationView(); //add to page this.render(); }, render: function() { var $button = $('<span class="mif-compass">'); $(this.el).html($button); $(this.el).attr('title', 'motivation...'); $('body > .container').append($(this.el)); return this; }, unrender: function() { this.drawElementsView.unrender(); $(this.el).remove(); }, toggle: function() { this.drawElementsView.toggle(); }, over: function() { $(this.el).addClass('expand'); }, out: function() { $(this.el).removeClass('expand'); } });<|fim▁hole|> className: 'motivation-view', events: { 'click .draw': 'draw', 'click .clean': 'clean', 'change .input-type > select': 'clean' }, initialize: function(){ //ensure correct scope _.bindAll(this, 'render', 'unrender', 'toggle', 'drawMotivation', 'drawGPS', 'drawAssignedSection', 'drawAugmentedSection'); //motivation param this.param = {}; this.param.o_lng = 114.05604600906372; this.param.o_lat = 22.551225247189432; this.param.d_lng = 114.09120440483093; this.param.d_lat = 22.545463347318833; this.param.path = "33879,33880,33881,33882,33883,33884,33885,41084,421,422,423,2383,2377,2376,2334,2335,2565,2566,2567,2568,2569,2570,2571,2572,2573,39716,39717,39718,39719,39720,39721,39722,39723,448,39677,39678"; //GPS param this.param.gps = "114.05538082122803,22.551086528436926#114.05844390392303,22.551324331927283#114.06151771545409,22.551264881093118#114.06260132789612,22.54908499948478#114.06269788742065,22.5456862971879#114.06271934509277,22.54315951091646#114.06271934509277,22.538938188315093#114.06284809112547,22.53441944644356"; 
//assigned section param this.param.assign = "33878,33881,33883,2874,2877,2347,937,941"; //augmented section param //33878,33879,33880,33881,33882,33883,2874,2875,2876,2877,2878,2347,935,936,937,938,939,940,941, this.param.augment = "33879,33880,33882,2875,2876,2878,935,936,938,939,940"; //add to page this.render(); }, render: function() { //this.drawMotivation(); this.drawAssignedSection(); this.drawAugmentedSection(); this.drawGPS(); return this; }, unrender: function() { $(this.el).remove(); }, toggle: function() { $(this.el).css('display') == 'none' ? $(this.el).show() : $(this.el).hide(); }, drawMotivation: function() { $.get('api/trajectories/motivation', this.param, function(data){ Backbone.trigger('MapView:drawMotivation', data); }); }, drawGPS: function() { var self = this; setTimeout(function() { var points = self.param.gps.split('#'); _.each(points, function(point_text, index) { var data = {}; data.geojson = self._getPoint(point_text); data.options = {}; Backbone.trigger('MapView:drawSampleGPSPoint', data); }); }, 2000); }, drawAssignedSection: function() { $.get('api/elements/sections', {id: this.param.assign}, function(data){ Backbone.trigger('MapView:drawSampleAssignedSection', data); }); }, drawAugmentedSection: function() { $.get('api/elements/sections', {id: this.param.augment}, function(data){ Backbone.trigger('MapView:drawSampleAugmentedSection', data); }); }, _getPoint: function(text) { var point = text.split(','); var geojson = { "type": "FeatureCollection", "features":[{ "type": "Feature", "geometry": { "type": "Point", "coordinates": [parseFloat(point[0]), parseFloat(point[1])] } }] }; return geojson; }, }); return MotivationBtnView; });<|fim▁end|>
var MotivationView = Backbone.View.extend({
<|file_name|>DehnenMcLaughlin.cc<|end_file_name|><|fim▁begin|>//-----------------------------------------------------------------------------+ // | // DehnenMcLaughlin.cc | // | // Copyright (C) 2004-2006 Walter Dehnen | // | // This program is free software; you can redistribute it and/or modify | // it under the terms of the GNU General Public License as published by | // the Free Software Foundation; either version 2 of the License, or (at | // your option) any later version. | // |<|fim▁hole|>// | // You should have received a copy of the GNU General Public License | // along with this program; if not, write to the Free Software | // Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. | // | //-----------------------------------------------------------------------------+ // | // Versions | // 0.1 09/08/2006 WD use $NEMOINC/defacc.h | //-----------------------------------------------------------------------------+ #undef POT_DEF #include <iostream> #include <utils/WDMath.h> #include <defacc.h> // $NEMOINC/defacc.h //////////////////////////////////////////////////////////////////////////////// namespace { using namespace WDutils; ////////////////////////////////////////////////////////////////////////////// class DehnenMcLaughlin { const double GM,a,b0,e; // defininng parameters const double ia,nM,eta,kk,Pf; // derived parameters const BetaFunc BPs; // Beta_u( 1/eta, (1-beta0)/eta + 1/2 ) public: static const char* name() { return "DehnenMcLaughlin"; } DehnenMcLaughlin(const double*pars, int npar, const char *file) : GM ( npar>1? -std::abs(pars[1]) : -1. ), a ( npar>2? pars[2] : 1. ), b0 ( npar>3? pars[3] : 0. ), e ( npar>4? pars[4] : 3. 
), ia ( 1./a ), nM ( (e+2)/(e-2) ), eta ( 2*(e-2)*(2-b0)/(6+e) ), kk ( eta*nM-3 ), Pf ( GM*ia/eta ), BPs ( 1./eta, (1-b0)/eta+0.5 ) { if(npar>5 || debug(2)) std::cerr<< "falcON Debug Info: acceleration \"DehnenMcLaughlin\"" "recognizing 5 parameters:\n" " omega pattern speed (ignored) [0]\n" " G*M mass; [1]\n" " a scale radius; [1]\n" " b0 inner halo anisotropy [0]\n" " e assume rho/sigma_r^e is power law [3]\n" " the potential is given by the density\n\n" " 4+eta-2b0 M -g0 eta -2e/(2-e)\n" " rho = - --------- --- x (1+x )\n" " 8 Pi a\n\n" " with x=r/a and\n\n" " eta= 2*(e-2)*(2-b0)/(6+e) [4/9]\n" " g0 = 1-eta/2+b0 [7/9]\n\n"; if(file) warning("acceleration \"%s\": file \"%s\" ignored",name(),file); if(a<=0.) error("acceleration \"%s\": a=%f <= 0\n",name(),a); if(e<=2.) error("acceleration \"%s\": e=%f <= 2\n",name(),e); if(b0>1.) error("acceleration \"%s\": b0=%f > 1\n",name(),b0); if(npar>5) warning("acceleration \"%s\":" " skipped parameters beyond 5",name()); } /// /// routine used by class SphericalPot<DehnenMcLaughlin> of defacc.h /// /// Note that the potential for a Dehnen & McLaughlin (2005, hereafter D&M) /// model is given by (in units of GM/a) /// /// Psi = Psi_0 - Beta_y ( (1-beta0)/eta + 1/2, 1/eta ) / eta /// /// with /// /// Psi_0 = Beta( (1-beta0)/eta + 1/2, 1/eta ) / eta /// /// and /// /// y = x^eta / (1 + x^eta). /// /// Here, Beta_u(p,q) the incomplete beta function, see eq (40h) of D&M. /// At small radii (note that there is a typo in eq 40j of D&M) /// /// Psi = Psi_0 - 2/(2+eta-2*beta0) x^(eta/2+1-beta0) /// template<typename scalar> void potacc(scalar const&rq, scalar &P, scalar &T) const { if(rq == 0.) 
{ P = Pf * BPs(1.); T = 0; } else { double r = std::sqrt(rq); double u = std::pow(ia*r,eta); double u1 = 1./(1+u); P = Pf * BPs(u1); T = GM * std::pow(u*u1,nM) / (rq*r); } } }; } //------------------------------------------------------------------------------ __DEF__ACC(SphericalPot<DehnenMcLaughlin>) //------------------------------------------------------------------------------<|fim▁end|>
// This program is distributed in the hope that it will be useful, but | // WITHOUT ANY WARRANTY; without even the implied warranty of | // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | // General Public License for more details. |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>""" Meteorology visualisation examples<|fim▁hole|><|fim▁end|>
================================== """
<|file_name|>DllLoader.cpp<|end_file_name|><|fim▁begin|>/** * @file DllLoader.cpp * @author Minmin Gong * * @section DESCRIPTION * * This source file is part of KFL, a subproject of KlayGE * For the latest info, see http://www.klayge.org * * @section LICENSE * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published * by the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. <|fim▁hole|> * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * * You may alternatively use this source under the terms of * the KlayGE Proprietary License (KPL). You can obtained such a license * from http://www.klayge.org/licensing/. 
*/ #include <KFL/KFL.hpp> #include <KFL/ResIdentifier.hpp> #ifdef KLAYGE_PLATFORM_WINDOWS #include <windows.h> #else #include <dlfcn.h> #endif #include <KFL/DllLoader.hpp> namespace KlayGE { DllLoader::DllLoader() : dll_handle_(nullptr) { } DllLoader::~DllLoader() { this->Free(); } bool DllLoader::Load(std::string const & dll_name) { #ifdef KLAYGE_PLATFORM_WINDOWS #ifdef KLAYGE_PLATFORM_WINDOWS_DESKTOP dll_handle_ = static_cast<void*>(::LoadLibraryExA(dll_name.c_str(), nullptr, 0)); #else std::wstring wname; Convert(wname, dll_name); dll_handle_ = static_cast<void*>(::LoadPackagedLibrary(wname.c_str(), 0)); #endif #else dll_handle_ = ::dlopen(dll_name.c_str(), RTLD_LAZY); #endif return (dll_handle_ != nullptr); } void DllLoader::Free() { if (dll_handle_) { #ifdef KLAYGE_PLATFORM_WINDOWS ::FreeLibrary(static_cast<HMODULE>(dll_handle_)); #else ::dlclose(dll_handle_); #endif } } void* DllLoader::GetProcAddress(std::string const & proc_name) { #ifdef KLAYGE_PLATFORM_WINDOWS return reinterpret_cast<void*>(::GetProcAddress(static_cast<HMODULE>(dll_handle_), proc_name.c_str())); #else return ::dlsym(dll_handle_, proc_name.c_str()); #endif } }<|fim▁end|>
* * This program is distributed in the hope that it will be useful,
<|file_name|>queue.ts<|end_file_name|><|fim▁begin|>import { QueueAction } from './QueueAction'; import { QueueScheduler } from './QueueScheduler'; /** * * 队列调度器 * * <span class="informal">将每个任务都放到队列里,而不是立刻执行它们</span> * * `queue` 调度器, 当和延时一起使用的时候, 和 {@link async} 调度器行为一样。 * * 当和延时一起使用, 它同步地调用当前任务,即调度的时候执行。然而当递归调用的时候,即在调度的任务内, * 另一个任务由调度队列调度,而不是立即执行,该任务将被放在队列中,等待当前一个完成。 * * 这意味着当你用 `queue` 调度程序执行任务时,你确信它会在调度程序启动之前的任何其他任务结束之前结束。 * * @examples <caption>首先递归调度, 然后做一些事情</caption> * * Rx.Scheduler.queue.schedule(() => { * Rx.Scheduler.queue.schedule(() => console.log('second')); // 不会立马执行,但是会放到队列里 * * console.log('first'); * }); * * // 日志: * // "first" * // "second" * * * @example <caption>递归的重新调度自身</caption> * * Rx.Scheduler.queue.schedule(function(state) { * if (state !== 0) {<|fim▁hole|> * // 我们使用新的状态重新调度 * console.log('after', state); * } * }, 0, 3); * * // 递归运行的调度器, 你的期望: * // "before", 3 * // "before", 2 * // "before", 1 * // "after", 1 * // "after", 2 * // "after", 3 * * // 但实际使用队列的输入: * // "before", 3 * // "after", 3 * // "before", 2 * // "after", 2 * // "before", 1 * // "after", 1 * * * @static true * @name queue * @owner Scheduler */ export const queue = new QueueScheduler(QueueAction);<|fim▁end|>
* console.log('before', state); * this.schedule(state - 1); // `this` 指向当前执行的 Action,
<|file_name|>Betamax.java<|end_file_name|><|fim▁begin|>/** * Copyright (C) 2010-2012 Regis Montoya (aka r3gis - www.r3gis.fr) * This file is part of CSipSimple. * * CSipSimple is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * If you own a pjsip commercial license you can also redistribute it * and/or modify it under the terms of the GNU Lesser General Public License * as an android library. * * CSipSimple is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with CSipSimple. If not, see <http://www.gnu.org/licenses/>. */ package com.csipsimple.wizards.impl; import android.preference.ListPreference; import android.text.TextUtils; import android.view.View; import android.widget.LinearLayout; import android.widget.TextView; import com.chatme.R;<|fim▁hole|>import com.csipsimple.api.SipConfigManager; import com.csipsimple.api.SipProfile; import com.csipsimple.utils.Log; import com.csipsimple.utils.PreferencesWrapper; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpRequestBase; import java.io.IOException; import java.lang.ref.WeakReference; import java.util.HashMap; import java.util.Map.Entry; import java.util.SortedMap; import java.util.TreeMap; public class Betamax extends AuthorizationImplementation { static String PROVIDER = "provider"; protected static final String THIS_FILE = "BetamaxW"; private LinearLayout customWizard; private TextView customWizardText; ListPreference providerListPref; static SortedMap<String, String[]> providers = new TreeMap<String, String[]>() { private static final long 
serialVersionUID = 4984940975243241784L; { put("FreeCall", new String[] { "sip.voiparound.com", "stun.voiparound.com" }); put("InternetCalls", new String[] { "sip.internetcalls.com", "stun.internetcalls.com" }); put("Low Rate VoIP", new String[] { "sip.lowratevoip.com", "stun.lowratevoip.com" }); put("NetAppel", new String[] { "sip.netappel.fr", "stun.netappel.fr" }); put("Poivy", new String[] { "sip.poivy.com", "stun.poivy.com" }); put("SIP Discount", new String[] { "sip.sipdiscount.com", "stun.sipdiscount.com" }); put("SMS Discount", new String[] { "sip.smsdiscount.com", "stun.smsdiscount.com" }); put("SparVoIP", new String[] { "sip.sparvoip.com", "stun.sparvoip.com" }); put("VoIP Buster", new String[] { "sip.voipbuster.com", "stun.voipbuster.com" }); put("VoIP Buster Pro", new String[] { "sip.voipbusterpro.com", "stun.voipbusterpro.com" }); put("VoIP Cheap", new String[] { "sip.voipcheap.com", "stun.voipcheap.com" }); put("VoIP Discount", new String[] { "sip.voipdiscount.com", "stun.voipdiscount.com" }); put("12VoIP", new String[] { "sip.12voip.com", "stun.12voip.com" }); put("VoIP Stunt", new String[] { "sip.voipstunt.com", "stun.voipstunt.com" }); put("WebCall Direct", new String[] { "sip.webcalldirect.com", "stun.webcalldirect.com" }); put("Just VoIP", new String[] { "sip.justvoip.com", "stun.justvoip.com" }); put("Nonoh", new String[] { "sip.nonoh.net", "stun.nonoh.net" }); put("VoIPWise", new String[] { "sip.voipwise.com", "stun.voipwise.com" }); put("VoIPRaider", new String[] { "sip.voipraider.com", "stun.voipraider.com" }); put("BudgetSIP", new String[] { "sip.budgetsip.com", "stun.budgetsip.com" }); put("InterVoIP", new String[] { "sip.intervoip.com", "stun.intervoip.com" }); put("VoIPHit", new String[] { "sip.voiphit.com", "stun.voiphit.com" }); put("SmartVoIP", new String[] { "sip.smartvoip.com", "stun.smartvoip.com" }); put("ActionVoIP", new String[] { "sip.actionvoip.com", "stun.actionvoip.com" }); put("Jumblo", new String[] { "sip.jumblo.com", 
"stun.jumblo.com" }); put("Rynga", new String[] { "sip.rynga.com", "stun.rynga.com" }); put("PowerVoIP", new String[] { "sip.powervoip.com", "stun.powervoip.com" }); put("Voice Trading", new String[] { "sip.voicetrading.com", "stun.voicetrading.com" }); put("EasyVoip", new String[] { "sip.easyvoip.com", "stun.easyvoip.com" }); put("VoipBlast", new String[] { "sip.voipblast.com", "stun.voipblast.com" }); put("FreeVoipDeal", new String[] { "sip.freevoipdeal.com", "stun.freevoipdeal.com" }); put("VoipAlot", new String[] { "sip.voipalot.com", "" }); put("CosmoVoip", new String[] { "sip.cosmovoip.com", "stun.cosmovoip.com" }); put("BudgetVoipCall", new String[] { "sip.budgetvoipcall.com", "stun.budgetvoipcall.com" }); put("CheapBuzzer", new String[] { "sip.cheapbuzzer.com", "stun.cheapbuzzer.com" }); put("CallPirates", new String[] { "sip.callpirates.com", "stun.callpirates.com" }); put("CheapVoipCall", new String[] { "sip.cheapvoipcall.com", "stun.cheapvoipcall.com" }); put("DialCheap", new String[] { "sip.dialcheap.com", "stun.dialcheap.com" }); put("DiscountCalling", new String[] { "sip.discountcalling.com", "stun.discountcalling.com" }); put("Frynga", new String[] { "sip.frynga.com", "stun.frynga.com" }); put("GlobalFreeCall", new String[] { "sip.globalfreecall.com", "stun.globalfreecall.com" }); put("HotVoip", new String[] { "sip.hotvoip.com", "stun.hotvoip.com" }); put("MEGAvoip", new String[] { "sip.megavoip.com", "stun.megavoip.com" }); put("PennyConnect", new String[] { "sip.pennyconnect.com", "stun.pennyconnect.com" }); put("Rebvoice", new String[] { "sip.rebvoice.com", "stun.rebvoice.com" }); put("StuntCalls", new String[] { "sip.stuntcalls.com", "stun.stuntcalls.com" }); put("VoipBlazer", new String[] { "sip.voipblazer.com", "stun.voipblazer.com" }); put("VoipCaptain", new String[] { "sip.voipcaptain.com", "stun.voipcaptain.com" }); put("VoipChief", new String[] { "sip.voipchief.com", "stun.voipchief.com" }); put("VoipJumper", new String[] { 
"sip.voipjumper.com", "stun.voipjumper.com" }); put("VoipMove", new String[] { "sip.voipmove.com", "stun.voipmove.com" }); put("VoipSmash", new String[] { "sip.voipsmash.com", "stun.voipsmash.com" }); put("VoipGain", new String[] { "sip.voipgain.com", "stun.voipgain.com" }); put("VoipZoom", new String[] { "sip.voipzoom.com", "stun.voipzoom.com" }); put("Telbo", new String[] { "sip.telbo.com", "stun.telbo.com" }); put("Llevoip", new String[] { "77.72.174.129", "77.72.174.160" }); put("Llevoip (server 2)", new String[] { "77.72.174.130:6000", "77.72.174.162" }); /* * put("InternetCalls", new String[] {"", ""}); */ } }; /** * {@inheritDoc} */ @Override protected String getDefaultName() { return "Betamax"; } private static final String PROVIDER_LIST_KEY = "provider_list"; /** * {@inheritDoc} */ @Override public void fillLayout(final SipProfile account) { super.fillLayout(account); accountUsername.setTitle(R.string.w_advanced_caller_id); accountUsername.setDialogTitle(R.string.w_advanced_caller_id_desc); boolean recycle = true; providerListPref = (ListPreference) findPreference(PROVIDER_LIST_KEY); if (providerListPref == null) { Log.d(THIS_FILE, "Create new list pref"); providerListPref = new ListPreference(parent); providerListPref.setKey(PROVIDER_LIST_KEY); recycle = false; } else { Log.d(THIS_FILE, "Recycle existing list pref"); } CharSequence[] v = new CharSequence[providers.size()]; int i = 0; for (String pv : providers.keySet()) { v[i] = pv; i++; } providerListPref.setEntries(v); providerListPref.setEntryValues(v); providerListPref.setKey(PROVIDER); providerListPref.setDialogTitle("Provider"); providerListPref.setTitle("Provider"); providerListPref.setSummary("Betamax clone provider"); providerListPref.setDefaultValue("12VoIP"); if (!recycle) { addPreference(providerListPref); } hidePreference(null, SERVER); String domain = account.getDefaultDomain(); if (domain != null) { for (Entry<String, String[]> entry : providers.entrySet()) { String[] val = 
entry.getValue(); if (val[0].equalsIgnoreCase(domain)) { Log.d(THIS_FILE, "Set provider list pref value to " + entry.getKey()); providerListPref.setValue(entry.getKey()); break; } } } Log.d(THIS_FILE, providerListPref.getValue()); // Get wizard specific row customWizardText = (TextView) parent.findViewById(R.id.custom_wizard_text); customWizard = (LinearLayout) parent.findViewById(R.id.custom_wizard_row); updateAccountInfos(account); } /** * {@inheritDoc} */ @Override public SipProfile buildAccount(SipProfile account) { account = super.buildAccount(account); account.mwi_enabled = false; return account; } private static HashMap<String, Integer> SUMMARIES = new HashMap<String, Integer>() { /** * */ private static final long serialVersionUID = -5743705263738203615L; { put(DISPLAY_NAME, R.string.w_common_display_name_desc); put(USER_NAME, R.string.w_advanced_caller_id_desc); put(AUTH_NAME, R.string.w_authorization_auth_name_desc); put(PASSWORD, R.string.w_common_password_desc); put(SERVER, R.string.w_common_server_desc); } }; /** * {@inheritDoc} */ @Override public void updateDescriptions() { super.updateDescriptions(); setStringFieldSummary(PROVIDER); } /** * {@inheritDoc} */ @Override public String getDefaultFieldSummary(String fieldName) { Integer res = SUMMARIES.get(fieldName); if (fieldName == PROVIDER) { if (providerListPref != null) { return providerListPref.getValue(); } } if (res != null) { return parent.getString(res); } return ""; } /** * {@inheritDoc} */ @Override public boolean canSave() { boolean isValid = true; isValid &= checkField(accountDisplayName, isEmpty(accountDisplayName)); isValid &= checkField(accountUsername, isEmpty(accountUsername)); isValid &= checkField(accountAuthorization, isEmpty(accountAuthorization)); isValid &= checkField(accountPassword, isEmpty(accountPassword)); return isValid; } /** * {@inheritDoc} */ @Override protected String getDomain() { String provider = providerListPref.getValue(); if (provider != null) { String[] set = 
providers.get(provider); return set[0]; } return ""; } /** * {@inheritDoc} */ @Override public boolean needRestart() { return true; } /** * {@inheritDoc} */ @Override public void setDefaultParams(PreferencesWrapper prefs) { super.setDefaultParams(prefs); // Disable ICE and turn on STUN!!! prefs.setPreferenceBooleanValue(SipConfigManager.ENABLE_STUN, true); String provider = providerListPref.getValue(); if (provider != null) { String[] set = providers.get(provider); if (!TextUtils.isEmpty(set[1])) { prefs.addStunServer(set[1]); } } prefs.setPreferenceBooleanValue(SipConfigManager.ENABLE_ICE, false); } private void updateAccountInfos(final SipProfile acc) { if (acc != null && acc.id != SipProfile.INVALID_ID) { customWizard.setVisibility(View.GONE); accountBalanceHelper.launchRequest(acc); } else { // add a row to link customWizard.setVisibility(View.GONE); } } private AccountBalanceHelper accountBalanceHelper = new AccountBalance(this); private static class AccountBalance extends AccountBalanceHelper { WeakReference<Betamax> w; AccountBalance(Betamax wizard){ w = new WeakReference<Betamax>(wizard); } /** * {@inheritDoc} */ @Override public HttpRequestBase getRequest(SipProfile acc) throws IOException { Betamax wizard = w.get(); if(wizard == null) { return null; } String requestURL = "https://"; String provider = wizard.providerListPref.getValue(); if (provider != null) { String[] set = providers.get(provider); requestURL += set[0].replace("sip.", "www."); requestURL += "/myaccount/getbalance.php"; requestURL += "?username=" + acc.username; requestURL += "&password=" + acc.data; return new HttpGet(requestURL); } return null; } /** * {@inheritDoc} */ @Override public String parseResponseLine(String line) { try { float value = Float.parseFloat(line.trim()); if (value >= 0) { return "Balance : " + Math.round(value * 100.0) / 100.0 + " euros"; } } catch (NumberFormatException e) { Log.e(THIS_FILE, "Can't get value for line"); } return null; } /** * {@inheritDoc} */ 
@Override public void applyResultError() { Betamax wizard = w.get(); if(wizard != null) { wizard.customWizard.setVisibility(View.GONE); } } /** * {@inheritDoc} */ @Override public void applyResultSuccess(String balanceText) { Betamax wizard = w.get(); if(wizard != null) { wizard.customWizardText.setText(balanceText); wizard.customWizard.setVisibility(View.VISIBLE); } } }; }<|fim▁end|>
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>"""Configuration and injectable fixtures for Pytest.<|fim▁hole|> DI and functions over complex inheritance hierarchies FTW! """ import os import warnings pytest_plugins = ["abilian.testing.fixtures"] if os.environ.get("FAIL_ON_WARNINGS"): # Don't remove ! # noinspection PyUnresolvedReferences import pandas warnings.simplefilter("error")<|fim▁end|>
Supposed to replace the too-complex current UnitTest-based testing framework.
<|file_name|>test_pm_force.py<|end_file_name|><|fim▁begin|># # Test PM force parallelisation: # check force does not depend on number of MPI nodes import fs import numpy as np import h5py import pm_setup # read reference file # $ python3 create_force_h5.py to create file = h5py.File('force_%s.h5' % fs.config_precision(), 'r') ref_id = file['id'][:] ref_force = file['f'][:] file.close() # compute PM force fs.msg.set_loglevel(0) particles = pm_setup.force() particle_id = particles.id particle_force = particles.force # compare two forces if fs.comm.this_node() == 0: assert(np.all(particle_id == ref_id)) print('pm_force id OK') force_rms = np.std(ref_force) diff = particle_force - ref_force diff_rms = np.std(diff)<|fim▁hole|> print('pm_force rms error %e / %e' % (diff_rms, force_rms)) diff_max = np.max(np.abs(diff)) print('pm_force max error %e / %e' % (diff_max, force_rms)) eps = np.finfo(particle_force.dtype).eps assert(diff_rms < 20*eps) assert(diff_max < 1000*eps) print('pm_force OK')<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from warnings import warn from beaker.crypto.pbkdf2 import PBKDF2, strxor from beaker.crypto.util import hmac, sha1, hmac_sha1, md5 from beaker import util keyLength = None if util.jython: try: from beaker.crypto.jcecrypto import getKeyLength, aesEncrypt keyLength = getKeyLength() except ImportError: pass else: try:<|fim▁hole|> from beaker.crypto.pycrypto import getKeyLength, aesEncrypt, aesDecrypt keyLength = getKeyLength() except ImportError: pass if not keyLength: has_aes = False else: has_aes = True if has_aes and keyLength < 32: warn('Crypto implementation only supports key lengths up to %d bits. ' 'Generated session cookies may be incompatible with other ' 'environments' % (keyLength * 8)) def generateCryptoKeys(master_key, salt, iterations): # NB: We XOR parts of the keystream into the randomly-generated parts, just # in case os.urandom() isn't as random as it should be. Note that if # os.urandom() returns truly random data, this will have no effect on the # overall security. keystream = PBKDF2(master_key, salt, iterations=iterations) cipher_key = keystream.read(keyLength) return cipher_key<|fim▁end|>
<|file_name|>clientChat.js<|end_file_name|><|fim▁begin|>"use strict"; exports.__esModule = true; var vueClient_1 = require("../../bibliotheque/vueClient"); console.log("* Chargement du script"); /* Test - déclaration d'une variable externe - Possible cf. declare */ function centreNoeud() { return JSON.parse(vueClient_1.contenuBalise(document, 'centre')); } function voisinsNoeud() { var v = JSON.parse(vueClient_1.contenuBalise(document, 'voisins')); var r = []; var id; for (id in v) { r.push(v[id]); } return r; } function adresseServeur() { return vueClient_1.contenuBalise(document, 'adresseServeur'); } /* type CanalChat = CanalClient<FormatMessageTchat>; // A initialiser var canal: CanalChat; var noeud: Noeud<FormatSommetTchat>; <|fim▁hole|> console.log("- Envoi du message net : " + msg.net()); canal.envoyerMessage(msg); initialiserEntree('message_' + destinataire, ""); } // A exécuter après chargement de la page function initialisation(): void { console.log("* Initialisation après chargement du DOM ...") noeud = creerNoeud<FormatSommetTchat>(centreNoeud(), voisinsNoeud(), creerSommetTchat); canal = new CanalClient<FormatMessageTchat>(adresseServeur()); canal.enregistrerTraitementAReception((m: FormatMessageTchat) => { let msg = new MessageTchat(m); console.log("- Réception du message brut : " + msg.brut()); console.log("- Réception du message net : " + msg.net()); posterNL('logChats', msg.net()); }); console.log("* ... du noeud et du canal côté client en liaison avec le serveur : " + adresseServeur()); // Gestion des événements pour les éléments du document. 
//document.getElementById("boutonEnvoi").addEventListener("click", <EventListenerOrEventListenerObject>(e => {alert("click!");}), true); let id: Identifiant; let v = noeud.voisins(); for (id in v) { console.log("id : " +id); let idVal = id; gererEvenementElement("boutonEnvoi_" + idVal, "click", e => { console.log("id message_" + idVal); console.log("entree : " + recupererEntree("message_" + idVal)); envoyerMessage(recupererEntree("message_" + idVal), idVal); }); } <form id="envoi"> <input type="text" id="message_id1"> <input class="button" type="button" id="boutonEnvoi_id1" value="Envoyer un message à {{nom id1}}." onClick="envoyerMessage(this.form.message.value, "id1")"> </form> console.log("* ... et des gestionnaires d'événements sur des éléments du document."); } // Gestion des événements pour le document console.log("* Enregistrement de l'initialisation"); gererEvenementDocument('DOMContentLoaded', initialisation); <script type="text/javascript"> document.addEventListener('DOMContentLoaded', initialisation()); </script> */ //# sourceMappingURL=clientChat.js.map<|fim▁end|>
function envoyerMessage(texte: string, destinataire: Identifiant) { let msg: MessageTchat = creerMessageCommunication(noeud.centre().enJSON().id, destinataire, texte); console.log("- Envoi du message brut : " + msg.brut());
<|file_name|>visitors.js<|end_file_name|><|fim▁begin|>"use strict"; exports.__esModule = true; var _typeof2 = require("babel-runtime/helpers/typeof"); var _typeof3 = _interopRequireDefault(_typeof2); var _keys = require("babel-runtime/core-js/object/keys"); var _keys2 = _interopRequireDefault(_keys); var _getIterator2 = require("babel-runtime/core-js/get-iterator"); var _getIterator3 = _interopRequireDefault(_getIterator2); exports.explode = explode; exports.verify = verify; exports.merge = merge; var _virtualTypes = require("./path/lib/virtual-types"); var virtualTypes = _interopRequireWildcard(_virtualTypes); var _babelMessages = require("babel-messages"); var messages = _interopRequireWildcard(_babelMessages); var _babelTypes = require("babel-types"); var t = _interopRequireWildcard(_babelTypes); var _clone = require("lodash/clone"); var _clone2 = _interopRequireDefault(_clone); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function explode(visitor) { if (visitor._exploded) return visitor; visitor._exploded = true; for (var nodeType in visitor) { if (shouldIgnoreKey(nodeType)) continue; var parts = nodeType.split("|"); if (parts.length === 1) continue; var fns = visitor[nodeType]; delete visitor[nodeType]; for (var _iterator = parts, _isArray = Array.isArray(_iterator), _i = 0, _iterator = _isArray ? 
_iterator : (0, _getIterator3.default)(_iterator);;) { var _ref; if (_isArray) { if (_i >= _iterator.length) break; _ref = _iterator[_i++]; } else { _i = _iterator.next(); if (_i.done) break; _ref = _i.value; } var part = _ref; visitor[part] = fns; } } verify(visitor); delete visitor.__esModule; ensureEntranceObjects(visitor); ensureCallbackArrays(visitor); for (var _iterator2 = (0, _keys2.default)(visitor), _isArray2 = Array.isArray(_iterator2), _i2 = 0, _iterator2 = _isArray2 ? _iterator2 : (0, _getIterator3.default)(_iterator2);;) { var _ref2; if (_isArray2) { if (_i2 >= _iterator2.length) break; _ref2 = _iterator2[_i2++]; } else { _i2 = _iterator2.next(); if (_i2.done) break; _ref2 = _i2.value; } var _nodeType3 = _ref2; if (shouldIgnoreKey(_nodeType3)) continue; var wrapper = virtualTypes[_nodeType3]; if (!wrapper) continue; var _fns2 = visitor[_nodeType3]; for (var type in _fns2) { _fns2[type] = wrapCheck(wrapper, _fns2[type]); } delete visitor[_nodeType3]; if (wrapper.types) { for (var _iterator4 = wrapper.types, _isArray4 = Array.isArray(_iterator4), _i4 = 0, _iterator4 = _isArray4 ? _iterator4 : (0, _getIterator3.default)(_iterator4);;) { var _ref4; if (_isArray4) { if (_i4 >= _iterator4.length) break; _ref4 = _iterator4[_i4++]; } else {<|fim▁hole|> if (_i4.done) break; _ref4 = _i4.value; } var _type = _ref4; if (visitor[_type]) { mergePair(visitor[_type], _fns2); } else { visitor[_type] = _fns2; } } } else { mergePair(visitor, _fns2); } } for (var _nodeType in visitor) { if (shouldIgnoreKey(_nodeType)) continue; var _fns = visitor[_nodeType]; var aliases = t.FLIPPED_ALIAS_KEYS[_nodeType]; var deprecratedKey = t.DEPRECATED_KEYS[_nodeType]; if (deprecratedKey) { console.trace("Visitor defined for " + _nodeType + " but it has been renamed to " + deprecratedKey); aliases = [deprecratedKey]; } if (!aliases) continue; delete visitor[_nodeType]; for (var _iterator3 = aliases, _isArray3 = Array.isArray(_iterator3), _i3 = 0, _iterator3 = _isArray3 ? 
_iterator3 : (0, _getIterator3.default)(_iterator3);;) { var _ref3; if (_isArray3) { if (_i3 >= _iterator3.length) break; _ref3 = _iterator3[_i3++]; } else { _i3 = _iterator3.next(); if (_i3.done) break; _ref3 = _i3.value; } var alias = _ref3; var existing = visitor[alias]; if (existing) { mergePair(existing, _fns); } else { visitor[alias] = (0, _clone2.default)(_fns); } } } for (var _nodeType2 in visitor) { if (shouldIgnoreKey(_nodeType2)) continue; ensureCallbackArrays(visitor[_nodeType2]); } return visitor; } function verify(visitor) { if (visitor._verified) return; if (typeof visitor === "function") { throw new Error(messages.get("traverseVerifyRootFunction")); } for (var nodeType in visitor) { if (nodeType === "enter" || nodeType === "exit") { validateVisitorMethods(nodeType, visitor[nodeType]); } if (shouldIgnoreKey(nodeType)) continue; if (t.TYPES.indexOf(nodeType) < 0) { throw new Error(messages.get("traverseVerifyNodeType", nodeType)); } var visitors = visitor[nodeType]; if ((typeof visitors === "undefined" ? "undefined" : (0, _typeof3.default)(visitors)) === "object") { for (var visitorKey in visitors) { if (visitorKey === "enter" || visitorKey === "exit") { validateVisitorMethods(nodeType + "." + visitorKey, visitors[visitorKey]); } else { throw new Error(messages.get("traverseVerifyVisitorProperty", nodeType, visitorKey)); } } } } visitor._verified = true; } function validateVisitorMethods(path, val) { var fns = [].concat(val); for (var _iterator5 = fns, _isArray5 = Array.isArray(_iterator5), _i5 = 0, _iterator5 = _isArray5 ? _iterator5 : (0, _getIterator3.default)(_iterator5);;) { var _ref5; if (_isArray5) { if (_i5 >= _iterator5.length) break; _ref5 = _iterator5[_i5++]; } else { _i5 = _iterator5.next(); if (_i5.done) break; _ref5 = _i5.value; } var fn = _ref5; if (typeof fn !== "function") { throw new TypeError("Non-function found defined in " + path + " with type " + (typeof fn === "undefined" ? 
"undefined" : (0, _typeof3.default)(fn))); } } } function merge(visitors) { var states = arguments.length <= 1 || arguments[1] === undefined ? [] : arguments[1]; var wrapper = arguments[2]; var rootVisitor = {}; for (var i = 0; i < visitors.length; i++) { var visitor = visitors[i]; var state = states[i]; explode(visitor); for (var type in visitor) { var visitorType = visitor[type]; if (state || wrapper) { visitorType = wrapWithStateOrWrapper(visitorType, state, wrapper); } var nodeVisitor = rootVisitor[type] = rootVisitor[type] || {}; mergePair(nodeVisitor, visitorType); } } return rootVisitor; } function wrapWithStateOrWrapper(oldVisitor, state, wrapper) { var newVisitor = {}; var _loop = function _loop(key) { var fns = oldVisitor[key]; if (!Array.isArray(fns)) return "continue"; fns = fns.map(function (fn) { var newFn = fn; if (state) { newFn = function newFn(path) { return fn.call(state, path, state); }; } if (wrapper) { newFn = wrapper(state.key, key, newFn); } return newFn; }); newVisitor[key] = fns; }; for (var key in oldVisitor) { var _ret = _loop(key); if (_ret === "continue") continue; } return newVisitor; } function ensureEntranceObjects(obj) { for (var key in obj) { if (shouldIgnoreKey(key)) continue; var fns = obj[key]; if (typeof fns === "function") { obj[key] = { enter: fns }; } } } function ensureCallbackArrays(obj) { if (obj.enter && !Array.isArray(obj.enter)) obj.enter = [obj.enter]; if (obj.exit && !Array.isArray(obj.exit)) obj.exit = [obj.exit]; } function wrapCheck(wrapper, fn) { var newFn = function newFn(path) { if (wrapper.checkPath(path)) { return fn.apply(this, arguments); } }; newFn.toString = function () { return fn.toString(); }; return newFn; } function shouldIgnoreKey(key) { if (key[0] === "_") return true; if (key === "enter" || key === "exit" || key === "shouldSkip") return true; if (key === "blacklist" || key === "noScope" || key === "skipKeys") return true; return false; } function mergePair(dest, src) { for (var key in src) { 
dest[key] = [].concat(dest[key] || [], src[key]); } }<|fim▁end|>
_i4 = _iterator4.next();
<|file_name|>csv.rs<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at //<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. //! CSV Data source use std::fs::File; use arrow::csv; use arrow::datatypes::{Field, Schema}; use arrow::record_batch::RecordBatch; use std::string::String; use std::sync::Arc; use crate::datasource::{ScanResult, TableProvider}; use crate::error::Result; use crate::execution::physical_plan::csv::CsvExec; use crate::execution::physical_plan::{BatchIterator, ExecutionPlan}; /// Represents a CSV file with a provided schema // TODO: usage example (rather than documenting `new()`) pub struct CsvFile { filename: String, schema: Arc<Schema>, has_header: bool, } impl CsvFile { #[allow(missing_docs)] pub fn new(filename: &str, schema: &Schema, has_header: bool) -> Self { Self { filename: String::from(filename), schema: Arc::new(schema.clone()), has_header, } } } impl TableProvider for CsvFile { fn schema(&self) -> &Arc<Schema> { &self.schema } fn scan( &self, projection: &Option<Vec<usize>>, batch_size: usize, ) -> Result<Vec<ScanResult>> { let exec = CsvExec::try_new( &self.filename, self.schema.clone(), self.has_header, projection.clone(), batch_size, )?; let partitions = exec.partitions()?; let iterators = partitions .iter() 
.map(|p| p.execute()) .collect::<Result<Vec<_>>>()?; Ok(iterators) } } /// Iterator over CSV batches // TODO: usage example (rather than documenting `new()`) pub struct CsvBatchIterator { schema: Arc<Schema>, reader: csv::Reader<File>, } impl CsvBatchIterator { #[allow(missing_docs)] pub fn try_new( filename: &str, schema: Arc<Schema>, has_header: bool, projection: &Option<Vec<usize>>, batch_size: usize, ) -> Result<Self> { let file = File::open(filename)?; let reader = csv::Reader::new( file, schema.clone(), has_header, batch_size, projection.clone(), ); let projected_schema = match projection { Some(p) => { let projected_fields: Vec<Field> = p.iter().map(|i| schema.fields()[*i].clone()).collect(); Arc::new(Schema::new(projected_fields)) } None => schema, }; Ok(Self { schema: projected_schema, reader, }) } } impl BatchIterator for CsvBatchIterator { fn schema(&self) -> Arc<Schema> { self.schema.clone() } fn next(&mut self) -> Result<Option<RecordBatch>> { Ok(self.reader.next()?) } }<|fim▁end|>
<|file_name|>utils.go<|end_file_name|><|fim▁begin|>package configor import ( "encoding/json" "errors" "fmt" "io/ioutil" "os" "path" "reflect" "strings" "github.com/BurntSushi/toml" yaml "gopkg.in/yaml.v2" ) func (configor *Configor) getENVPrefix(config interface{}) string { if configor.Config.ENVPrefix == "" { if prefix := os.Getenv("CONFIGOR_ENV_PREFIX"); prefix != "" { return prefix } return "Configor" } return configor.Config.ENVPrefix } func getConfigurationFileWithENVPrefix(file, env string) (string, error) { var ( envFile string extname = path.Ext(file) ) if extname == "" { envFile = fmt.Sprintf("%v.%v", file, env) } else { envFile = fmt.Sprintf("%v.%v%v", strings.TrimSuffix(file, extname), env, extname) } if fileInfo, err := os.Stat(envFile); err == nil && fileInfo.Mode().IsRegular() { return envFile, nil } return "", fmt.Errorf("failed to find file %v", file) } func (configor *Configor) getConfigurationFiles(files ...string) []string { var results []string if configor.Config.Debug || configor.Config.Verbose { fmt.Printf("Current environment: '%v'\n", configor.GetEnvironment()) } for i := len(files) - 1; i >= 0; i-- { foundFile := false file := files[i] // check configuration if fileInfo, err := os.Stat(file); err == nil && fileInfo.Mode().IsRegular() { foundFile = true results = append(results, file) } // check configuration with env if file, err := getConfigurationFileWithENVPrefix(file, configor.GetEnvironment()); err == nil { foundFile = true results = append(results, file) } // check example configuration if !foundFile {<|fim▁hole|> fmt.Printf("Failed to find configuration %v\n", file) } } } return results } func processFile(config interface{}, file string) error { data, err := ioutil.ReadFile(file) if err != nil { return err } switch { case strings.HasSuffix(file, ".yaml") || strings.HasSuffix(file, ".yml"): return yaml.Unmarshal(data, config) case strings.HasSuffix(file, ".toml"): return toml.Unmarshal(data, config) case strings.HasSuffix(file, 
".json"): return json.Unmarshal(data, config) default: if toml.Unmarshal(data, config) != nil { if json.Unmarshal(data, config) != nil { if yaml.Unmarshal(data, config) != nil { return errors.New("failed to decode config") } } } return nil } } func getPrefixForStruct(prefixes []string, fieldStruct *reflect.StructField) []string { if fieldStruct.Anonymous && fieldStruct.Tag.Get("anonymous") == "true" { return prefixes } return append(prefixes, fieldStruct.Name) } func (configor *Configor) processTags(config interface{}, prefixes ...string) error { configValue := reflect.Indirect(reflect.ValueOf(config)) if configValue.Kind() != reflect.Struct { return errors.New("invalid config, should be struct") } configType := configValue.Type() for i := 0; i < configType.NumField(); i++ { var ( envNames []string fieldStruct = configType.Field(i) field = configValue.Field(i) envName = fieldStruct.Tag.Get("env") // read configuration from shell env ) if !field.CanAddr() || !field.CanInterface() { continue } if envName == "" { envNames = append(envNames, strings.Join(append(prefixes, fieldStruct.Name), "_")) // Configor_DB_Name envNames = append(envNames, strings.ToUpper(strings.Join(append(prefixes, fieldStruct.Name), "_"))) // CONFIGOR_DB_NAME } else { envNames = []string{envName} } if configor.Config.Verbose { fmt.Printf("Trying to load struct `%v`'s field `%v` from env %v\n", configType.Name(), fieldStruct.Name, strings.Join(envNames, ", ")) } // Load From Shell ENV for _, env := range envNames { if value := os.Getenv(env); value != "" { if configor.Config.Debug || configor.Config.Verbose { fmt.Printf("Loading configuration for struct `%v`'s field `%v` from env %v...\n", configType.Name(), fieldStruct.Name, env) } if err := yaml.Unmarshal([]byte(value), field.Addr().Interface()); err != nil { return err } break } } if isBlank := reflect.DeepEqual(field.Interface(), reflect.Zero(field.Type()).Interface()); isBlank { // Set default configuration if blank if value := 
fieldStruct.Tag.Get("default"); value != "" { if err := yaml.Unmarshal([]byte(value), field.Addr().Interface()); err != nil { return err } } else if fieldStruct.Tag.Get("required") == "true" { // return error if it is required but blank return errors.New(fieldStruct.Name + " is required, but blank") } } for field.Kind() == reflect.Ptr { field = field.Elem() } if field.Kind() == reflect.Struct { if err := configor.processTags(field.Addr().Interface(), getPrefixForStruct(prefixes, &fieldStruct)...); err != nil { return err } } if field.Kind() == reflect.Slice { for i := 0; i < field.Len(); i++ { if reflect.Indirect(field.Index(i)).Kind() == reflect.Struct { if err := configor.processTags(field.Index(i).Addr().Interface(), append(getPrefixForStruct(prefixes, &fieldStruct), fmt.Sprint(i))...); err != nil { return err } } } } } return nil }<|fim▁end|>
if example, err := getConfigurationFileWithENVPrefix(file, "example"); err == nil { fmt.Printf("Failed to find configuration %v, using example file %v\n", file, example) results = append(results, example) } else {
<|file_name|>packet.go<|end_file_name|><|fim▁begin|>// Copyright 2011 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Package packet implements parsing and serialization of OpenPGP packets, as // specified in RFC 4880. package packet import ( "big" "crypto/aes" "crypto/cast5" "crypto/cipher" "crypto/openpgp/error" "io" "os" ) // readFull is the same as io.ReadFull except that reading zero bytes returns // ErrUnexpectedEOF rather than EOF. func readFull(r io.Reader, buf []byte) (n int, err os.Error) { n, err = io.ReadFull(r, buf) if err == os.EOF { err = io.ErrUnexpectedEOF } return } // readLength reads an OpenPGP length from r. See RFC 4880, section 4.2.2. func readLength(r io.Reader) (length int64, isPartial bool, err os.Error) { var buf [4]byte _, err = readFull(r, buf[:1]) if err != nil { return } switch { case buf[0] < 192: length = int64(buf[0]) case buf[0] < 224: length = int64(buf[0]-192) << 8 _, err = readFull(r, buf[0:1]) if err != nil { return } length += int64(buf[0]) + 192 case buf[0] < 255: length = int64(1) << (buf[0] & 0x1f) isPartial = true default: _, err = readFull(r, buf[0:4]) if err != nil { return } length = int64(buf[0])<<24 | int64(buf[1])<<16 | int64(buf[2])<<8 | int64(buf[3]) } return } // partialLengthReader wraps an io.Reader and handles OpenPGP partial lengths. // The continuation lengths are parsed and removed from the stream and EOF is // returned at the end of the packet. See RFC 4880, section 4.2.2.4. 
type partialLengthReader struct { r io.Reader remaining int64 isPartial bool } func (r *partialLengthReader) Read(p []byte) (n int, err os.Error) { for r.remaining == 0 { if !r.isPartial { return 0, os.EOF } r.remaining, r.isPartial, err = readLength(r.r) if err != nil { return 0, err } } toRead := int64(len(p)) if toRead > r.remaining { toRead = r.remaining } n, err = r.r.Read(p[:int(toRead)]) r.remaining -= int64(n) if n < int(toRead) && err == os.EOF { err = io.ErrUnexpectedEOF } return } // A spanReader is an io.LimitReader, but it returns ErrUnexpectedEOF if the // underlying Reader returns EOF before the limit has been reached. type spanReader struct { r io.Reader n int64 } func (l *spanReader) Read(p []byte) (n int, err os.Error) { if l.n <= 0 { return 0, os.EOF } if int64(len(p)) > l.n { p = p[0:l.n] } n, err = l.r.Read(p) l.n -= int64(n) if l.n > 0 && err == os.EOF { err = io.ErrUnexpectedEOF } return } // readHeader parses a packet header and returns an io.Reader which will return // the contents of the packet. See RFC 4880, section 4.2. 
func readHeader(r io.Reader) (tag packetType, length int64, contents io.Reader, err os.Error) { var buf [4]byte _, err = io.ReadFull(r, buf[:1]) if err != nil { return } if buf[0]&0x80 == 0 { err = error.StructuralError("tag byte does not have MSB set") return } if buf[0]&0x40 == 0 { // Old format packet tag = packetType((buf[0] & 0x3f) >> 2) lengthType := buf[0] & 3 if lengthType == 3 { length = -1 contents = r return } lengthBytes := 1 << lengthType _, err = readFull(r, buf[0:lengthBytes]) if err != nil { return } for i := 0; i < lengthBytes; i++ { length <<= 8 length |= int64(buf[i]) } contents = &spanReader{r, length} return } // New format packet tag = packetType(buf[0] & 0x3f) length, isPartial, err := readLength(r) if err != nil { return } if isPartial { contents = &partialLengthReader{ remaining: length, isPartial: true, r: r, } length = -1 } else { contents = &spanReader{r, length} } return } // serializeHeader writes an OpenPGP packet header to w. See RFC 4880, section // 4.2. func serializeHeader(w io.Writer, ptype packetType, length int) (err os.Error) { var buf [6]byte var n int buf[0] = 0x80 | 0x40 | byte(ptype) if length < 192 {<|fim▁hole|> buf[1] = 192 + byte(length>>8) buf[2] = byte(length) n = 3 } else { buf[1] = 255 buf[2] = byte(length >> 24) buf[3] = byte(length >> 16) buf[4] = byte(length >> 8) buf[5] = byte(length) n = 6 } _, err = w.Write(buf[:n]) return } // Packet represents an OpenPGP packet. Users are expected to try casting // instances of this interface to specific packet types. type Packet interface { parse(io.Reader) os.Error } // consumeAll reads from the given Reader until error, returning the number of // bytes read. func consumeAll(r io.Reader) (n int64, err os.Error) { var m int var buf [1024]byte for { m, err = r.Read(buf[:]) n += int64(m) if err == os.EOF { err = nil return } if err != nil { return } } panic("unreachable") } // packetType represents the numeric ids of the different OpenPGP packet types. 
See // http://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-2 type packetType uint8 const ( packetTypeEncryptedKey packetType = 1 packetTypeSignature packetType = 2 packetTypeSymmetricKeyEncrypted packetType = 3 packetTypeOnePassSignature packetType = 4 packetTypePrivateKey packetType = 5 packetTypePublicKey packetType = 6 packetTypePrivateSubkey packetType = 7 packetTypeCompressed packetType = 8 packetTypeSymmetricallyEncrypted packetType = 9 packetTypeLiteralData packetType = 11 packetTypeUserId packetType = 13 packetTypePublicSubkey packetType = 14 packetTypeSymmetricallyEncryptedMDC packetType = 18 ) // Read reads a single OpenPGP packet from the given io.Reader. If there is an // error parsing a packet, the whole packet is consumed from the input. func Read(r io.Reader) (p Packet, err os.Error) { tag, _, contents, err := readHeader(r) if err != nil { return } switch tag { case packetTypeEncryptedKey: p = new(EncryptedKey) case packetTypeSignature: p = new(Signature) case packetTypeSymmetricKeyEncrypted: p = new(SymmetricKeyEncrypted) case packetTypeOnePassSignature: p = new(OnePassSignature) case packetTypePrivateKey, packetTypePrivateSubkey: pk := new(PrivateKey) if tag == packetTypePrivateSubkey { pk.IsSubkey = true } p = pk case packetTypePublicKey, packetTypePublicSubkey: pk := new(PublicKey) if tag == packetTypePublicSubkey { pk.IsSubkey = true } p = pk case packetTypeCompressed: p = new(Compressed) case packetTypeSymmetricallyEncrypted: p = new(SymmetricallyEncrypted) case packetTypeLiteralData: p = new(LiteralData) case packetTypeUserId: p = new(UserId) case packetTypeSymmetricallyEncryptedMDC: se := new(SymmetricallyEncrypted) se.MDC = true p = se default: err = error.UnknownPacketTypeError(tag) } if p != nil { err = p.parse(contents) } if err != nil { consumeAll(contents) } return } // SignatureType represents the different semantic meanings of an OpenPGP // signature. See RFC 4880, section 5.2.1. 
type SignatureType uint8 const ( SigTypeBinary SignatureType = 0 SigTypeText = 1 SigTypeGenericCert = 0x10 SigTypePersonaCert = 0x11 SigTypeCasualCert = 0x12 SigTypePositiveCert = 0x13 SigTypeSubkeyBinding = 0x18 ) // PublicKeyAlgorithm represents the different public key system specified for // OpenPGP. See // http://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-12 type PublicKeyAlgorithm uint8 const ( PubKeyAlgoRSA PublicKeyAlgorithm = 1 PubKeyAlgoRSAEncryptOnly PublicKeyAlgorithm = 2 PubKeyAlgoRSASignOnly PublicKeyAlgorithm = 3 PubKeyAlgoElgamal PublicKeyAlgorithm = 16 PubKeyAlgoDSA PublicKeyAlgorithm = 17 ) // CipherFunction represents the different block ciphers specified for OpenPGP. See // http://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-13 type CipherFunction uint8 const ( CipherCAST5 = 3 CipherAES128 = 7 CipherAES192 = 8 CipherAES256 = 9 ) // keySize returns the key size, in bytes, of cipher. func (cipher CipherFunction) keySize() int { switch cipher { case CipherCAST5: return cast5.KeySize case CipherAES128: return 16 case CipherAES192: return 24 case CipherAES256: return 32 } return 0 } // blockSize returns the block size, in bytes, of cipher. func (cipher CipherFunction) blockSize() int { switch cipher { case CipherCAST5: return 8 case CipherAES128, CipherAES192, CipherAES256: return 16 } return 0 } // new returns a fresh instance of the given cipher. func (cipher CipherFunction) new(key []byte) (block cipher.Block) { switch cipher { case CipherCAST5: block, _ = cast5.NewCipher(key) case CipherAES128, CipherAES192, CipherAES256: block, _ = aes.NewCipher(key) } return } // readMPI reads a big integer from r. The bit length returned is the bit // length that was specified in r. This is preserved so that the integer can be // reserialized exactly. 
func readMPI(r io.Reader) (mpi []byte, bitLength uint16, err os.Error) { var buf [2]byte _, err = readFull(r, buf[0:]) if err != nil { return } bitLength = uint16(buf[0])<<8 | uint16(buf[1]) numBytes := (int(bitLength) + 7) / 8 mpi = make([]byte, numBytes) _, err = readFull(r, mpi) return } // mpiLength returns the length of the given *big.Int when serialized as an // MPI. func mpiLength(n *big.Int) (mpiLengthInBytes int) { mpiLengthInBytes = 2 /* MPI length */ mpiLengthInBytes += (n.BitLen() + 7) / 8 return } // writeMPI serializes a big integer to w. func writeMPI(w io.Writer, bitLength uint16, mpiBytes []byte) (err os.Error) { _, err = w.Write([]byte{byte(bitLength >> 8), byte(bitLength)}) if err == nil { _, err = w.Write(mpiBytes) } return } // writeBig serializes a *big.Int to w. func writeBig(w io.Writer, i *big.Int) os.Error { return writeMPI(w, uint16(i.BitLen()), i.Bytes()) }<|fim▁end|>
buf[1] = byte(length) n = 2 } else if length < 8384 { length -= 192
<|file_name|>test27_sticker.py<|end_file_name|><|fim▁begin|>import sys import time from pprint import pprint import telepot from telepot.namedtuple import StickerSet TOKEN = sys.argv[1] USER_ID = long(sys.argv[2]) STICKER_SET = sys.argv[3] bot = telepot.Bot(TOKEN) <|fim▁hole|>f = bot.uploadStickerFile(USER_ID, open('gandhi.png', 'rb')) print 'Uploaded Gandhi' bot.addStickerToSet(USER_ID, STICKER_SET, f['file_id'], u'\U0001f60a') bot.addStickerToSet(USER_ID, STICKER_SET, open('lincoln.png', 'rb'), u'\U0001f60a') print 'Added Gandhi and Lincoln to set' s = bot.getStickerSet(STICKER_SET) pprint(s) ss = StickerSet(**s) for s in ss.stickers: bot.deleteStickerFromSet(s.file_id) print 'Deleted', s.file_id time.sleep(3) # throttle s = bot.getStickerSet(STICKER_SET) pprint(s)<|fim▁end|>
<|file_name|>title.service.ts<|end_file_name|><|fim▁begin|>import { Inject, Injectable, Injector } from '@angular/core'; import { DOCUMENT, Title } from '@angular/platform-browser'; import { ActivatedRoute, Router } from '@angular/router'; import { MenuService } from '../menu/menu.service'; /* tslint:disable */ /** * 设置标题 * @see http://ng-alain.com/docs/service#TitleService */ @Injectable() export class TitleService { private _prefix = ''; private _suffix = ''; private _separator = ' - '; private _reverse = false; private _default = 'Not Page Name'; constructor( private injector: Injector, private title: Title, private menuSrv: MenuService, @Inject(DOCUMENT) private doc: any) { } /** 设置分隔符 */ set separator(value: string) { this._separator = value; } /** 设置前缀 */ set prefix(value: string) { this._prefix = value; } /** 设置后缀 */ set suffix(value: string) { this._suffix = value; } /** 设置是否反转 */ set reverse(value: boolean) { this._reverse = value; } /** 设置默认标题名 */ set default(value: string) { this._default = value; } private getByElement(): string { const el = this.doc.querySelector('.content__title h1') || this.doc.querySelector('pro-header h1.title'); if (el) { return el.firstChild.textContent.trim(); } return ''; } private getByRoute(): string { let next = this.injector.get(ActivatedRoute); while (next.firstChild) next = next.firstChild; return next.snapshot && next.snapshot.data && next.snapshot.data.title; } private getByMenu(): string { const menus = this.menuSrv.getPathByUrl(this.injector.get(Router).url); if (!menus || menus.length <= 0) return ''; const item = menus[menus.length - 1]; let title; return item.text; } /** * 设置标题,若不指定具体名称,则按以顺序获取: * - 路由配置 `{ data: { nzTitle:'page name' } }` * - 根据当前 URL 解析菜单数据<|fim▁hole|> * - 页面 `content__title` 中获取 `h1` 内容 * - 默认标题名 */ setTitle(title?: string | string[]) { if (!title) { title = this.getByRoute() || this.getByMenu() || this.getByElement() || this._default; } if (title && !Array.isArray(title)) { title = [title]; } 
let newTitles = []; if (this._prefix) { newTitles.push(this._prefix); } if (title && title.length > 0) { newTitles.push(...(title as string[])); } if (this._suffix) { newTitles.push(this._suffix); } if (this._reverse) { newTitles = newTitles.reverse(); } this.title.setTitle(newTitles.join(this._separator)); } }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*- # Pluma External Tools plugin # Copyright (C) 2005-2006 Steve Frécinaux <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA __all__ = ('ExternalToolsPlugin', 'ExternalToolsWindowHelper', 'Manager', 'OutputPanel', 'Capture', 'UniqueById') import pluma import gtk from manager import Manager from library import ToolLibrary from outputpanel import OutputPanel from capture import Capture from functions import * class ToolMenu(object): ACTION_HANDLER_DATA_KEY = "ExternalToolActionHandlerData" ACTION_ITEM_DATA_KEY = "ExternalToolActionItemData" def __init__(self, library, window, menupath): super(ToolMenu, self).__init__() self._library = library self._window = window self._menupath = menupath self._merge_id = 0 self._action_group = gtk.ActionGroup("ExternalToolsPluginToolActions") self._signals = [] self.update() def deactivate(self): self.remove() def remove(self): if self._merge_id != 0: self._window.get_ui_manager().remove_ui(self._merge_id) self._window.get_ui_manager().remove_action_group(self._action_group) self._merge_id = 0 for action in self._action_group.list_actions(): handler = action.get_data(self.ACTION_HANDLER_DATA_KEY) if handler is not None: action.disconnect(handler) action.set_data(self.ACTION_ITEM_DATA_KEY, None) 
action.set_data(self.ACTION_HANDLER_DATA_KEY, None) self._action_group.remove_action(action) accelmap = gtk.accel_map_get() for s in self._signals: accelmap.disconnect(s) self._signals = [] def _insert_directory(self, directory, path): manager = self._window.get_ui_manager() for item in directory.subdirs: action_name = 'ExternalToolDirectory%X' % id(item) action = gtk.Action(action_name, item.name.replace('_', '__'), None, None) self._action_group.add_action(action) manager.add_ui(self._merge_id, path, action_name, action_name, gtk.UI_MANAGER_MENU, False) self._insert_directory(item, path + '/' + action_name) for item in directory.tools: action_name = 'ExternalToolTool%X' % id(item) action = gtk.Action(action_name, item.name.replace('_', '__'), item.comment, None) handler = action.connect("activate", capture_menu_action, self._window, item) action.set_data(self.ACTION_ITEM_DATA_KEY, item) action.set_data(self.ACTION_HANDLER_DATA_KEY, handler) # Make sure to replace accel accelpath = '<Actions>/ExternalToolsPluginToolActions/%s' % (action_name, ) if item.shortcut: key, mod = gtk.accelerator_parse(item.shortcut) gtk.accel_map_change_entry(accelpath, key, mod, True) self._signals.append(gtk.accel_map_get().connect('changed::%s' % (accelpath,), self.on_accelmap_changed, item)) self._action_group.add_action_with_accel(action, item.shortcut) manager.add_ui(self._merge_id, path, action_name, action_name, gtk.UI_MANAGER_MENUITEM, False) def on_accelmap_changed(self, accelmap, path, key, mod, tool): tool.shortcut = gtk.accelerator_name(key, mod) tool.save() self._window.get_data("ExternalToolsPluginWindowData").update_manager(tool) def update(self): self.remove() self._merge_id = self._window.get_ui_manager().new_merge_id() self._insert_directory(self._library.tree, self._menupath) self._window.get_ui_manager().insert_action_group(self._action_group, -1) self.filter(self._window.get_active_document()) def filter_language(self, language, item): if not item.languages: return 
True if not language and 'plain' in item.languages: return True if language and (language.get_id() in item.languages): return True else: return False def filter(self, document): if document is None: return titled = document.get_uri() is not None remote = not document.is_local() states = { 'all' : True, 'local': titled and not remote, 'remote': titled and remote, 'titled': titled, 'untitled': not titled, } language = document.get_language() for action in self._action_group.list_actions(): item = action.get_data(self.ACTION_ITEM_DATA_KEY) if item is not None: action.set_visible(states[item.applicability] and self.filter_language(language, item)) class ExternalToolsWindowHelper(object): def __init__(self, plugin, window): super(ExternalToolsWindowHelper, self).__init__() self._window = window self._plugin = plugin self._library = ToolLibrary() manager = window.get_ui_manager() self._action_group = gtk.ActionGroup('ExternalToolsPluginActions') self._action_group.set_translation_domain('pluma') self._action_group.add_actions([('ExternalToolManager', None, _('Manage _External Tools...'), None, _("Opens the External Tools Manager"), lambda action: plugin.open_dialog()), ('ExternalTools', None, _('External _Tools'), None, _("External tools"),<|fim▁hole|> <ui> <menubar name="MenuBar"> <menu name="ToolsMenu" action="Tools"> <placeholder name="ToolsOps_4"> <separator/> <menu name="ExternalToolsMenu" action="ExternalTools"> <placeholder name="ExternalToolPlaceholder"/> </menu> <separator/> </placeholder> <placeholder name="ToolsOps_5"> <menuitem name="ExternalToolManager" action="ExternalToolManager"/> </placeholder> </menu> </menubar> </ui>""" self._merge_id = manager.add_ui_from_string(ui_string) self.menu = ToolMenu(self._library, self._window, "/MenuBar/ToolsMenu/ToolsOps_4/ExternalToolsMenu/ExternalToolPlaceholder") manager.ensure_update() # Create output console self._output_buffer = OutputPanel(self._plugin.get_data_dir(), window) bottom = window.get_bottom_panel() 
bottom.add_item(self._output_buffer.panel, _("Shell Output"), gtk.STOCK_EXECUTE) def update_ui(self): self.menu.filter(self._window.get_active_document()) self._window.get_ui_manager().ensure_update() def deactivate(self): manager = self._window.get_ui_manager() self.menu.deactivate() manager.remove_ui(self._merge_id) manager.remove_action_group(self._action_group) manager.ensure_update() bottom = self._window.get_bottom_panel() bottom.remove_item(self._output_buffer.panel) def update_manager(self, tool): self._plugin.update_manager(tool) class ExternalToolsPlugin(pluma.Plugin): WINDOW_DATA_KEY = "ExternalToolsPluginWindowData" def __init__(self): super(ExternalToolsPlugin, self).__init__() self._manager = None self._manager_default_size = None ToolLibrary().set_locations(os.path.join(self.get_data_dir(), 'tools')) def activate(self, window): helper = ExternalToolsWindowHelper(self, window) window.set_data(self.WINDOW_DATA_KEY, helper) def deactivate(self, window): window.get_data(self.WINDOW_DATA_KEY).deactivate() window.set_data(self.WINDOW_DATA_KEY, None) def update_ui(self, window): window.get_data(self.WINDOW_DATA_KEY).update_ui() def create_configure_dialog(self): return self.open_dialog() def open_dialog(self): if not self._manager: self._manager = Manager(self.get_data_dir()) if self._manager_default_size: self._manager.dialog.set_default_size(*self._manager_default_size) self._manager.dialog.connect('destroy', self.on_manager_destroy) window = pluma.app_get_default().get_active_window() self._manager.run(window) return self._manager.dialog def update_manager(self, tool): if not self._manager: return self._manager.tool_changed(tool, True) def on_manager_destroy(self, dialog): self._manager_default_size = [dialog.allocation.width, dialog.allocation.height] self._manager = None # ex:ts=4:et:<|fim▁end|>
None)]) manager.insert_action_group(self._action_group, -1) ui_string = """
<|file_name|>shift.py<|end_file_name|><|fim▁begin|>import numpy from chainer.backends import cuda from chainer import function_node from chainer.utils import type_check def _pair(x): if hasattr(x, '__getitem__'): return x return x, x class Shift(function_node.FunctionNode): def __init__(self, ksize=3, dilate=1): super(Shift, self).__init__() self.kh, self.kw = _pair(ksize) if self.kh % 2 != 1: raise ValueError('kh must be odd') if self.kw % 2 != 1: raise ValueError('kw must be odd') self.dy, self.dx = _pair(dilate) def check_type_forward(self, in_types): n_in = in_types.size() type_check.expect(n_in == 1) x_type = in_types[0] type_check.expect( x_type.dtype.kind == 'f', x_type.ndim == 4, x_type.shape[1] >= self.kh * self.kw, ) def forward_cpu(self, inputs): x = inputs[0] b, c, h, w = x.shape<|fim▁hole|> 'constant') n_groups = self.kh * self.kw group_size = c // n_groups ret = [] for i, group_idx in enumerate(range(n_groups)): # Make sure that center group is last if group_idx == (n_groups - 1) // 2: group_idx = n_groups - 1 elif group_idx == (n_groups - 1): group_idx = (n_groups - 1) // 2 ky = (group_idx // self.kw) - py // abs(self.dy) kx = (group_idx % self.kw) - px // abs(self.dx) hs = py + -ky * self.dy ws = px + -kx * self.dx he = hs + h we = ws + w cs = i * group_size ce = (i + 1) * group_size if i < n_groups - 1 else None ret.append(x[:, cs:ce, hs:he, ws:we]) return numpy.concatenate(ret, axis=1), def forward_gpu(self, inputs): x = inputs[0] b, c, h, w = x.shape y = cuda.cupy.empty_like(x) cuda.elementwise( 'raw T x, int32 c, int32 h, int32 w,' 'int32 kh, int32 kw,' 'int32 dy, int32 dx', 'T y', ''' int b0 = i / (c * h * w); int rest = i % (c * h * w); int c0 = rest / (h * w); rest %= h * w; int out_row = rest / w; int out_col = rest % w; int n_groups = kh * kw; int group_size = c / n_groups; int group_idx = c0 / group_size; // Make sure that center group is last if (group_idx == (n_groups - 1) / 2) { group_idx = n_groups - 1; } else if (group_idx == n_groups 
- 1) { group_idx = (n_groups - 1) / 2; } int ky = (group_idx / kw) - kh / 2; int kx = (group_idx % kw) - kw / 2; if (group_idx >= n_groups) { ky = 0; kx = 0; } int in_row = -ky * dy + out_row; int in_col = -kx * dx + out_col; if (in_row >= 0 && in_row < h && in_col >= 0 && in_col < w) { y = x[b0 * c * h * w + c0 * h * w + in_row * w + in_col]; } else { y = 0; } ''', 'shift_gpu')(x, c, h, w, self.kh, self.kw, self.dy, self.dx, y) return y, def backward(self, indexes, grad_outputs): return shift(grad_outputs[0], ksize=(self.kh, self.kw), dilate=(-self.dy, -self.dx)), def shift(x, ksize=3, dilate=1): """Shift function. See: `Shift: A Zero FLOP, Zero Parameter Alternative to Spatial \ Convolutions <https://arxiv.org/abs/1711.08141>`_ Args: x (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \ :class:`cupy.ndarray`): Input variable of shape :math:`(n, c, h, w)`. ksize (int or pair of ints): Size of filters (a.k.a. kernels). ``ksize=k`` and ``ksize=(k, k)`` are equivalent. dilate (int or pair of ints): Dilation factor of filter applications. ``dilate=d`` and ``dilate=(d, d)`` are equivalent. Returns: ~chainer.Variable: Output variable of same shape as ``x``. """ fnode = Shift(ksize, dilate) y, = fnode.apply((x,)) return y<|fim▁end|>
py = self.kh // 2 * abs(self.dy) px = self.kw // 2 * abs(self.dx) x = numpy.pad(x, ((0, 0), (0, 0), (py, py), (px, px)),
<|file_name|>button.rs<|end_file_name|><|fim▁begin|>//! Easy use of buttons. use peripheral; /// The user button. pub static BUTTONS: [Button; 1] = [Button { i: 0 }]; /// A single button. pub struct Button { i: u8, } impl Button { /// Read the state of the button. pub fn pressed(&self) -> bool { let idr = &peripheral::gpioa().idr; match self.i { 0 => idr.read().idr0(), _ => false } } } <|fim▁hole|>/// /// - Must be called once /// - Must be called in an interrupt-free environment pub unsafe fn init() { let rcc = peripheral::rcc_mut(); // RCC: Enable GPIOA rcc.ahb1enr.modify(|_, w| w.gpioaen(true)); } /// An enum over the Buttons, each Button is associated but its name. pub enum Buttons { /// User User, } impl Buttons { /// Read the state of this button. pub fn pressed(&self) -> bool { match *self { Buttons::User => BUTTONS[0].pressed() } } }<|fim▁end|>
/// Initializes the necessary stuff to enable the button. /// /// # Safety
<|file_name|>platform-service.ts<|end_file_name|><|fim▁begin|>///<reference path="../.d.ts"/> "use strict"; import path = require("path"); import shell = require("shelljs"); import util = require("util"); import constants = require("./../constants"); import helpers = require("./../common/helpers"); import semver = require("semver"); export class PlatformService implements IPlatformService { private static TNS_MODULES_FOLDER_NAME = "tns_modules"; constructor(private $devicesServices: Mobile.IDevicesServices, private $errors: IErrors, private $fs: IFileSystem, private $logger: ILogger, private $npmInstallationManager: INpmInstallationManager, private $platformsData: IPlatformsData, private $projectData: IProjectData, private $projectDataService: IProjectDataService, private $prompter: IPrompter, private $commandsService: ICommandsService, private $options: IOptions, private $broccoliBuilder: IBroccoliBuilder, private $pluginsService: IPluginsService, private $projectFilesManager: IProjectFilesManager) { } public addPlatforms(platforms: string[]): IFuture<void> { return (() => { var platformsDir = this.$projectData.platformsDir; this.$fs.ensureDirectoryExists(platformsDir).wait(); _.each(platforms, platform => { this.addPlatform(platform.toLowerCase()).wait(); }); }).future<void>()(); } private addPlatform(platform: string): IFuture<void> { return(() => { var parts = platform.split("@"); platform = parts[0]; var version = parts[1]; this.validatePlatform(platform); var platformPath = path.join(this.$projectData.platformsDir, platform); if (this.$fs.exists(platformPath).wait()) { this.$errors.fail("Platform %s already added", platform); } var platformData = this.$platformsData.getPlatformData(platform); // Copy platform specific files in platforms dir var platformProjectService = platformData.platformProjectService; platformProjectService.validate().wait(); // Log the values for project this.$logger.trace("Creating NativeScript project for the %s platform", platform); 
this.$logger.trace("Path: %s", platformData.projectRoot); this.$logger.trace("Package: %s", this.$projectData.projectId); this.$logger.trace("Name: %s", this.$projectData.projectName); this.$logger.out("Copying template files..."); var packageToInstall = ""; var npmOptions: IStringDictionary = { pathToSave: path.join(this.$projectData.platformsDir, platform) }; if(this.$options.frameworkPath) { packageToInstall = this.$options.frameworkPath; } else { packageToInstall = platformData.frameworkPackageName; npmOptions["version"] = version; } var downloadedPackagePath = this.$npmInstallationManager.install(packageToInstall, npmOptions).wait(); var frameworkDir = path.join(downloadedPackagePath, constants.PROJECT_FRAMEWORK_FOLDER_NAME); frameworkDir = path.resolve(frameworkDir); try { this.addPlatformCore(platformData, frameworkDir).wait(); } catch(err) { this.$fs.deleteDirectory(platformPath).wait(); throw err; } this.$logger.out("Project successfully created."); }).future<void>()(); } private addPlatformCore(platformData: IPlatformData, frameworkDir: string): IFuture<void> { return (() => { platformData.platformProjectService.createProject(platformData.projectRoot, frameworkDir).wait(); var installedVersion = this.$fs.readJson(path.join(frameworkDir, "../", "package.json")).wait().version; if(this.$options.frameworkPath && this.$fs.getFsStats(this.$options.frameworkPath).wait().isFile() && !this.$options.symlink) { // Need to remove unneeded node_modules folder // One level up is the runtime module and one above is the node_modules folder. 
this.$fs.deleteDirectory(path.join(frameworkDir, "../../")).wait(); } platformData.platformProjectService.interpolateData(platformData.projectRoot).wait(); platformData.platformProjectService.afterCreateProject(platformData.projectRoot).wait(); this.$projectDataService.initialize(this.$projectData.projectDir); this.$projectDataService.setValue(platformData.frameworkPackageName, {version: installedVersion}).wait(); }).future<void>()(); } public getInstalledPlatforms(): IFuture<string[]> { return(() => { if(!this.$fs.exists(this.$projectData.platformsDir).wait()) { return []; } var subDirs = this.$fs.readDirectory(this.$projectData.platformsDir).wait(); return _.filter(subDirs, p => this.$platformsData.platformsNames.indexOf(p) > -1); }).future<string[]>()(); } public getAvailablePlatforms(): IFuture<string[]> { return (() => { var installedPlatforms = this.getInstalledPlatforms().wait(); return _.filter(this.$platformsData.platformsNames, p => { return installedPlatforms.indexOf(p) < 0 && this.isPlatformSupportedForOS(p); // Only those not already installed }); }).future<string[]>()(); } public getPreparedPlatforms(): IFuture<string[]> { return (() => { return _.filter(this.$platformsData.platformsNames, p => { return this.isPlatformPrepared(p).wait(); }); }).future<string[]>()(); } public preparePlatform(platform: string): IFuture<void> { return (() => { this.validatePlatform(platform); platform = platform.toLowerCase(); var platformData = this.$platformsData.getPlatformData(platform); let appDestinationDirectoryPath = path.join(platformData.appDestinationDirectoryPath, constants.APP_FOLDER_NAME); let lastModifiedTime = this.$fs.exists(appDestinationDirectoryPath).wait() ? 
this.$fs.getFsStats(appDestinationDirectoryPath).wait().mtime : null; // Copy app folder to native project this.$fs.ensureDirectoryExists(appDestinationDirectoryPath).wait(); var appSourceDirectoryPath = path.join(this.$projectData.projectDir, constants.APP_FOLDER_NAME); // Delete the destination app in order to prevent EEXIST errors when symlinks are used. let contents = this.$fs.readDirectory(appDestinationDirectoryPath).wait(); _(contents) .filter(directoryName => directoryName !== "tns_modules") .each(directoryName => this.$fs.deleteDirectory(path.join(appDestinationDirectoryPath, directoryName)).wait()) .value(); shell.cp("-Rf", appSourceDirectoryPath, platformData.appDestinationDirectoryPath); // Copy App_Resources to project root folder this.$fs.ensureDirectoryExists(platformData.appResourcesDestinationDirectoryPath).wait(); // Should be deleted var appResourcesDirectoryPath = path.join(appDestinationDirectoryPath, constants.APP_RESOURCES_FOLDER_NAME); if (this.$fs.exists(appResourcesDirectoryPath).wait()) { platformData.platformProjectService.prepareAppResources(appResourcesDirectoryPath).wait(); shell.cp("-Rf", path.join(appResourcesDirectoryPath, platformData.normalizedPlatformName, "*"), platformData.appResourcesDestinationDirectoryPath); this.$fs.deleteDirectory(appResourcesDirectoryPath).wait(); } platformData.platformProjectService.prepareProject().wait(); // Process node_modules folder this.$pluginsService.ensureAllDependenciesAreInstalled().wait(); var tnsModulesDestinationPath = path.join(platformData.appDestinationDirectoryPath, constants.APP_FOLDER_NAME, PlatformService.TNS_MODULES_FOLDER_NAME); this.$broccoliBuilder.prepareNodeModules(tnsModulesDestinationPath, this.$projectData.projectDir, platform, lastModifiedTime).wait(); // Process platform specific files let directoryPath = path.join(platformData.appDestinationDirectoryPath, constants.APP_FOLDER_NAME); let excludedDirs = [constants.APP_RESOURCES_FOLDER_NAME]; 
this.$projectFilesManager.processPlatformSpecificFiles(directoryPath, platform, excludedDirs).wait(); this.$logger.out("Project successfully prepared"); }).future<void>()(); } public buildPlatform(platform: string): IFuture<void> { return (() => { platform = platform.toLowerCase(); this.preparePlatform(platform).wait(); var platformData = this.$platformsData.getPlatformData(platform); platformData.platformProjectService.buildProject(platformData.projectRoot).wait(); this.$logger.out("Project successfully built"); }).future<void>()(); } public runPlatform(platform: string): IFuture<void> { return (() => { platform = platform.toLowerCase(); if (this.$options.emulator) { this.deployOnEmulator(platform).wait(); } else { this.deployOnDevice(platform).wait(); } }).future<void>()(); } public removePlatforms(platforms: string[]): IFuture<void> { return (() => { this.$projectDataService.initialize(this.$projectData.projectDir); _.each(platforms, platform => { this.validatePlatformInstalled(platform); let platformData = this.$platformsData.getPlatformData(platform); var platformDir = path.join(this.$projectData.platformsDir, platform); this.$fs.deleteDirectory(platformDir).wait(); this.$projectDataService.removeProperty(platformData.frameworkPackageName).wait(); this.$logger.out(`Platform ${platform} successfully removed.`); }); }).future<void>()(); } public updatePlatforms(platforms: string[]): IFuture<void> { return (() => { _.each(platforms, platform => { var parts = platform.split("@"); platform = parts[0].toLowerCase(); var version = parts[1]; this.validatePlatformInstalled(platform); this.updatePlatform(platform, version).wait(); }); }).future<void>()(); } public deployOnDevice(platform: string): IFuture<void> { return (() => { platform = platform.toLowerCase(); var platformData = this.$platformsData.getPlatformData(platform); var cachedDeviceOption = this.$options.forDevice; this.$options.forDevice = true; this.buildPlatform(platform).wait(); this.$options.forDevice = 
!!cachedDeviceOption; // Get latest package that is produced from build var packageFile = this.getLatestApplicationPackageForDevice(platformData).wait().packageName; this.$logger.out("Using ", packageFile); this.$devicesServices.initialize({platform: platform, deviceId: this.$options.device}).wait(); var action = (device: Mobile.IDevice): IFuture<void> => { return (() => { device.deploy(packageFile, this.$projectData.projectId).wait(); if (!this.$options.justlaunch) { device.openDeviceLogStream(); } }).future<void>()(); }; this.$devicesServices.execute(action).wait(); this.$commandsService.tryExecuteCommand("device", ["run", this.$projectData.projectId]).wait(); }).future<void>()(); } public deployOnEmulator(platform: string): IFuture<void> { return (() => { this.validatePlatformInstalled(platform); platform = platform.toLowerCase(); var platformData = this.$platformsData.getPlatformData(platform); var emulatorServices = platformData.emulatorServices; emulatorServices.checkAvailability().wait(); emulatorServices.checkDependencies().wait(); if(!this.$options.availableDevices) { this.buildPlatform(platform).wait(); var packageFile = this.getLatestApplicationPackageForEmulator(platformData).wait().packageName; this.$logger.out("Using ", packageFile); var logFilePath = path.join(platformData.projectRoot, this.$projectData.projectName, "emulator.log"); } emulatorServices.startEmulator(packageFile, { stderrFilePath: logFilePath, stdoutFilePath: logFilePath, appId: this.$projectData.projectId }).wait(); }).future<void>()(); } public validatePlatform(platform: string): void { if(!platform) { this.$errors.fail("No platform specified.") } var parts = platform.split("@"); platform = parts[0].toLowerCase(); if (!this.isValidPlatform(platform)) { this.$errors.fail("Invalid platform %s. 
Valid platforms are %s.", platform, helpers.formatListOfNames(this.$platformsData.platformsNames)); } if (!this.isPlatformSupportedForOS(platform)) { this.$errors.fail("Applications for platform %s can not be built on this OS - %s", platform, process.platform); } } public validatePlatformInstalled(platform: string): void { this.validatePlatform(platform); if (!this.isPlatformInstalled(platform).wait()) { this.$errors.fail("The platform %s is not added to this project. Please use 'tns platform add <platform>'", platform); } } public addLibrary(platform: string, libraryPath: string): IFuture<void> { return (() => { if (!this.$fs.exists(libraryPath).wait()) { this.$errors.failWithoutHelp("The path %s does not exist", libraryPath); } else { var platformData = this.$platformsData.getPlatformData(platform); platformData.platformProjectService.addLibrary(libraryPath).wait(); } }).future<void>()(); } private isPlatformInstalled(platform: string): IFuture<boolean> { return this.$fs.exists(path.join(this.$projectData.platformsDir, platform.toLowerCase())); } private isValidPlatform(platform: string) { return this.$platformsData.getPlatformData(platform); } private isPlatformSupportedForOS(platform: string): boolean { var targetedOS = this.$platformsData.getPlatformData(platform).targetedOS; if(!targetedOS || targetedOS.indexOf("*") >= 0 || targetedOS.indexOf(process.platform) >= 0) { return true; } return false; } private isPlatformPrepared(platform: string): IFuture<boolean> { var platformData = this.$platformsData.getPlatformData(platform); return platformData.platformProjectService.isPlatformPrepared(platformData.projectRoot); } private getApplicationPackages(buildOutputPath: string, validPackageNames: string[]): IFuture<IApplicationPackage[]> { return (() => { // Get latest package that is produced from build var candidates = this.$fs.readDirectory(buildOutputPath).wait(); var packages = _.filter(candidates, candidate => { return _.contains(validPackageNames, candidate); 
}).map(currentPackage => { currentPackage = path.join(buildOutputPath, currentPackage); return { packageName: currentPackage, time: this.$fs.getFsStats(currentPackage).wait().mtime }; }); return packages; }).future<IApplicationPackage[]>()(); } private getLatestApplicationPackage(buildOutputPath: string, validPackageNames: string[]): IFuture<IApplicationPackage> { return (() => { var packages = this.getApplicationPackages(buildOutputPath, validPackageNames).wait(); if (packages.length === 0) { var packageExtName = path.extname(validPackageNames[0]); this.$errors.fail("No %s found in %s directory", packageExtName, buildOutputPath); } packages = _.sortBy(packages, pkg => pkg.time).reverse(); // We need to reverse because sortBy always sorts in ascending order return packages[0]; }).future<IApplicationPackage>()(); } public getLatestApplicationPackageForDevice(platformData: IPlatformData) { return this.getLatestApplicationPackage(platformData.deviceBuildOutputPath, platformData.validPackageNamesForDevice); } public getLatestApplicationPackageForEmulator(platformData: IPlatformData) { return this.getLatestApplicationPackage(platformData.emulatorBuildOutputPath || platformData.deviceBuildOutputPath, platformData.validPackageNamesForEmulator || platformData.validPackageNamesForDevice); } private updatePlatform(platform: string, version: string): IFuture<void> { return (() => { var platformData = this.$platformsData.getPlatformData(platform); this.$projectDataService.initialize(this.$projectData.projectDir); var data = this.$projectDataService.getValue(platformData.frameworkPackageName).wait(); var currentVersion = data && data.version ? data.version : "0.2.0"; var newVersion = version || this.$npmInstallationManager.getLatestVersion(platformData.frameworkPackageName).wait(); if(platformData.platformProjectService.canUpdatePlatform(currentVersion, newVersion).wait()) { if(!semver.valid(newVersion)) { this.$errors.fail("The version %s is not valid. 
The version should consists from 3 parts separated by dot.", newVersion); } if(semver.gt(currentVersion, newVersion)) { // Downgrade var isUpdateConfirmed = this.$prompter.confirm(util.format("You are going to downgrade to android runtime v.%s. Are you sure?", newVersion), () => false).wait(); if(isUpdateConfirmed) { this.updatePlatformCore(platformData, currentVersion, newVersion).wait(); } } else if(semver.eq(currentVersion, newVersion)) { this.$errors.fail("Current and new version are the same."); } else { this.updatePlatformCore(platformData, currentVersion, newVersion).wait(); } } else { var isUpdateConfirmed = this.$prompter.confirm(util.format("We need to override xcodeproj file. The old one will be saved at %s. Are you sure?", this.$options.profileDir), () => true).wait(); if(isUpdateConfirmed) { platformData.platformProjectService.updatePlatform(currentVersion, newVersion).wait(); this.updatePlatformCore(platformData, currentVersion, newVersion).wait(); } } }).future<void>()(); } private updatePlatformCore(platformData: IPlatformData, currentVersion: string, newVersion: string): IFuture<void> { return (() => { // Remove old framework files var oldFrameworkData = this.getFrameworkFiles(platformData, currentVersion).wait(); _.each(oldFrameworkData.frameworkFiles, file => { var fileToDelete = path.join(platformData.projectRoot, file); this.$logger.trace("Deleting %s", fileToDelete);<|fim▁hole|> _.each(oldFrameworkData.frameworkDirectories, dir => { var dirToDelete = path.join(platformData.projectRoot, dir); this.$logger.trace("Deleting %s", dirToDelete); this.$fs.deleteDirectory(dirToDelete).wait(); }); // Add new framework files var newFrameworkData = this.getFrameworkFiles(platformData, newVersion).wait(); var cacheDirectoryPath = this.$npmInstallationManager.getCachedPackagePath(platformData.frameworkPackageName, newVersion); _.each(newFrameworkData.frameworkFiles, file => { var sourceFile = path.join(cacheDirectoryPath, 
constants.PROJECT_FRAMEWORK_FOLDER_NAME, file); var destinationFile = path.join(platformData.projectRoot, file); this.$logger.trace("Replacing %s with %s", sourceFile, destinationFile); shell.cp("-f", sourceFile, destinationFile); }); _.each(newFrameworkData.frameworkDirectories, dir => { var sourceDirectory = path.join(cacheDirectoryPath, constants.PROJECT_FRAMEWORK_FOLDER_NAME, dir); var destinationDirectory = path.join(platformData.projectRoot, dir); this.$logger.trace("Copying %s to %s", sourceDirectory, destinationDirectory); shell.cp("-fR", path.join(sourceDirectory, "*"), destinationDirectory); }); // Update .tnsproject file this.$projectDataService.initialize(this.$projectData.projectDir); this.$projectDataService.setValue(platformData.frameworkPackageName, {version: newVersion}).wait(); this.$logger.out("Successfully updated to version ", newVersion); }).future<void>()(); } private getFrameworkFiles(platformData: IPlatformData, version: string): IFuture<any> { return (() => { var cachedPackagePath = this.$npmInstallationManager.getCachedPackagePath(platformData.frameworkPackageName, version); this.ensurePackageIsCached(cachedPackagePath, platformData.frameworkPackageName, version).wait(); var allFiles = this.$fs.enumerateFilesInDirectorySync(cachedPackagePath); var filteredFiles = _.filter(allFiles, file => _.contains(platformData.frameworkFilesExtensions, path.extname(file))); var allFrameworkDirectories = _.map(this.$fs.readDirectory(path.join(cachedPackagePath, constants.PROJECT_FRAMEWORK_FOLDER_NAME)).wait(), dir => path.join(cachedPackagePath, constants.PROJECT_FRAMEWORK_FOLDER_NAME, dir)); var filteredFrameworkDirectories = _.filter(allFrameworkDirectories, dir => this.$fs.getFsStats(dir).wait().isDirectory() && (_.contains(platformData.frameworkFilesExtensions, path.extname(dir)) || _.contains(platformData.frameworkDirectoriesNames, path.basename(dir)))); return { frameworkFiles: this.mapFrameworkFiles(cachedPackagePath, filteredFiles), 
frameworkDirectories: this.mapFrameworkFiles(cachedPackagePath, filteredFrameworkDirectories) } }).future<any>()(); } private ensurePackageIsCached(cachedPackagePath: string, packageName: string, version: string): IFuture<void> { return (() => { if(!this.$fs.exists(cachedPackagePath).wait()) { this.$npmInstallationManager.addToCache(packageName, version).wait(); } }).future<void>()(); } private mapFrameworkFiles(npmCacheDirectoryPath: string, files: string[]): string[] { return _.map(files, file => file.substr(npmCacheDirectoryPath.length + constants.PROJECT_FRAMEWORK_FOLDER_NAME.length + 1)) } } $injector.register("platformService", PlatformService);<|fim▁end|>
this.$fs.deleteFile(fileToDelete).wait(); });
<|file_name|>plotit.py<|end_file_name|><|fim▁begin|>import numpy as np import os import sys import os.path as op import matplotlib as mpl import mpl.pyplot as plt import palettable.colorbrewer as pal from datetime import datetime from cycler import cycler #plt.rc('axes', prop_cycle=cycler('color', pal.qualitative.Dark2_8.mpl_colors)+ # cycler('marker',['D','o','v','*','^','x','h','8'])) mpl.rcParams['lines.markersize'] = 10 mpl.rcParams['lines.linewidth'] = 3 thispath = op.abspath(op.dirname(__file__)) mpi = np.genfromtxt('MPICompare.txt') heat = np.genfromtxt('HeatComplete.txt') KSdiv = np.genfromtxt('Divides.txt') KSall = np.genfromtxt('KSComplete.txt') ylbl = "Time per timestep (us)" xlbl = "Number of spatial points" #mpi fig, (ax1,ax2) = plt.subplots(1,2, figsize=(14,8)) plt.suptitle("MPI and GPU performance",fontsize='large', fontweight="bold") mpiLabels = ['MPIClassic', 'MPISwept', 'GPUClassic', 'GPUShared'] for i,mp in enumerate(mpiLabels): ax1.loglog(mpi[:,0],mpi[:,i+1]) ax1.hold(True) ax2.semilogx(mpi[:,0],mpi[:,-2],mpi[:,0],mpi[:,-1]) ax1.hold(True) ax1.legend(mpiLabels, loc='upper left', fontsize='medium') ax2.legend(["Classic", "Shared"], loc='upper left', fontsize='medium') ax1.grid(alpha=0.5) ax2.grid(alpha=0.5) ax1.set_ylabel(ylbl) ax2.set_ylabel("Speedup vs MPI") ax1.set_xlabel(xlbl) ax2.set_xlabel(xlbl) plotfile = op.join(thispath,"mpiPlot.pdf") ax1.set_xlim([heat[0,0],heat[-1,0]]) ax2.set_xlim([heat[0,0],heat[-1,0]]) fig.subplots_adjust(bottom=0.08, right=0.92, top=0.92) plt.savefig(plotfile, bbox_inches='tight') #KSdiv divs = ["Divide","Multiply"] fig, (ax1,ax2) = plt.subplots(1,2, figsize=(14,8), sharey=True) plt.suptitle("Improvement to KS from division avoidance",fontsize='large', fontweight="bold") ax1.loglog(KSdiv[:,0],KSdiv[:,1], KSdiv[:,0], KSdiv[:,2]) ax1.set_title("Double Precision") ax2.loglog(KSdiv[:,0],KSdiv[:,3], KSdiv[:,0], KSdiv[:,4]) ax2.set_title("Single Precision") ax1.set_ylabel(ylbl) ax1.set_xlabel(xlbl) ax2.set_xlabel(xlbl) 
ax1.set_xlim([heat[0,0],heat[-1,0]]) plt.legend(divs, loc='upper left', fontsize='medium') ax1.grid(alpha=0.5) ax2.grid(alpha=0.5) plotfile = op.join(thispath,"divisionPlot.pdf") ax2.set_xlim([heat[0,0],heat[-1,0]]) plt.savefig(plotfile, bbox_inches='tight') #hand, lbl = ax.get_legend_handles_labels() #Heat complete prec = ["Double", "Single"] ksorder = mpiLabels[2:] heatorder = ['Classic', 'GPUShared', 'Hybrid'] ho=[prec[0]+" "+rd for rd in heatorder]+[prec[1]+" "+rd for rd in heatorder] fig, (ax1,ax2) = plt.subplots(1,2, figsize=(14,8)) plt.suptitle("Heat",fontsize='large', fontweight="bold") ax1.loglog(heat[:,0],heat[:,1], heat[:,0], heat[:,2], heat[:,0], heat[:,3]) ax1.hold(True) ax1.loglog(heat[:,0],heat[:,6], heat[:,0], heat[:,7], heat[:,0], heat[:,8]) ax1.legend(ho, loc='upper left', fontsize='medium') ax1.set_ylabel(ylbl) ax1.set_xlabel(xlbl) ax1.set_xlim([heat[0,0],heat[-1,0]]) ho.pop(3) ho.pop(0) ax2.semilogx(heat[:,0],heat[:,4], heat[:,0], heat[:,5]) ax2.hold(True) ax2.semilogx(heat[:,0],heat[:,9], heat[:,0], heat[:,10]) ax2.legend(ho, loc='upper right', fontsize='medium') ax1.grid(alpha=0.5) ax2.grid(alpha=0.5) ax2.set_xlabel(xlbl) ax2.set_ylabel("Speedup vs Classic")<|fim▁hole|>plt.savefig(plotfile, bbox_inches='tight') reg = ["Register"] ksorder += reg #KS complete ko=[prec[0]+" "+ rd for rd in ksorder]+[prec[1]+" "+ rd for rd in ksorder] fig, (ax1,ax2) = plt.subplots(1,2, figsize=(14,8)) plt.suptitle("KS",fontsize='large', fontweight="bold") ax1.loglog(KSall[:,0],KSall[:,1], KSall[:,0], KSall[:,2], KSall[:,0], KSall[:,3]) ax1.hold(True) ax1.loglog(KSall[:,0],KSall[:,6], KSall[:,0], KSall[:,7], KSall[:,0], KSall[:,8]) ax1.legend(ko, loc='upper left', fontsize='medium') ax1.set_ylabel(ylbl) ax1.set_xlabel(xlbl) ax1.set_xlim([heat[0,0],heat[-1,0]]) ko.pop(3) ko.pop(0) ax2.semilogx(KSall[:,0],KSall[:,4], KSall[:,0], KSall[:,5]) ax2.hold(True) ax2.semilogx(KSall[:,0],KSall[:,9], KSall[:,0], KSall[:,10]) ax2.legend(ko, loc='upper right', fontsize='medium') 
ax1.grid(alpha=0.5) ax2.grid(alpha=0.5) ax2.set_xlabel(xlbl) ax2.set_ylabel("Speedup vs Classic") fig.tight_layout(pad=0.2, w_pad=0.75, h_pad=1.0) fig.subplots_adjust(bottom=0.08, right=0.92, top=0.92) plotfile = op.join(thispath,"KSallComplete.pdf") ax2.set_xlim([heat[0,0],heat[-1,0]]) plt.savefig(plotfile, bbox_inches='tight')<|fim▁end|>
fig.tight_layout(pad=0.2, w_pad=0.75, h_pad=1.5) fig.subplots_adjust(bottom=0.08, right=0.92, top=0.92) plotfile = op.join(thispath,"heatComplete.pdf") ax2.set_xlim([heat[0,0],heat[-1,0]])
<|file_name|>main.py<|end_file_name|><|fim▁begin|># Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # pylint: disable=invalid-name """ Hangouts Chat bot that responds to events and messages from a room synchronously. The bot formats the response using cards, inserting widgets based upon the user's original input. """ import logging from flask import Flask, render_template, request, json app = Flask(__name__) INTERACTIVE_TEXT_BUTTON_ACTION = "doTextButtonAction" INTERACTIVE_IMAGE_BUTTON_ACTION = "doImageButtonAction" INTERACTIVE_BUTTON_PARAMETER_KEY = "param_key" BOT_HEADER = 'Card Bot Python' @app.route('/', methods=['POST']) def home_post(): """Respond to POST requests to this endpoint. All requests sent to this endpoint from Hangouts Chat are POST requests. """ event_data = request.get_json() resp = None # If the bot is removed from the space, it doesn't post a message # to the space. Instead, log a message showing that the bot was removed. if event_data['type'] == 'REMOVED_FROM_SPACE': logging.info('Bot removed from %s', event_data['space']['name']) return 'OK' if event_data['type'] == 'ADDED_TO_SPACE' and event_data['space']['type'] == 'ROOM': resp = {'text': ('Thanks for adding me to {}!' .format(event_data['space']['name']))} elif event_data['type'] == 'ADDED_TO_SPACE' and event_data['space']['type'] == 'DM': resp = {'text': ('Thanks for adding me to a DM, {}!' 
.format(event_data['user']['displayName']))} elif event_data['type'] == 'MESSAGE': resp = create_card_response(event_data['message']['text']) elif event_data['type'] == 'CARD_CLICKED': action_name = event_data['action']['actionMethodName'] parameters = event_data['action']['parameters'] resp = respond_to_interactive_card_click(action_name, parameters) logging.info(resp) return json.jsonify(resp) @app.route('/', methods=['GET']) def home_get(): """Respond to GET requests to this endpoint. This function responds to requests with a simple HTML landing page for this App Engine instance. """ return render_template('home.html') def create_card_response(event_message): """Creates a card response based on the message sent in Hangouts Chat. See the reference for JSON keys and format for cards: https://developers.google.com/hangouts/chat/reference/message-formats/cards Args: eventMessage: the user's message to the bot """ response = dict() cards = list() widgets = list() header = None words = event_message.lower().split() for word in words: if word == 'header': header = { 'header': { 'title': BOT_HEADER, 'subtitle': 'Card header', 'imageUrl': 'https://goo.gl/5obRKj', 'imageStyle': 'IMAGE' } } elif word == 'textparagraph': widgets.append({ 'textParagraph': { 'text': '<b>This</b> is a <i>text paragraph</i>.' 
} }) elif word == 'keyvalue': widgets.append({ 'keyValue': { 'topLabel': 'KeyValue Widget', 'content': 'This is a KeyValue widget', 'bottomLabel': 'The bottom label', 'icon': 'STAR' } }) elif word == 'interactivetextbutton': widgets.append({ 'buttons': [ { 'textButton': { 'text': 'INTERACTIVE BUTTON', 'onClick': { 'action': { 'actionMethodName': INTERACTIVE_TEXT_BUTTON_ACTION, 'parameters': [{ 'key': INTERACTIVE_BUTTON_PARAMETER_KEY, 'value': event_message }] }<|fim▁hole|> }) elif word == 'interactiveimagebutton': widgets.append({ 'buttons': [ { 'imageButton': { 'icon': 'EVENT_SEAT', 'onClick': { 'action': { 'actionMethodName': INTERACTIVE_IMAGE_BUTTON_ACTION, 'parameters': [{ 'key': INTERACTIVE_BUTTON_PARAMETER_KEY, 'value': event_message }] } } } } ] }) elif word == 'textbutton': widgets.append({ 'buttons': [ { 'textButton': { 'text': 'TEXT BUTTON', 'onClick': { 'openLink': { 'url': 'https://developers.google.com', } } } } ] }) elif word == 'imagebutton': widgets.append({ 'buttons': [ { 'imageButton': { 'icon': 'EVENT_SEAT', 'onClick': { 'openLink': { 'url': 'https://developers.google.com', } } } } ] }) elif word == 'image': widgets.append({ 'image': { 'imageUrl': 'https://goo.gl/Bpa3Y5', 'onClick': { 'openLink': { 'url': 'https://developers.google.com' } } } }) if header is not None: cards.append(header) cards.append({'sections': [{'widgets': widgets}]}) response['cards'] = cards return response def respond_to_interactive_card_click(action_name, custom_params): """Creates a response for when the user clicks on an interactive card. 
See the guide for creating interactive cards https://developers.google.com/hangouts/chat/how-tos/cards-onclick Args: action_name: the name of the custom action defined in the original bot response custom_params: the parameters defined in the original bot response """ message = 'You clicked {}'.format( 'a text button' if action_name == INTERACTIVE_TEXT_BUTTON_ACTION else 'an image button') original_message = "" if custom_params[0]['key'] == INTERACTIVE_BUTTON_PARAMETER_KEY: original_message = custom_params[0]['value'] else: original_message = '<i>Cannot determine original message</i>' # If you want to respond to the same room but with a new message, # change the following value to NEW_MESSAGE. action_response = 'UPDATE_MESSAGE' return { 'actionResponse': { 'type': action_response }, 'cards': [ { 'header': { 'title': BOT_HEADER, 'subtitle': 'Interactive card clicked', 'imageUrl': 'https://goo.gl/5obRKj', 'imageStyle': 'IMAGE' } }, { 'sections': [ { 'widgets': [ { 'textParagraph': { 'text': message } }, { 'keyValue': { 'topLabel': 'Original message', 'content': original_message } } ] } ] } ] } if __name__ == '__main__': # This is used when running locally. Gunicorn is used to run the # application on Google App Engine. See entrypoint in app.yaml. app.run(host='127.0.0.1', port=8080, debug=True)<|fim▁end|>
} } } ]
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- <|fim▁hole|><|fim▁end|>
from reports.accidents.models import *
<|file_name|>JsonPathMessageConstructionInterceptor.java<|end_file_name|><|fim▁begin|>/* * Copyright 2006-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.consol.citrus.validation.json; <|fim▁hole|>import com.consol.citrus.exceptions.CitrusRuntimeException; import com.consol.citrus.exceptions.UnknownElementException; import com.consol.citrus.message.Message; import com.consol.citrus.message.MessageType; import com.consol.citrus.validation.interceptor.AbstractMessageConstructionInterceptor; import com.jayway.jsonpath.*; import net.minidev.json.parser.JSONParser; import net.minidev.json.parser.ParseException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.util.StringUtils; import java.util.HashMap; import java.util.Map; /** * @author Christoph Deppisch * @since 2.3 */ public class JsonPathMessageConstructionInterceptor extends AbstractMessageConstructionInterceptor { /** Logger */ private static Logger log = LoggerFactory.getLogger(JsonPathMessageConstructionInterceptor.class); /** Overwrites message elements before validating (via JSONPath expressions) */ private Map<String, String> jsonPathExpressions = new HashMap<>(); /** * Default constructor. */ public JsonPathMessageConstructionInterceptor() { super(); } /** * Default constructor using fields. 
* @param jsonPathExpressions */ public JsonPathMessageConstructionInterceptor(Map<String, String> jsonPathExpressions) { super(); this.jsonPathExpressions = jsonPathExpressions; } /** * Intercept the message payload construction and replace elements identified * via XPath expressions. * * Method parses the message payload to DOM document representation, therefore message payload * needs to be XML here. */ @Override public Message interceptMessage(Message message, String messageType, TestContext context) { if (message.getPayload() == null || !StringUtils.hasText(message.getPayload(String.class))) { return message; } String jsonPathExpression = null; try { JSONParser parser = new JSONParser(JSONParser.MODE_JSON_SIMPLE); Object jsonData = parser.parse(message.getPayload(String.class)); DocumentContext documentContext = JsonPath.parse(jsonData); for (Map.Entry<String, String> entry : jsonPathExpressions.entrySet()) { jsonPathExpression = entry.getKey(); String valueExpression = context.replaceDynamicContentInString(entry.getValue()); documentContext.set(jsonPathExpression, valueExpression); if (log.isDebugEnabled()) { log.debug("Element " + jsonPathExpression + " was set to value: " + valueExpression); } } message.setPayload(jsonData.toString()); } catch (ParseException e) { throw new CitrusRuntimeException("Failed to parse JSON text", e); } catch (PathNotFoundException e) { throw new UnknownElementException(String.format("Could not find element for expression: %s", jsonPathExpression), e); } return message; } @Override public boolean supportsMessageType(String messageType) { return MessageType.JSON.toString().equalsIgnoreCase(messageType); } public void setJsonPathExpressions(Map<String, String> jsonPathExpressions) { this.jsonPathExpressions = jsonPathExpressions; } public Map<String, String> getJsonPathExpressions() { return jsonPathExpressions; } }<|fim▁end|>
import com.consol.citrus.context.TestContext;
<|file_name|>validate.go<|end_file_name|><|fim▁begin|>package cpf import ( "errors" "strconv" "strings" ) // Valid validates the cpf and return a boolean and the error if any func Valid(digits string) (bool, error) { return valid(digits) } func sanitize(data string) string { data = strings.Replace(data, ".", "", -1) data = strings.Replace(data, "-", "", -1) return data } func valid(data string) (bool, error) { data = sanitize(data) <|fim▁hole|> if strings.Contains(blacklist, data) || !check(data) { return false, errors.New("Invalid value") } return true, nil } const blacklist = `00000000000 11111111111 22222222222 33333333333 44444444444 55555555555 66666666666 77777777777 88888888888 99999999999 12345678909` func stringToIntSlice(data string) (res []int) { for _, d := range data { x, err := strconv.Atoi(string(d)) if err != nil { continue } res = append(res, x) } return } func verify(data []int, n int) int { var total int for i := 0; i < n; i++ { total += data[i] * (n + 1 - i) } total = total % 11 if total < 2 { return 0 } return 11 - total } func check(data string) bool { return checkEach(data, 9) && checkEach(data, 10) } func checkEach(data string, n int) bool { final := verify(stringToIntSlice(data), n) x, err := strconv.Atoi(string(data[n])) if err != nil { return false } return final == x }<|fim▁end|>
if len(data) != 11 { return false, errors.New("Invalid length") }