prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>bluetigers.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Author: raver2046 <[email protected]> # # URL: https://sickrage.github.io # # This file is part of SickRage. # # SickRage is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # SickRage is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. import re from requests.utils import dict_from_cookiejar import traceback from sickbeard import logger, tvcache from sickbeard.bs4_parser import BS4Parser from sickrage.providers.torrent.TorrentProvider import TorrentProvider class BlueTigersProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes def __init__(self): TorrentProvider.__init__(self, "BLUETIGERS") self.username = None self.password = None self.ratio = None self.token = None self.cache = tvcache.TVCache(self, min_time=10) # Only poll BLUETIGERS every 10 minutes max self.urls = { 'base_url': 'https://www.bluetigers.ca/', 'search': 'https://www.bluetigers.ca/torrents-search.php', 'login': 'https://www.bluetigers.ca/account-login.php', 'download': 'https://www.bluetigers.ca/torrents-details.php?id=%s&hit=1', } self.search_params = { "c16": 1, "c10": 1, "c130": 1, "c131": 1, "c17": 1, "c18": 1, "c19": 1 } self.url = self.urls['base_url'] def login(self): if any(dict_from_cookiejar(self.session.cookies).values()): return True login_params = { 'username': self.username, 'password': self.password, 'take_login': '1' } response = self.get_url(self.urls['login'], post_data=login_params, timeout=30) if not 
response: check_login = self.get_url(self.urls['base_url'], timeout=30) if re.search('account-logout.php', check_login): return True else: logger.log(u"Unable to connect to provider", logger.WARNING) return False if re.search('account-login.php', response): logger.log(u"Invalid username or password. Check your settings", logger.WARNING) return False return True def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] if not self.login(): return results for mode in search_strings: items = [] logger.log(u"Search Mode: %s" % mode, logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': logger.log(u"Search string: {search}".format(search=search_string.decode('utf-8')), logger.DEBUG) self.search_params['search'] = search_string data = self.get_url(self.urls['search'], params=self.search_params) if not data: continue try: with BS4Parser(data, 'html5lib') as html: result_linkz = html.findAll('a', href=re.compile("torrents-details")) if not result_linkz: logger.log(u"Data returned from provider do not contains any torrent", logger.DEBUG) continue if result_linkz: for link in result_linkz: title = link.text download_url = self.urls['base_url'] + link['href'] download_url = download_url.replace("torrents-details", "download") # FIXME size = -1 seeders = 1 leechers = 0 if not title or not download_url: continue # Filter unseeded torrent # if seeders < self.minseed or leechers < self.minleech: # if mode != 'RSS': # logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG) # continue item = title, download_url, size, seeders, leechers if mode != 'RSS': logger.log(u"Found result: %s " % title, logger.DEBUG) items.append(item) except Exception: logger.log(u"Failed parsing provider. 
Traceback: %s" % traceback.format_exc(), logger.ERROR) # For each search mode sort all the items by seeders if available items.sort(key=lambda tup: tup[3], reverse=True) results += items return results def seed_ratio(self): return self.ratio <|fim▁hole|><|fim▁end|>
provider = BlueTigersProvider()
<|file_name|>list-workspaces.controller.ts<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2015-2018 Red Hat, Inc. * This program and the accompanying materials are made * available under the terms of the Eclipse Public License 2.0 * which is available at https://www.eclipse.org/legal/epl-2.0/ * * SPDX-License-Identifier: EPL-2.0 * * Contributors: * Red Hat, Inc. - initial API and implementation */ 'use strict'; import {CheAPI} from '../../../components/api/che-api.factory'; import {CheNotification} from '../../../components/notification/che-notification.factory'; import {CheWorkspace} from '../../../components/api/workspace/che-workspace.factory'; import {CheNamespaceRegistry} from '../../../components/api/namespace/che-namespace-registry.factory'; import {ConfirmDialogService} from '../../../components/service/confirm-dialog/confirm-dialog.service'; import {CheBranding} from '../../../components/branding/che-branding.factory'; /** * @ngdoc controller * @name workspaces.list.controller:ListWorkspacesCtrl * @description This class is handling the controller for listing the workspaces * @author Ann Shumilova */ export class ListWorkspacesCtrl { static $inject = ['$log', '$mdDialog', '$q', 'lodash', 'cheAPI', 'cheNotification', 'cheBranding', 'cheWorkspace', 'cheNamespaceRegistry', 'confirmDialogService', '$scope', 'cheListHelperFactory']; $q: ng.IQService; $log: ng.ILogService; lodash: any; $mdDialog: ng.material.IDialogService; cheAPI: CheAPI; cheNotification: CheNotification; cheWorkspace: CheWorkspace; cheListHelper: che.widget.ICheListHelper; state: string; isInfoLoading: boolean; workspaceFilter: any; userWorkspaces: che.IWorkspace[]; workspaceCreationLink: string; workspacesById: Map<string, che.IWorkspace>; workspaceUsedResources: Map<string, string>; isExactMatch: boolean = false; namespaceFilter: {namespace: string}; namespaceLabels: string[]; onFilterChanged: Function; onSearchChanged: Function; cheNamespaceRegistry: CheNamespaceRegistry; private 
confirmDialogService: ConfirmDialogService; private ALL_NAMESPACES: string = 'All Teams'; /** * Default constructor that is using resource */ constructor($log: ng.ILogService, $mdDialog: ng.material.IDialogService, $q: ng.IQService, lodash: any, cheAPI: CheAPI, cheNotification: CheNotification, cheBranding: CheBranding, cheWorkspace: CheWorkspace, cheNamespaceRegistry: CheNamespaceRegistry, confirmDialogService: ConfirmDialogService, $scope: ng.IScope, cheListHelperFactory: che.widget.ICheListHelperFactory) { this.cheAPI = cheAPI; this.$q = $q; this.$log = $log; this.lodash = lodash; this.$mdDialog = $mdDialog; this.cheNotification = cheNotification; this.cheWorkspace = cheWorkspace; this.cheNamespaceRegistry = cheNamespaceRegistry; this.confirmDialogService = confirmDialogService; this.workspaceCreationLink = cheBranding.getWorkspace().creationLink; const helperId = 'list-workspaces'; this.cheListHelper = cheListHelperFactory.getHelper(helperId); $scope.$on('$destroy', () => { cheListHelperFactory.removeHelper(helperId); }); this.state = 'loading'; this.isInfoLoading = true; this.isExactMatch = false; this.workspaceFilter = {config: {name: ''}}; this.namespaceFilter = {namespace: ''}; // map of all workspaces with additional info by id: this.workspacesById = new Map(); // map of workspaces' used resources (consumed GBH): this.workspaceUsedResources = new Map(); this.getUserWorkspaces(); this.cheNamespaceRegistry.fetchNamespaces().then(() => { this.namespaceLabels = this.getNamespaceLabelsList(); }); // callback when search value is changed this.onSearchChanged = (str: string) => { this.workspaceFilter.config.name = str; this.cheListHelper.applyFilter('name', this.workspaceFilter); }; // callback when namespace is changed this.onFilterChanged = (label : string) => { if (label === this.ALL_NAMESPACES) { this.namespaceFilter.namespace = ''; } else { let namespace = this.cheNamespaceRegistry.getNamespaces().find((namespace: che.INamespace) => { return namespace.label 
=== label; }); this.namespaceFilter.namespace = namespace.id; } this.isExactMatch = (label === this.ALL_NAMESPACES) ? false : true; this.cheListHelper.applyFilter('namespace', this.namespaceFilter, this.isExactMatch); }; } /** * Fetch current user's workspaces (where he is a member): */ getUserWorkspaces(): void { // fetch workspaces when initializing const promise = this.cheAPI.getWorkspace().fetchWorkspaces(); promise.then(() => { return this.updateSharedWorkspaces(); }, (error: any) => { if (error && error.status === 304) { // ok return this.updateSharedWorkspaces(); } this.state = 'error'; this.isInfoLoading = false; return this.$q.reject(error); }).then(() => { this.cheListHelper.setList(this.userWorkspaces, 'id'); }); } /** * Update the info of all user workspaces: * * @return {IPromise<any>} */ updateSharedWorkspaces(): ng.IPromise<any> { this.userWorkspaces = []; let workspaces = this.cheAPI.getWorkspace().getWorkspaces(); if (workspaces.length === 0) { this.isInfoLoading = false; } const promises: Array<ng.IPromise<any>> = []; workspaces.forEach((workspace: che.IWorkspace) => { // first check the list of already received workspace info: if (!this.workspacesById.get(workspace.id)) { const promise = this.cheWorkspace.fetchWorkspaceDetails(workspace.id) .catch((error: any) => { if (error && error.status === 304) { return this.$q.when(); } let message = error.data && error.data.message ? ' Reason: ' + error.data.message : ''; let workspaceName = this.cheWorkspace.getWorkspaceDataManager().getName(workspace); this.cheNotification.showError('Failed to retrieve workspace ' + workspaceName + ' data.' 
+ message) ; return this.$q.reject(error); }) .then(() => { let userWorkspace = this.cheAPI.getWorkspace().getWorkspaceById(workspace.id); this.getWorkspaceInfo(userWorkspace); this.userWorkspaces.push(userWorkspace); return this.$q.when(); }); promises.push(promise); } else { let userWorkspace = this.workspacesById.get(workspace.id); this.userWorkspaces.push(userWorkspace); this.isInfoLoading = false; } }); this.state = 'loaded'; return this.$q.all(promises); } /** * Represents given account resources as a map with workspace id as a key. * * @param {any[]} resources */ processUsedResources(resources: any[]): void { resources.forEach((resource: any) => { this.workspaceUsedResources.set(resource.workspaceId, resource.memory.toFixed(2)); }); } /** * Gets all necessary workspace info to be displayed. * * @param {che.IWorkspace} workspace */ getWorkspaceInfo(workspace: che.IWorkspace): void { let promises = []; this.workspacesById.set(workspace.id, workspace); workspace.isLocked = false; workspace.usedResources = this.workspaceUsedResources.get(workspace.id); // no access to runner resources if workspace is locked:<|fim▁hole|> this.$q.all(promises).finally(() => { this.isInfoLoading = false; }); } /** * Delete all selected workspaces */ deleteSelectedWorkspaces(): void { const selectedWorkspaces = this.cheListHelper.getSelectedItems(), selectedWorkspacesIds = selectedWorkspaces.map((workspace: che.IWorkspace) => { return workspace.id; }); let queueLength = selectedWorkspacesIds.length; if (!queueLength) { this.cheNotification.showError('No such workspace.'); return; } let confirmationPromise = this.showDeleteWorkspacesConfirmation(queueLength); confirmationPromise.then(() => { let numberToDelete = queueLength; let isError = false; let deleteWorkspacePromises = []; let workspaceName; selectedWorkspacesIds.forEach((workspaceId: string) => { this.cheListHelper.itemsSelectionStatus[workspaceId] = false; let workspace = this.cheWorkspace.getWorkspaceById(workspaceId); if 
(!workspace) { return; } workspaceName = this.cheWorkspace.getWorkspaceDataManager().getName(workspace); let stoppedStatusPromise = this.cheWorkspace.fetchStatusChange(workspaceId, 'STOPPED'); // stop workspace if it's status is RUNNING if (workspace.status === 'RUNNING') { this.cheWorkspace.stopWorkspace(workspaceId); } // delete stopped workspace let promise = stoppedStatusPromise.then(() => { return this.cheWorkspace.deleteWorkspaceConfig(workspaceId); }).then(() => { this.workspacesById.delete(workspaceId); queueLength--; }, (error: any) => { isError = true; this.$log.error('Cannot delete workspace: ', error); }); deleteWorkspacePromises.push(promise); }); this.$q.all(deleteWorkspacePromises).finally(() => { this.getUserWorkspaces(); if (isError) { this.cheNotification.showError('Delete failed.'); } else { if (numberToDelete === 1) { this.cheNotification.showInfo(workspaceName + ' has been removed.'); } else { this.cheNotification.showInfo('Selected workspaces have been removed.'); } } }); }); } /** * Show confirmation popup before workspaces to delete * @param numberToDelete{number} * @returns {ng.IPromise<any>} */ showDeleteWorkspacesConfirmation(numberToDelete: number): ng.IPromise<any> { let content = 'Would you like to delete '; if (numberToDelete > 1) { content += 'these ' + numberToDelete + ' workspaces?'; } else { content += 'this selected workspace?'; } return this.confirmDialogService.showConfirmDialog('Remove workspaces', content, 'Delete'); } /** * Returns the list of labels of available namespaces. * * @returns {Array} array of namespaces */ getNamespaceLabelsList(): string[] { let namespaces = this.lodash.pluck(this.cheNamespaceRegistry.getNamespaces(), 'label'); if (namespaces.length > 0) { return [this.ALL_NAMESPACES].concat(namespaces); } return namespaces; } }<|fim▁end|>
if (!workspace.isLocked) { let promiseWorkspace = this.cheAPI.getWorkspace().fetchWorkspaceDetails(workspace.id); promises.push(promiseWorkspace); }
<|file_name|>common.py<|end_file_name|><|fim▁begin|>import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/ ADMINS = ( ("David Barragán", "[email protected]"), ) # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '0q)_&-!hu%%en55a&cx!a2c^7aiw*7*+^zg%_&vk9&4&-4&qg#' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False ALLOWED_HOSTS = ['*'] # Database # https://docs.djangoproject.com/en/dev/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/dev/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/dev/howto/static-files/ STATIC_ROOT = os.path.join(BASE_DIR, 'static') STATIC_URL = '/static/' # Media files MEDIA_ROOT = os.path.join(BASE_DIR, 'media') MEDIA_URL = '/media/' # Application definition <|fim▁hole|> 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'monarch.base', 'monarch.documents', 'monarch.users', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'monarch.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 
'TEMPLATE_DEBUG': False, 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'wsgi.application' # Password validation # https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ]<|fim▁end|>
INSTALLED_APPS = [
<|file_name|>nav.ts<|end_file_name|><|fim▁begin|>/** * Created by d.d on 18/07/2017. */ <|fim▁hole|> public name: string; public title: string; public path: string; public event?: () => void; constructor(name = '', title = '', path = '/', event = noon) { this.name = name; this.title = title; this.path = path; this.event = event; } }<|fim▁end|>
const noon = () => { }; export class Item {
<|file_name|>shootout-mandelbrot.rs<|end_file_name|><|fim▁begin|>// The Computer Language Benchmarks Game // http://benchmarksgame.alioth.debian.org/ // // contributed by the Rust Project Developers // Copyright (c) 2012-2014 The Rust Project Developers // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // - Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // - Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in // the documentation and/or other materials provided with the // distribution. //<|fim▁hole|>// written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED // OF THE POSSIBILITY OF SUCH DAMAGE. 
#![feature(simd, core)] // ignore-pretty very bad with line comments use std::env; use std::io::prelude::*; use std::io; use std::simd::f64x2; use std::sync::Arc; use std::thread; const ITER: usize = 50; const LIMIT: f64 = 2.0; const WORKERS: usize = 16; fn mandelbrot<W: Write>(w: usize, mut out: W) -> io::Result<()> { assert!(WORKERS % 2 == 0); // Ensure w and h are multiples of 8. let w = (w + 7) / 8 * 8; let h = w; let chunk_size = h / WORKERS; // Account for remainders in workload division, e.g. 1000 / 16 = 62.5 let last_chunk_size = if h % WORKERS != 0 { chunk_size + h % WORKERS } else { chunk_size }; // precalc values let inverse_w_doubled = 2.0 / w as f64; let inverse_h_doubled = 2.0 / h as f64; let v_inverses = f64x2(inverse_w_doubled, inverse_h_doubled); let v_consts = f64x2(1.5, 1.0); // A lot of this code assumes this (so do other lang benchmarks) assert!(w == h); let mut precalc_r = Vec::with_capacity(w); let mut precalc_i = Vec::with_capacity(h); let precalc_futures = (0..WORKERS).map(|i| { thread::spawn(move|| { let mut rs = Vec::with_capacity(w / WORKERS); let mut is = Vec::with_capacity(w / WORKERS); let start = i * chunk_size; let end = if i == (WORKERS - 1) { start + last_chunk_size } else { (i + 1) * chunk_size }; // This assumes w == h for x in start..end { let xf = x as f64; let xy = f64x2(xf, xf); let f64x2(r, i) = xy * v_inverses - v_consts; rs.push(r); is.push(i); } (rs, is) }) }).collect::<Vec<_>>(); for res in precalc_futures { let (rs, is) = res.join().unwrap(); precalc_r.extend(rs); precalc_i.extend(is); } assert_eq!(precalc_r.len(), w); assert_eq!(precalc_i.len(), h); let arc_init_r = Arc::new(precalc_r); let arc_init_i = Arc::new(precalc_i); let data = (0..WORKERS).map(|i| { let vec_init_r = arc_init_r.clone(); let vec_init_i = arc_init_i.clone(); thread::spawn(move|| { let mut res: Vec<u8> = Vec::with_capacity((chunk_size * w) / 8); let init_r_slice = vec_init_r; let start = i * chunk_size; let end = if i == (WORKERS - 1) { start + 
last_chunk_size } else { (i + 1) * chunk_size }; for &init_i in &vec_init_i[start..end] { write_line(init_i, &init_r_slice, &mut res); } res }) }).collect::<Vec<_>>(); try!(writeln!(&mut out, "P4\n{} {}", w, h)); for res in data { try!(out.write_all(&res.join().unwrap())); } out.flush() } fn write_line(init_i: f64, vec_init_r: &[f64], res: &mut Vec<u8>) { let v_init_i : f64x2 = f64x2(init_i, init_i); let v_2 : f64x2 = f64x2(2.0, 2.0); const LIMIT_SQUARED: f64 = LIMIT * LIMIT; for chunk_init_r in vec_init_r.chunks(8) { let mut cur_byte = 0xff; let mut i = 0; while i < 8 { let v_init_r = f64x2(chunk_init_r[i], chunk_init_r[i + 1]); let mut cur_r = v_init_r; let mut cur_i = v_init_i; let mut r_sq = v_init_r * v_init_r; let mut i_sq = v_init_i * v_init_i; let mut b = 0; for _ in 0..ITER { let r = cur_r; let i = cur_i; cur_i = v_2 * r * i + v_init_i; cur_r = r_sq - i_sq + v_init_r; let f64x2(bit1, bit2) = r_sq + i_sq; if bit1 > LIMIT_SQUARED { b |= 2; if b == 3 { break; } } if bit2 > LIMIT_SQUARED { b |= 1; if b == 3 { break; } } r_sq = cur_r * cur_r; i_sq = cur_i * cur_i; } cur_byte = (cur_byte << 2) + b; i += 2; } res.push(cur_byte^-1); } } fn main() { let mut args = env::args(); let res = if args.len() < 2 { println!("Test mode: do not dump the image because it's not utf8, \ which interferes with the test runner."); mandelbrot(1000, io::sink()) } else { mandelbrot(args.nth(1).unwrap().parse().unwrap(), io::stdout()) }; res.unwrap(); }<|fim▁end|>
// - Neither the name of "The Computer Language Benchmarks Game" nor // the name of "The Computer Language Shootout Benchmarks" nor the // names of its contributors may be used to endorse or promote // products derived from this software without specific prior
<|file_name|>menu.js<|end_file_name|><|fim▁begin|>import { addTimer } from './timer/timer'; import { showProfile } from './profile/profile'; import { showHelp } from './help/help'; import { getAllUsers } from './timer/listUtil/getAllUsers'; import { isWorking } from './timer/listUtil/isWorking'; import { timerTimeout } from './timer/listUtil/timerTimeout'; import { refreshPoints } from './timer/listUtil/refreshPoints'; import { minusStackCalculate } from './timer/listUtil/minusStackCalculate'; import { updUsrList } from './timer/listUtil/updUsrList'; const emojiMap = { '⏺': addTimer, '👤': showProfile, '❓': showHelp }; async function menuGUI(res, bot) { const channel = bot.channel; const timerList = {}; let userList = await getAllUsers(); // eslint-disable-line const menuMsg = await channel.send('', { embed: { description: '`⏺ Start Recording! | 👤 Your Profile | ❓ Need help?`' }, });<|fim▁hole|> } await channel.send('▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬'); await menuMsg.createReactionCollector(async (reaction, user) => { if (user.bot) return false; // run the emoji command logic and remove the reaction if (emojiMap[reaction.emoji.name]) { emojiMap[reaction.emoji.name](bot, user, timerList); } await reaction.remove(user); return false; }); res(); setInterval(() => { timerTimeout(bot, timerList); }, 60000); setInterval(async () => { refreshPoints(timerList); isWorking(userList, timerList); minusStackCalculate(); updUsrList(userList); }, 60000); } module.exports = { init: (bot) => { return new Promise((res) => { menuGUI(res, bot); }); }, };<|fim▁end|>
// add reactions to message for (const emoji in emojiMap) { await menuMsg.react(emoji);
<|file_name|>05-numpy35.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
np.identity(3)
<|file_name|>command_test.go<|end_file_name|><|fim▁begin|>package cli_test import (<|fim▁hole|> "github.com/tendermint/tendermint/Godeps/_workspace/src/github.com/codegangsta/cli" ) func TestCommandDoNotIgnoreFlags(t *testing.T) { app := cli.NewApp() set := flag.NewFlagSet("test", 0) test := []string{"blah", "blah", "-break"} set.Parse(test) c := cli.NewContext(app, set, set) command := cli.Command{ Name: "test-cmd", Aliases: []string{"tc"}, Usage: "this is for testing", Description: "testing", Action: func(_ *cli.Context) {}, } err := command.Run(c) expect(t, err.Error(), "flag provided but not defined: -break") } func TestCommandIgnoreFlags(t *testing.T) { app := cli.NewApp() set := flag.NewFlagSet("test", 0) test := []string{"blah", "blah"} set.Parse(test) c := cli.NewContext(app, set, set) command := cli.Command{ Name: "test-cmd", Aliases: []string{"tc"}, Usage: "this is for testing", Description: "testing", Action: func(_ *cli.Context) {}, SkipFlagParsing: true, } err := command.Run(c) expect(t, err, nil) }<|fim▁end|>
"flag" "testing"
<|file_name|>function.js<|end_file_name|><|fim▁begin|>var contenedor = {}; var json = []; var json_active = []; var timeout; var result = {}; $(document).ready(function() { $('#buscador').keyup(function() {   if (timeout) {     clearTimeout(timeout);     timeout = null;   }    timeout = setTimeout(function() { search(); }, 100); }); $("body").on('change', '#result', function() { result = $("#result").val(); load_content(json); }); $("body").on('click', '.asc', function() { var name = $(this).parent().attr('rel'); console.log(name); $(this).removeClass("asc").addClass("desc"); order(name, true); }); $("body").on('click', '.desc', function() { var name = $(this).parent().attr('rel'); $(this).removeClass("desc").addClass("asc"); order(name, false); }); }); function update(id,parent,valor){ for (var i=0; i< json.length; i++) { if (json[i].id === id){ json[i][parent] = valor; return; } } } function load_content(json) { max = result; data = json.slice(0, max); json_active = json; $("#numRows").html(json.length); contenedor.html(''); 2 var list = table.find("th[rel]"); var html = ''; $.each(data, function(i, value) { html += '<tr id="' + value.id + '">'; $.each(list, function(index) { valor = $(this).attr('rel'); if (valor != 'acction') { if ($(this).hasClass("editable")) { html += '<td><span class="edition" rel="' + value.id + '">' + value[valor] .substring(0, 60) +'</span></td>'; } else if($(this).hasClass("view")){ if(value[valor].length > 1){ var class_1 = $(this).data('class'); html += '<td><a href="javascript:void(0)" class="'+class_1+'" rel="'+ value[valor] + '" data-id="' + value.id + '"></a></td>'; }else{ html += '<td></td>'; } }else{ html += '<td>' + value[valor] + '</td>'; } } else { html += '<td>'; $.each(acction, function(k, data) { html += '<a class="' + data.class + '" rel="' + value[data.rel] + '" href="' + data.link + value[data.parameter] + '" target="'+data.target+'" >' + data.button + '</a>'; }); html += "</td>"; } if (index >= list.length - 1) { html 
+= '</tr>'; contenedor.append(html); html = ''; } }); }); } function selectedRow(json) { var num = result; var rows = json.length; var total = rows / num; var cant = Math.floor(total); $("#result").html(''); for (i = 0; i < cant; i++) { $("#result").append("<option value=\"" + parseInt(num) + "\">" + num + "</option>"); num = num + result; } $("#result").append("<option value=\"" + parseInt(rows) + "\">" + rows + "</option>"); } function order(prop, asc) { json = json.sort(function(a, b) { if (asc) return (a[prop] > b[prop]) ? 1 : ((a[prop] < b[prop]) ? -1 : 0); else return (b[prop] > a[prop]) ? 1 : ((b[prop] < a[prop]) ? -1 : 0); }); contenedor.html('');<|fim▁hole|> function search() { var list = table.find("th[rel]"); var data = []; var serch = $("#buscador").val(); json.forEach(function(element, index, array) { $.each(list, function(index) { valor = $(this).attr('rel'); if (element[valor]) { if (element[valor].like('%' + serch + '%')) { data.push(element); return false; } } }); }); contenedor.html(''); load_content(data); } String.prototype.like = function(search) { if (typeof search !== 'string' || this === null) { return false; } search = search.replace(new RegExp("([\\.\\\\\\+\\*\\?\\[\\^\\]\\$\\(\\)\\{\\}\\=\\!\\<\\>\\|\\:\\-])", "g"), "\\$1"); search = search.replace(/%/g, '.*').replace(/_/g, '.'); return RegExp('^' + search + '$', 'gi').test(this); } function export_csv(JSONData, ReportTitle, ShowLabel) { var arrData = typeof JSONData != 'object' ? 
JSON.parse(JSONData) : JSONData; var CSV = ''; // CSV += ReportTitle + '\r\n\n'; if (ShowLabel) { var row = ""; for (var index in arrData[0]) { row += index + ';'; } row = row.slice(0, -1); CSV += row + '\r\n'; } for (var i = 0; i < arrData.length; i++) { var row = ""; for (var index in arrData[i]) { row += '"' + arrData[i][index] + '";'; } row.slice(0, row.length - 1); CSV += row + '\r\n'; } if (CSV == '') { alert("Invalid data"); return; } // var fileName = "Report_"; //fileName += ReportTitle.replace(/ /g,"_"); var uri = 'data:text/csv;charset=utf-8,' + escape(CSV); var link = document.createElement("a"); link.href = uri; link.style = "visibility:hidden"; link.download = ReportTitle + ".csv"; document.body.appendChild(link); link.click(); document.body.removeChild(link); }<|fim▁end|>
load_content(json); }
<|file_name|>stack-probes-lto.rs<|end_file_name|><|fim▁begin|>// run-pass // ignore-arm // ignore-aarch64 // ignore-mips // ignore-mips64 // ignore-powerpc // ignore-s390x // ignore-sparc // ignore-sparc64 // ignore-wasm // ignore-emscripten no processes // ignore-sgx no processes // ignore-musl FIXME #31506 // ignore-pretty // compile-flags: -C lto // no-prefer-dynamic <|fim▁hole|><|fim▁end|>
include!("stack-probes.rs");
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup setup( name='flashback', packages=['flashback'], version='0.4', description='The handiest Flashback scraper in the game', author='Robin Linderborg', author_email='[email protected]', install_requires=[ 'beautifulsoup4==4.4.1',<|fim▁hole|> ], url='https://github.com/miroli/flashback', download_url='https://github.com/miroli/flashback/tarball/0.4', keywords=['flashback', 'scraping'], classifiers=[], )<|fim▁end|>
'requests==2.8.0'
<|file_name|>toggleEnabled.js<|end_file_name|><|fim▁begin|>/* Updates fields based on checkbox changes */ var PRFtoggleEnabled = function(cbox, id, type) { oldval = cbox.checked ? 0 : 1; var dataS = { "action" : "toggleEnabled", "id": id, "type": type, "oldval": oldval, }; data = $.param(dataS); $.ajax({ type: "POST", dataType: "json", url: site_admin_url + "/plugins/profile/ajax.php", data: data, success: function(result) { cbox.checked = result.newval == 1 ? true : false; try { if (result.newval == oldval) { icon = "<i class='uk-icon-exclamation-triangle'></i>&nbsp;"; } else { icon = "<i class='uk-icon-check'></i>&nbsp;"; } $.UIkit.notify(icon + result.statusMessage, {timeout: 1000,pos:'top-center'}); } catch(err) { $.UIkit.notify("<i class='uk-icon-exclamation-triangle'></i>&nbsp;" + result.statusMessage, {timeout: 1000,pos:'top-center'}); alert(result.statusMessage); } } }); return false; }; /** * Not a toggle function; this updates the 3-part date field with data * from the datepicker. * @param Date d Date object * @param string fld Field Name * @param integer tm_type 12- or 24-hour indicator, 0 = no time field */ function PRF_updateDate(d, fld, tm_type) { document.getElementById(fld + "_month").selectedIndex = d.getMonth() + 1; document.getElementById(fld + "_day").selectedIndex = d.getDate(); document.getElementById(fld + "_year").value = d.getFullYear(); // Update the time, if time fields are present. if (tm_type != 0) { var hour = d.getHours(); var ampm = 0; if (tm_type == "12") { if (hour == 0) { hour = 12; } else if (hour > 12) { hour -= 12; ampm = 1; } document.getElementById(fld + "_ampm").selectedIndex = ampm;<|fim▁hole|> document.getElementById(fld + "_hour").selectedIndex = hour; document.getElementById(fld + "_minute").selectedIndex = d.getMinutes(); } }<|fim▁end|>
}
<|file_name|>read.py<|end_file_name|><|fim▁begin|>import numpy as np from skimage import io def read_image(fn, normalize=True): """Read a CCD/CMOS image in .da format (Redshirt). [1_] Parameters ---------- fn : string The input filename. Returns ------- images : array, shape (nrow, ncol, nframes) The images (normalized by the dark frame if desired). frame_interval : float The time elapsed between frames, in milliseconds. bnc : array, shape (8, nframes) The bnc data. dark_frame : array, shape (nrow, ncol) The dark frame by which the image data should be normalized. Notes ----- Interlaced images, as produced by the option "write directly to disk", are not currently supported. References<|fim▁hole|> ---------- .. [1] http://www.redshirtimaging.com/support/dfo.html """ data = np.fromfile(fn, dtype=np.int16) header_size = 2560 header = data[:header_size] ncols, nrows = map(int, header[384:386]) # prevent int16 overflow nframes = int(header[4]) frame_interval = header[388] / 1000 acquisition_ratio = header[391] if frame_interval >= 10: frame_interval *= header[390] # dividing factor image_size = nrows * ncols * nframes bnc_start = header_size + image_size images = np.reshape(np.array(data[header_size:bnc_start]), (nrows, ncols, nframes)) bnc_end = bnc_start + 8 * acquisition_ratio * nframes bnc = np.reshape(np.array(data[bnc_start:bnc_end]), (8, nframes * acquisition_ratio)) dark_frame = np.reshape(np.array(data[bnc_end:-8]), (nrows, ncols)) if normalize: images -= dark_frame[..., np.newaxis] return images, frame_interval, bnc, dark_frame def convert_images(fns, normalize=True): for fn in fns: image, frame_interval, bnc, dark_frame = read_image(fn, normalize) out_fn = fn[:-3] + '.tif' out_fn_dark = fn[:-3] + '.dark_frame.tif' io.imsave(out_fn, np.transpose(image, (2, 0, 1)), plugin='tifffile', compress=1) io.imsave(out_fn_dark, dark_frame, plugin='tifffile', compress=1)<|fim▁end|>
<|file_name|>kyototycoon.py<|end_file_name|><|fim▁begin|>from functools import partial import time from ukt import KT_NONE from ukt import KyotoTycoon from huey.api import Huey from huey.constants import EmptyData from huey.storage import BaseStorage from huey.utils import decode <|fim▁hole|> class KyotoTycoonStorage(BaseStorage): priority = True def __init__(self, name='huey', host='127.0.0.1', port=1978, db=None, timeout=None, max_age=3600, queue_db=None, client=None, blocking=False, result_expire_time=None): super(KyotoTycoonStorage, self).__init__(name) if client is None: client = KyotoTycoon(host, port, timeout, db, serializer=KT_NONE, max_age=max_age) self.blocking = blocking self.expire_time = result_expire_time self.kt = client self._db = db self._queue_db = queue_db if queue_db is not None else db self.qname = self.name + '.q' self.sname = self.name + '.s' self.q = self.kt.Queue(self.qname, self._queue_db) self.s = self.kt.Schedule(self.sname, self._queue_db) def enqueue(self, data, priority=None): self.q.add(data, priority) def dequeue(self): if self.blocking: return self.q.bpop(timeout=30) else: return self.q.pop() def queue_size(self): return len(self.q) def enqueued_items(self, limit=None): return self.q.peek(n=limit or -1) def flush_queue(self): return self.q.clear() def convert_ts(self, ts): return int(time.mktime(ts.timetuple())) def add_to_schedule(self, data, ts, utc): self.s.add(data, self.convert_ts(ts)) def read_schedule(self, ts): return self.s.read(self.convert_ts(ts)) def schedule_size(self): return len(self.s) def scheduled_items(self, limit=None): return self.s.items(limit) def flush_schedule(self): return self.s.clear() def prefix_key(self, key): return '%s.%s' % (self.qname, decode(key)) def put_data(self, key, value, is_result=False): xt = self.expire_time if is_result else None self.kt.set(self.prefix_key(key), value, self._db, expire_time=xt) def peek_data(self, key): result = self.kt.get_bytes(self.prefix_key(key), self._db) return 
EmptyData if result is None else result def pop_data(self, key): if self.expire_time is not None: return self.peek_data(key) result = self.kt.seize(self.prefix_key(key), self._db) return EmptyData if result is None else result def delete_data(self, key): return self.kt.seize(self.prefix_key(key), self._db) is not None def has_data_for_key(self, key): return self.kt.exists(self.prefix_key(key), self._db) def put_if_empty(self, key, value): return self.kt.add(self.prefix_key(key), value, self._db) def result_store_size(self): return len(self.kt.match_prefix(self.prefix_key(''), db=self._db)) def result_items(self): prefix = self.prefix_key('') keys = self.kt.match_prefix(prefix, db=self._db) result = self.kt.get_bulk(keys, self._db) plen = len(prefix) return {key[plen:]: value for key, value in result.items()} def flush_results(self): prefix = self.prefix_key('') keys = self.kt.match_prefix(prefix, db=self._db) return self.kt.remove_bulk(keys, self._db) def flush_all(self): self.flush_queue() self.flush_schedule() self.flush_results() class KyotoTycoonHuey(Huey): storage_class = KyotoTycoonStorage<|fim▁end|>
<|file_name|>setup.ts<|end_file_name|><|fim▁begin|>import 'aurelia-polyfills'; import 'aurelia-loader-webpack'; import {initialize} from 'aurelia-pal-browser'; import { PLATFORM } from 'aurelia-pal'; initialize();<|fim▁hole|><|fim▁end|>
PLATFORM.moduleName('test/resources/view-model-1'); PLATFORM.moduleName('test/resources/view-model-1.html');
<|file_name|>build.d.ts<|end_file_name|><|fim▁begin|>import { MinifyOptions as TerserOptions } from "terser"; import { Configuration as WebpackConfiguration } from "webpack"; import { BundleAnalyzerPlugin } from "webpack-bundle-analyzer"; import * as WebpackChain from "webpack-chain"; import { DefinedDefaultAlgorithmAndOptions } from "compression-webpack-plugin"; import { QuasarHookParams } from "./conf"; interface InvokeParams { isClient: boolean; isServer: boolean; } interface QuasarStaticBuildConfiguration { /** * Transpile JS code with Babel * * @default true */ transpile?: boolean; /** * Add dependencies for transpiling with Babel (from node_modules, which are by default not transpiled). * It is ignored if "transpile" is not set to true. * @example [ /my-dependency/, 'my-dep', ...] */ transpileDependencies?: (RegExp | string)[]; /** * Add support for also referencing assets for custom tags props. * * @example { 'my-img-comp': 'src', 'my-avatar': [ 'src', 'placeholder-src' ]}<|fim▁hole|> /** * Extend Webpack config generated by Quasar CLI. * Equivalent to chainWebpack(), but you have direct access to the Webpack config object. */ extendWebpack?: ( config: WebpackConfiguration, invokeParams: InvokeParams ) => void; /** * Extend Webpack config generated by Quasar CLI. * Equivalent to extendWebpack(), but using [webpack-chain](https://github.com/neutrinojs/webpack-chain) instead. */ chainWebpack?: (chain: WebpackChain, invokeParams: InvokeParams) => void; /** * Prepare external services before `$ quasar dev` command runs * like starting some backend or any other service that the app relies on. * Can use async/await or directly return a Promise. */ beforeDev?: (params: QuasarHookParams) => void; /** * Run hook after Quasar dev server is started (`$ quasar dev`). * At this point, the dev server has been started and is available should you wish to do something with it. * Can use async/await or directly return a Promise. 
*/ afterDev?: (params: QuasarHookParams) => void; /** * Run hook before Quasar builds app for production (`$ quasar build`). * At this point, the distributables folder hasn’t been created yet. * Can use async/await or directly return a Promise. */ beforeBuild?: (params: QuasarHookParams) => void; /** * Run hook after Quasar built app for production (`$ quasar build`). * At this point, the distributables folder has been created and is available * should you wish to do something with it. * Can use async/await or directly return a Promise. */ afterBuild?: (params: QuasarHookParams) => void; /** * Run hook if publishing was requested (`$ quasar build -P`), * after Quasar built app for production and the afterBuild hook (if specified) was executed. * Can use async/await or directly return a Promise. * `opts` is Object of form `{arg, distDir}`, * where “arg” is the argument supplied (if any) to -P parameter. */ onPublish?: (ops: { arg: string; distDir: string }) => void; /** * Public path of your app. * Use it when your public path is something else, * like _“<protocol>://<domain>/some/nested/folder”_ – in this case, * it means the distributables are in _“some/nested/folder”_ on your webserver. * * @default '/' */ publicPath?: string; /** * Sets [Vue Router mode](https://router.vuejs.org/guide/essentials/history-mode.html). * History mode requires configuration on your deployment web server too. * * @default 'hash' */ vueRouterMode?: "hash" | "history"; /** * @default 'index.html' */ htmlFilename?: string; /** * When using SSR+PWA, this is the name of the * PWA index html file. * * Do NOT use index.html as name as it will mess SSR up! * * @default 'offline.html' */ ssrPwaHtmlFilename?: string; /** * Folder where Quasar CLI should generate the distributables. * Relative path to project root directory. * * @default 'dist/{ctx.modeName}' For all modes except Cordova. * @default 'src-cordova/www' For Cordova mode. 
*/ distDir?: string; /** * Source map [strategy](https://webpack.js.org/configuration/devtool/) to use. */ devtool?: WebpackConfiguration["devtool"]; /** * Add properties to `process.env` that you can use in your website/app JS code. * * @example { SOMETHING: 'someValue' } */ env?: { [index: string]: string }; /** * Gzip the distributables. * Could be either a boolean or compression plugin options object. * In addition, you can specify which file extension you want to * gzip with extension array field in replacement of compression plugin test option. * By default it's ['js','css']. * @example * { * extension: ['js','css','svg'], * threshold: 0, * minRatio: 1 * } * @default false */ gzip?: | boolean | (DefinedDefaultAlgorithmAndOptions<any> & { extensions: string[]; }); /** * Show analysis of build bundle with webpack-bundle-analyzer. * When providing an object, it represents webpack-bundle-analyzer config options. */ analyze?: boolean | BundleAnalyzerPlugin.Options; /** Include vue runtime + compiler version, instead of default Vue runtime-only. */ vueCompiler?: boolean; /** Include support for Vue Options API (default is: true) */ vueOptionsApi?: boolean; /** * Minification options. [Full list](https://github.com/webpack-contrib/terser-webpack-plugin/#minify). */ uglifyOptions?: TerserOptions; /** Options to supply to `sass-loader` for `.scss` files. */ scssLoaderOptions?: object; /** Options to supply to `sass-loader` for [`.sass`](https://github.com/webpack-contrib/sass-loader#sassoptions) files. */ sassLoaderOptions?: object; /** Options to supply to `stylus-loader`. */ stylusLoaderOptions?: object; /** Options to supply to `less-loader`. */ lessLoaderOptions?: object; /** Options to supply to `vue-loader` */ vueLoaderOptions?: object; /** * RTL options. [Full list](https://github.com/vkalinichev/postcss-rtl). 
* When providing an object, it is the configuration for postcss-rtl plugin, and if fromRTL is present it will only be used for client styles * When providing a function, the function will receive a boolean that is true for client side styles and false otherwise and the path to the style file * */ rtl?: | boolean | object | ((isClientCSS: boolean, resourcePath: string) => object); } /** * Following properties of `build` are automatically configured by Quasar CLI * depending on dev/build commands and Quasar mode. * You can override some, but make sure you know what you are doing. */ interface QuasarDynamicBuildConfiguration { /** Extract CSS from Vue files. */ extractCSS?: boolean; /** Use source maps. */ sourceMap?: boolean; /** Minify code (html, js, css). */ minify?: boolean; } export type QuasarBuildConfiguration = QuasarStaticBuildConfiguration & QuasarDynamicBuildConfiguration;<|fim▁end|>
*/ transformAssetsUrls?: Record<string, string | string[]>; /** Show a progress bar while compiling. */ showProgress?: boolean;
<|file_name|>Camera.ts<|end_file_name|><|fim▁begin|>export const Camera = ` <svg viewBox="0 0 28 28"> <g fill="none" fill-rule="evenodd"><|fim▁hole|> <path d="M3 3h22a2 2 0 012 2v18a2 2 0 01-2 2H3a2 2 0 01-2-2V5a2 2 0 012-2z" stroke="currentColor"/> <circle stroke="currentColor" cx="14" cy="14" r="5"/> <path d="M22 7h1" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round"/> </g> </svg>`;<|fim▁end|>
<|file_name|>BaseAppCompatActivity.java<|end_file_name|><|fim▁begin|>package com.bonepeople.android.sdcardcleaner.basic; import android.content.DialogInterface; import android.content.pm.PackageManager; import android.os.Message; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.appcompat.app.AlertDialog; import androidx.appcompat.app.AppCompatActivity; import androidx.core.app.ActivityCompat; import androidx.core.content.ContextCompat; import com.bonepeople.android.sdcardcleaner.R; import java.util.LinkedList;<|fim▁hole|> * <p> * Created by bonepeople on 2017/12/25. */ public abstract class BaseAppCompatActivity extends AppCompatActivity { private LinkedList<BaseHandler> handlers = null; protected final BaseHandler createHandler() { if (handlers == null) handlers = new LinkedList<>(); BaseHandler handler = new BaseHandler(this); handlers.add(handler); return handler; } protected void handleMessage(Message msg) { } @Override public final void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { super.onRequestPermissionsResult(requestCode, permissions, grantResults); onRequestPermission(requestCode, grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED); } /** * 权限申请的回调函数 * * @param requestCode 权限申请的请求ID * @param granted 权限请求的结果 true-获得权限,false-拒绝权限 */ protected void onRequestPermission(int requestCode, boolean granted) { } /** * 检查权限是否已被授权 * * @param permission android.Manifest.permission * @return boolean 是否拥有对应权限 */ protected boolean checkPermission(@NonNull String permission) { return ContextCompat.checkSelfPermission(this, permission) == PackageManager.PERMISSION_GRANTED; } /** * 申请指定权限 * <p> * 申请某一权限,如需提示用户会创建AlertDialog对用户进行提示,权限申请的结果需要重写{@link #onRequestPermission(int, boolean)}接收 * </p> * * @param permission android.Manifest.permission * @param rationale 提示信息 */ protected void requestPermission(@NonNull final String permission, @Nullable 
String rationale, final int requestCode) { if (ActivityCompat.shouldShowRequestPermissionRationale(this, permission)) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setMessage(rationale); builder.setPositiveButton(R.string.caption_button_positive, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { ActivityCompat.requestPermissions(BaseAppCompatActivity.this, new String[]{permission}, requestCode); } }); builder.setNegativeButton(R.string.caption_button_negative, null); builder.create().show(); } else ActivityCompat.requestPermissions(this, new String[]{permission}, requestCode); } @Override protected void onDestroy() { if (handlers != null) { for (BaseHandler handler : handlers) { handler.destroy(); } handlers.clear(); handlers = null; } super.onDestroy(); } }<|fim▁end|>
/** * 集成对Handler控制的基类
<|file_name|>account_report_account_balance.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as<|fim▁hole|># published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv class account_balance_report(osv.osv_memory): _inherit = "account.common.account.report" _name = 'account.balance.report' _description = 'Trial Balance Report' _columns = { 'journal_ids': fields.many2many('account.journal', 'account_balance_report_journal_rel', 'account_id', 'journal_id', 'Journals', required=True), } _defaults = { 'journal_ids': [], } def _print_report(self, cr, uid, ids, data, context=None): data = self.pre_print_report(cr, uid, ids, data, context=context) return {'type': 'ir.actions.report.xml', 'report_name': 'account.account.balance', 'datas': data} # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
<|file_name|>part.go<|end_file_name|><|fim▁begin|>package enmime import ( "bufio" "bytes" "encoding/base64" "fmt" "io" "mime" "mime/multipart" "net/textproto" "strings" "github.com/sloonz/go-qprintable" ) // MIMEPart is the primary interface enmine clients will use. Each MIMEPart represents // a node in the MIME multipart tree. The Content-Type, Disposition and File Name are // parsed out of the header for easier access. // // TODO Content should probably be a reader so that it does not need to be stored in // memory. type MIMEPart interface { Parent() MIMEPart // Parent of this part (can be nil) FirstChild() MIMEPart // First (top most) child of this part NextSibling() MIMEPart // Next sibling of this part Header() textproto.MIMEHeader // Header as parsed by textproto package ContentType() string // Content-Type header without parameters Disposition() string // Content-Disposition header without parameters FileName() string // File Name from disposition or type header Charset() string // Content Charset Content() []byte // Decoded content of this part (can be empty) } // memMIMEPart is an in-memory implementation of the MIMEPart interface. It will likely // choke on huge attachments. type memMIMEPart struct { parent MIMEPart firstChild MIMEPart nextSibling MIMEPart header textproto.MIMEHeader contentType string disposition string fileName string charset string content []byte } // NewMIMEPart creates a new memMIMEPart object. It does not update the parents FirstChild // attribute. 
func NewMIMEPart(parent MIMEPart, contentType string) *memMIMEPart { return &memMIMEPart{parent: parent, contentType: contentType} } // Parent of this part (can be nil) func (p *memMIMEPart) Parent() MIMEPart { return p.parent } // First (top most) child of this part func (p *memMIMEPart) FirstChild() MIMEPart { return p.firstChild } // Next sibling of this part func (p *memMIMEPart) NextSibling() MIMEPart { return p.nextSibling } // Header as parsed by textproto package func (p *memMIMEPart) Header() textproto.MIMEHeader { return p.header } // Content-Type header without parameters func (p *memMIMEPart) ContentType() string { return p.contentType } // Content-Disposition header without parameters func (p *memMIMEPart) Disposition() string { return p.disposition } // File Name from disposition or type header func (p *memMIMEPart) FileName() string { return p.fileName } // Content charset func (p *memMIMEPart) Charset() string { return p.charset } // Decoded content of this part (can be empty) func (p *memMIMEPart) Content() []byte { return p.content } // ParseMIME reads a MIME document from the provided reader and parses it into // tree of MIMEPart objects. func ParseMIME(reader *bufio.Reader) (MIMEPart, error) { tr := textproto.NewReader(reader) header, err := tr.ReadMIMEHeader() if err != nil { return nil, err } mediatype, params, err := mime.ParseMediaType(header.Get("Content-Type")) if err != nil { return nil, err } root := &memMIMEPart{header: header, contentType: mediatype} if strings.HasPrefix(mediatype, "multipart/") { boundary := params["boundary"] err = parseParts(root, reader, boundary) if err != nil { return nil, err } } else { // Content is text or data, decode it content, err := decodeSection(header.Get("Content-Transfer-Encoding"), reader) if err != nil { return nil, err } root.content = content } return root, nil } // parseParts recursively parses a mime multipart document. 
func parseParts(parent *memMIMEPart, reader io.Reader, boundary string) error { var prevSibling *memMIMEPart // Loop over MIME parts mr := multipart.NewReader(reader, boundary) for { // mrp is golang's built in mime-part mrp, err := mr.NextPart() if err != nil { if err == io.EOF { // This is a clean end-of-message signal break } return err } if len(mrp.Header) == 0 { // Empty header probably means the part didn't using the correct trailing "--" // syntax to close its boundary. We will let this slide if this this the // last MIME part. if _, err := mr.NextPart(); err != nil { if err == io.EOF || strings.HasSuffix(err.Error(), "EOF") { // This is what we were hoping for break } else { return fmt.Errorf("Error at boundary %v: %v", boundary, err) } } return fmt.Errorf("Empty header at boundary %v", boundary) } ctype := mrp.Header.Get("Content-Type") if ctype == "" { return fmt.Errorf("Missing Content-Type at boundary %v", boundary) } mediatype, mparams, err := mime.ParseMediaType(ctype) if err != nil { return err<|fim▁hole|> p.header = mrp.Header if prevSibling != nil { prevSibling.nextSibling = p } else { parent.firstChild = p } prevSibling = p // Figure out our disposition, filename disposition, dparams, err := mime.ParseMediaType(mrp.Header.Get("Content-Disposition")) if err == nil { // Disposition is optional p.disposition = disposition p.fileName = DecodeHeader(dparams["filename"]) } if p.fileName == "" && mparams["name"] != "" { p.fileName = DecodeHeader(mparams["name"]) } if p.fileName == "" && mparams["file"] != "" { p.fileName = DecodeHeader(mparams["file"]) } if p.charset == "" { p.charset = mparams["charset"] } boundary := mparams["boundary"] if boundary != "" { // Content is another multipart err = parseParts(p, mrp, boundary) if err != nil { return err } } else { // Content is text or data, decode it data, err := decodeSection(mrp.Header.Get("Content-Transfer-Encoding"), mrp) if err != nil { return err } p.content = data } } return nil } // decodeSection 
attempts to decode the data from reader using the algorithm listed in // the Content-Transfer-Encoding header, returning the raw data if it does not known // the encoding type. func decodeSection(encoding string, reader io.Reader) ([]byte, error) { // Default is to just read input into bytes decoder := reader switch strings.ToLower(encoding) { case "quoted-printable": decoder = qprintable.NewDecoder(qprintable.WindowsTextEncoding, reader) case "base64": cleaner := NewBase64Cleaner(reader) decoder = base64.NewDecoder(base64.StdEncoding, cleaner) } // Read bytes into buffer buf := new(bytes.Buffer) _, err := buf.ReadFrom(decoder) if err != nil { return nil, err } return buf.Bytes(), nil }<|fim▁end|>
} // Insert ourselves into tree, p is enmime's mime-part p := NewMIMEPart(parent, mediatype)
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>extern crate idna; extern crate rustc_serialize; extern crate test;<|fim▁hole|>mod punycode; mod uts46; fn main() { let mut tests = Vec::new(); { let mut add_test = |name, run| { tests.push(test::TestDescAndFn { desc: test::TestDesc::new(test::DynTestName(name)), testfn: run, }) }; punycode::collect_tests(&mut add_test); uts46::collect_tests(&mut add_test); } test::test_main(&std::env::args().collect::<Vec<_>>(), tests) }<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #![feature(box_syntax)] #![feature(custom_derive)] #![feature(box_raw)] #![feature(plugin)] #![feature(slice_patterns)] #![feature(step_by)] #![feature(vec_push_all)] #![feature(custom_attribute)] #![plugin(serde_macros, plugins)] #![plugin(regex_macros)] extern crate euclid; extern crate hyper; extern crate ipc_channel; #[macro_use] extern crate log; extern crate png; extern crate regex; extern crate serde; extern crate stb_image; extern crate url; extern crate util; extern crate msg; use hyper::header::{ContentType, Headers}; use hyper::http::RawStatus; use hyper::method::Method; use hyper::mime::{Mime, Attr}; use ipc_channel::ipc::{self, IpcReceiver, IpcSender}; use msg::constellation_msg::{PipelineId}; use regex::Regex; use serde::{Deserializer, Serializer}; use url::Url; use util::mem::HeapSizeOf; use std::thread; pub mod hosts; pub mod image_cache_task; pub mod net_error_list; pub mod storage_task; pub static IPV4_REGEX: Regex = regex!( r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"); pub static IPV6_REGEX: Regex = regex!(r"^([a-fA-F0-9]{0,4}[:]?){1,8}(/\d{1,3})?$"); /// Image handling. /// /// It may be surprising that this goes in the network crate as opposed to the graphics crate. /// However, image handling is generally very integrated with the network stack (especially where /// caching is involved) and as a result it must live in here. 
pub mod image { pub mod base; } #[derive(Clone, Deserialize, Serialize, HeapSizeOf)] pub struct LoadData { pub url: Url, pub method: Method, #[ignore_heap_size_of = "Defined in hyper"] /// Headers that will apply to the initial request only pub headers: Headers, #[ignore_heap_size_of = "Defined in hyper"] /// Headers that will apply to the initial request and any redirects pub preserved_headers: Headers, pub data: Option<Vec<u8>>, pub cors: Option<ResourceCORSData>, pub pipeline_id: Option<PipelineId>, } impl LoadData { pub fn new(url: Url, id: Option<PipelineId>) -> LoadData { LoadData { url: url, method: Method::Get, headers: Headers::new(), preserved_headers: Headers::new(), data: None, cors: None, pipeline_id: id, } } } /// A listener for asynchronous network events. Cancelling the underlying request is unsupported. pub trait AsyncResponseListener { /// The response headers for a request have been received. fn headers_available(&self, metadata: Metadata); /// A portion of the response body has been received. This data is unavailable after /// this method returned, and must be stored accordingly. fn data_available(&self, payload: Vec<u8>); /// The response is complete. If the provided status is an Err value, there is no guarantee /// that the response body was completely read. fn response_complete(&self, status: Result<(), String>); } /// Data for passing between threads/processes to indicate a particular action to /// take on a provided network listener. #[derive(Deserialize, Serialize)] pub enum ResponseAction { /// Invoke headers_available HeadersAvailable(Metadata), /// Invoke data_available DataAvailable(Vec<u8>), /// Invoke response_complete ResponseComplete(Result<(), String>) } impl ResponseAction { /// Execute the default action on a provided listener. 
pub fn process(self, listener: &AsyncResponseListener) { match self { ResponseAction::HeadersAvailable(m) => listener.headers_available(m), ResponseAction::DataAvailable(d) => listener.data_available(d), ResponseAction::ResponseComplete(r) => listener.response_complete(r), } } } /// A target for async networking events. Commonly used to dispatch a runnable event to another /// thread storing the wrapped closure for later execution. #[derive(Deserialize, Serialize)] pub struct AsyncResponseTarget { pub sender: IpcSender<ResponseAction>, } impl AsyncResponseTarget { pub fn invoke_with_listener(&self, action: ResponseAction) { self.sender.send(action).unwrap() } } /// A wrapper for a network load that can either be channel or event-based. #[derive(Deserialize, Serialize)] pub enum LoadConsumer { Channel(IpcSender<LoadResponse>), Listener(AsyncResponseTarget), } /// Handle to a resource task pub type ResourceTask = IpcSender<ControlMsg>; #[derive(PartialEq, Copy, Clone, Deserialize, Serialize)] pub enum IncludeSubdomains { Included, NotIncluded } #[derive(Deserialize, Serialize)] pub enum ControlMsg { /// Request the data associated with a particular URL Load(LoadData, LoadConsumer), /// Store a set of cookies for a given originating URL SetCookiesForUrl(Url, String, CookieSource), /// Retrieve the stored cookies for a given URL GetCookiesForUrl(Url, IpcSender<Option<String>>, CookieSource), /// Store a domain's STS information SetHSTSEntryForHost(String, IncludeSubdomains, u64), Exit } /// Initialized but unsent request. Encapsulates everything necessary to instruct /// the resource task to make a new request. The `load` method *must* be called before /// destruction or the task will panic. 
pub struct PendingAsyncLoad { resource_task: ResourceTask, url: Url, pipeline: Option<PipelineId>, guard: PendingLoadGuard, } struct PendingLoadGuard { loaded: bool, } impl PendingLoadGuard { fn neuter(&mut self) { self.loaded = true; } } impl Drop for PendingLoadGuard { fn drop(&mut self) { if !thread::panicking() { assert!(self.loaded) } } } impl PendingAsyncLoad { pub fn new(resource_task: ResourceTask, url: Url, pipeline: Option<PipelineId>) -> PendingAsyncLoad { PendingAsyncLoad { resource_task: resource_task, url: url, pipeline: pipeline, guard: PendingLoadGuard { loaded: false, }, } } /// Initiate the network request associated with this pending load. pub fn load(mut self) -> IpcReceiver<LoadResponse> { self.guard.neuter(); let load_data = LoadData::new(self.url, self.pipeline); let (sender, receiver) = ipc::channel().unwrap(); let consumer = LoadConsumer::Channel(sender); self.resource_task.send(ControlMsg::Load(load_data, consumer)).unwrap(); receiver } /// Initiate the network request associated with this pending load, using the provided target. pub fn load_async(mut self, listener: AsyncResponseTarget) { self.guard.neuter(); let load_data = LoadData::new(self.url, self.pipeline); let consumer = LoadConsumer::Listener(listener); self.resource_task.send(ControlMsg::Load(load_data, consumer)).unwrap(); } } /// Message sent in response to `Load`. Contains metadata, and a port /// for receiving the data. /// /// Even if loading fails immediately, we send one of these and the /// progress_port will provide the error. #[derive(Serialize, Deserialize)] pub struct LoadResponse { /// Metadata, such as from HTTP headers. pub metadata: Metadata, /// Port for reading data. pub progress_port: IpcReceiver<ProgressMsg>, } #[derive(Clone, Deserialize, Serialize, HeapSizeOf)] pub struct ResourceCORSData { /// CORS Preflight flag pub preflight: bool, /// Origin of CORS Request pub origin: Url, } /// Metadata about a loaded resource, such as is obtained from HTTP headers. 
#[derive(Clone, Deserialize, Serialize, HeapSizeOf)] pub struct Metadata { /// Final URL after redirects. pub final_url: Url, /// MIME type / subtype. pub content_type: Option<(ContentType)>, /// Character set. pub charset: Option<String>, #[ignore_heap_size_of = "Defined in hyper"] /// Headers pub headers: Option<Headers>, /// HTTP Status pub status: Option<RawStatus>, } impl Metadata { /// Metadata with defaults for everything optional. pub fn default(url: Url) -> Self { Metadata { final_url: url, content_type: None, charset: None, headers: None, // https://fetch.spec.whatwg.org/#concept-response-status-message status: Some(RawStatus(200, "OK".into())), } } /// Extract the parts of a Mime that we care about. pub fn set_content_type(&mut self, content_type: Option<&Mime>) { match content_type { None => (), Some(mime) => { self.content_type = Some(ContentType(mime.clone())); let &Mime(_, _, ref parameters) = mime; for &(ref k, ref v) in parameters { if &Attr::Charset == k { self.charset = Some(v.to_string()); } } } } } } /// The creator of a given cookie #[derive(PartialEq, Copy, Clone, Deserialize, Serialize)] pub enum CookieSource { /// An HTTP API HTTP, /// A non-HTTP API NonHTTP, } /// Messages sent in response to a `Load` message #[derive(PartialEq, Debug, Deserialize, Serialize)] pub enum ProgressMsg { /// Binary data - there may be multiple of these Payload(Vec<u8>), /// Indicates loading is complete, either successfully or not Done(Result<(), String>) } /// Convenience function for synchronously loading a whole resource. 
pub fn load_whole_resource(resource_task: &ResourceTask, url: Url) -> Result<(Metadata, Vec<u8>), String> { let (start_chan, start_port) = ipc::channel().unwrap(); resource_task.send(ControlMsg::Load(LoadData::new(url, None), LoadConsumer::Channel(start_chan))).unwrap(); let response = start_port.recv().unwrap(); let mut buf = vec!(); loop { match response.progress_port.recv().unwrap() { ProgressMsg::Payload(data) => buf.push_all(&data), ProgressMsg::Done(Ok(())) => return Ok((response.metadata, buf)), ProgressMsg::Done(Err(e)) => return Err(e) } } } <|fim▁hole|> let iter = ProgressMsgPortIterator { progress_port: response.progress_port }; (response.metadata, iter) } /// Iterator that reads chunks of bytes from a ProgressMsg port pub struct ProgressMsgPortIterator { progress_port: IpcReceiver<ProgressMsg>, } impl Iterator for ProgressMsgPortIterator { type Item = Vec<u8>; fn next(&mut self) -> Option<Vec<u8>> { match self.progress_port.recv().unwrap() { ProgressMsg::Payload(data) => Some(data), ProgressMsg::Done(Ok(())) => None, ProgressMsg::Done(Err(e)) => { error!("error receiving bytes: {}", e); None } } } }<|fim▁end|>
/// Load a URL asynchronously and iterate over chunks of bytes from the response. pub fn load_bytes_iter(pending: PendingAsyncLoad) -> (Metadata, ProgressMsgPortIterator) { let input_port = pending.load(); let response = input_port.recv().unwrap();
<|file_name|>ES2ChildDataset.java<|end_file_name|><|fim▁begin|>package org.openlca.olcatdb.ecospold2; import org.openlca.olcatdb.parsing.Context; /** * @Element childActivityDataset * @ContentModel (activityDescription, flowData, modellingAndValidation, * administrativeInformation, namespace:uri="##other") */ @Context(name = "childActivityDataset", parentName = "ecoSpold") public class ES2ChildDataset extends ES2Dataset { <|fim▁hole|>}<|fim▁end|>
<|file_name|>Log.cpp<|end_file_name|><|fim▁begin|>// (C) 2004 by Khaled Daham, <[email protected]> // // Singleton // #include <iterator> #include "Log.h" #include <stdio.h> #include <stdarg.h> #include <windows.h> namespace ps2emu { typedef std::map<int32, std::string>::iterator m_mapIterator; /////////////////////////////// PUBLIC /////////////////////////////////////// Log* Log::_instance = 0; <|fim▁hole|> return _instance; } void Log::Error(const std::string& message) { OutputDebugStringA(message.c_str()); } void Log::Error(const int32 errNumber) { } void Log::Error(const int32 errNumber, const std::string& message) { OutputDebugStringA(message.c_str()); } void Log::Warning(const std::string& message) { if ( !(message == *m_pLastMessage) ) { m_numLastMessage++; return; } OutputDebugStringA(message.c_str()); return; } void Log::Warning(const int32 errNumber) { return; } void Log::Trace(const std::string& message) { //OutputDebugStringA(message.c_str()); return; } void Log::Trace(int32 level, const std::string& message) { if (!m_isTraceActive) return; switch(level) { case 0: //m_pOut->AppendText(message); break; case 1: //m_pOut->AppendText(" " + message); break; case 2: //m_pOut->AppendText(" " + message); break; case 3: //m_pOut->AppendText(" " + message); break; default: //m_pOut->AppendText(message); break; } //OutputDebugStringA(message.c_str()); return; } void Log::SetTextCtrl(wxTextCtrl* out) { return; } std::string Log::Format(const char* fmt, ... 
) { va_list args; char message[512]; va_start(args, fmt); vsnprintf(message, 512, fmt, args); va_end(args); std::string out(message); return out; } /////////////////////////////// PRIVATE /////////////////////////////////////// Log::Log() : m_isTraceActive(true), m_oldTraceState(true) { m_numLastMessage = 0; m_pLastMessage = new std::string(); object.insert(std::pair<int32, std::string>(0, "")); object.insert(std::pair<int32, std::string>(E_TIMEOUT, "Operation timed out")); object.insert(std::pair<int32, std::string>(E_NO_LINK, "No connection to ps2link server")); object.insert(std::pair<int32, std::string>(E_SOCK_CLOSE, "Connection reset by peer")); object.insert(std::pair<int32, std::string>(E_FILE_OPEN, "Unable to open file")); object.insert(std::pair<int32, std::string>(E_FILE_READ, "Unable to read from file")); object.insert(std::pair<int32, std::string>(E_FILE_WRITE, "Unable to write to file")); object.insert(std::pair<int32, std::string>(E_FILE_EOF, "EOF reached")); object.insert(std::pair<int32, std::string>(E_VIF_DECODE, "Bad VIF code")); } Log::~Log() { } }<|fim▁end|>
Log* Log::Instance() { if(_instance == 0) { _instance = new Log; }
<|file_name|>lc0796_rotate_string.py<|end_file_name|><|fim▁begin|>"""Leetcode 796. Rotate String Easy <|fim▁hole|> A shift on A consists of taking string A and moving the leftmost character to the rightmost position. For example, if A = 'abcde', then it will be 'bcdea' after one shift on A. Return True if and only if A can become B after some number of shifts on A. Example 1: Input: A = 'abcde', B = 'cdeab' Output: true Example 2: Input: A = 'abcde', B = 'abced' Output: false Note: A and B will have length at most 100. """ class SolutionStringConcatSubstring(object): def rotateString(self, A, B): """ :type A: str :type B: str :rtype: bool Time complexity: O(2n+2n*n)=O(n^2). Space complexity:O(n). """ # Check if lengths are not equal. if len(A) != len(B): return False # If rotate string, B is substring of concated string A + A. AA = A + A if B in AA: return True else: return False def main(): # Input: A = 'abcde', B = 'cdeab' # Output: true A = 'abcde' B = 'cdeab' print SolutionStringConcatSubstring().rotateString(A, B) # Input: A = 'abcde', B = 'abced' # Output: false A = 'abcde' B = 'abced' print SolutionStringConcatSubstring().rotateString(A, B) if __name__ == '__main__': main()<|fim▁end|>
URL: https://leetcode.com/problems/rotate-string/ We are given two strings, A and B.
<|file_name|>range1d.ts<|end_file_name|><|fim▁begin|>import {Range} from "./range" import * as p from "core/properties" export namespace Range1d { export interface Attrs extends Range.Attrs { start: number end: number } export interface Props extends Range.Props {} } export interface Range1d extends Range1d.Attrs {} export class Range1d extends Range { properties: Range1d.Props constructor(attrs?: Partial<Range1d.Attrs>) { super(attrs) } static initClass(): void { this.prototype.type = "Range1d" this.define({ start: [ p.Number, 0 ], end: [ p.Number, 1 ], }) } protected _initial_start: number protected _initial_end: number protected _set_auto_bounds(): void { if (this.bounds == 'auto') { const min = Math.min(this._initial_start, this._initial_end) const max = Math.max(this._initial_start, this._initial_end) this.setv({bounds: [min, max]}, {silent: true}) } } initialize(): void { super.initialize() this._initial_start = this.start this._initial_end = this.end this._set_auto_bounds() } get min(): number { return Math.min(this.start, this.end) } get max(): number { return Math.max(this.start, this.end) } get is_reversed(): boolean { return this.start > this.end } reset(): void { this._set_auto_bounds() if (this.start != this._initial_start || this.end != this._initial_end) this.setv({start: this._initial_start, end: this._initial_end}) else this.change.emit() } }<|fim▁hole|><|fim▁end|>
Range1d.initClass()
<|file_name|>sdn_test.go<|end_file_name|><|fim▁begin|>package integration import ( "fmt" "testing" "time" kapierrors "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" utilwait "k8s.io/apimachinery/pkg/util/wait" restclient "k8s.io/client-go/rest" osclient "github.com/openshift/origin/pkg/client" sdnapi "github.com/openshift/origin/pkg/sdn/apis/network" testutil "github.com/openshift/origin/test/util" testserver "github.com/openshift/origin/test/util/server" ) func createProject(osClient *osclient.Client, clientConfig *restclient.Config, name string) (*sdnapi.NetNamespace, error) { _, err := testserver.CreateNewProject(osClient, *clientConfig, name, name) if err != nil { return nil, fmt.Errorf("error creating project %q: %v", name, err) } backoff := utilwait.Backoff{ Duration: 100 * time.Millisecond, Factor: 2, Steps: 5, } var netns *sdnapi.NetNamespace err = utilwait.ExponentialBackoff(backoff, func() (bool, error) { netns, err = osClient.NetNamespaces().Get(name, metav1.GetOptions{}) if kapierrors.IsNotFound(err) { return false, nil } else if err != nil { return false, err } return true, nil }) if err != nil { return nil, fmt.Errorf("could not get NetNamepsace %q: %v", name, err) } return netns, nil } func updateNetNamespace(osClient *osclient.Client, netns *sdnapi.NetNamespace, action sdnapi.PodNetworkAction, args string) (*sdnapi.NetNamespace, error) { sdnapi.SetChangePodNetworkAnnotation(netns, action, args) _, err := osClient.NetNamespaces().Update(netns) if err != nil { return nil, err } backoff := utilwait.Backoff{ Duration: 100 * time.Millisecond, Factor: 2, Steps: 5, } name := netns.Name err = utilwait.ExponentialBackoff(backoff, func() (bool, error) { netns, err = osClient.NetNamespaces().Get(name, metav1.GetOptions{}) if err != nil { return false, err } if _, _, err := sdnapi.GetChangePodNetworkAnnotation(netns); err == sdnapi.ErrorPodNetworkAnnotationNotFound { return true, nil } else { return false, nil } }) if err != 
nil { return nil, err } return netns, nil } func TestOadmPodNetwork(t *testing.T) { masterConfig, err := testserver.DefaultMasterOptions() if err != nil { t.Fatalf("error creating config: %v", err) } defer testserver.CleanupMasterEtcd(t, masterConfig) masterConfig.NetworkConfig.NetworkPluginName = sdnapi.MultiTenantPluginName kubeConfigFile, err := testserver.StartConfiguredMaster(masterConfig) if err != nil { t.Fatalf("error starting server: %v", err) } osClient, err := testutil.GetClusterAdminClient(kubeConfigFile) if err != nil { t.Fatalf("error getting client: %v", err) } clientConfig, err := testutil.GetClusterAdminClientConfig(kubeConfigFile) if err != nil { t.Fatalf("error getting client config: %v", err) } origNetns1, err := createProject(osClient, clientConfig, "one") if err != nil { t.Fatalf("could not create namespace %q: %v", "one", err) } origNetns2, err := createProject(osClient, clientConfig, "two") if err != nil { t.Fatalf("could not create namespace %q: %v", "two", err) } origNetns3, err := createProject(osClient, clientConfig, "three") if err != nil { t.Fatalf("could not create namespace %q: %v", "three", err) } if origNetns1.NetID == 0 || origNetns2.NetID == 0 || origNetns3.NetID == 0 { t.Fatalf("expected non-0 NetIDs, got %d, %d, %d", origNetns1.NetID, origNetns2.NetID, origNetns3.NetID) } if origNetns1.NetID == origNetns2.NetID || origNetns1.NetID == origNetns3.NetID || origNetns2.NetID == origNetns3.NetID {<|fim▁hole|> if err != nil { t.Fatalf("error updating namespace: %v", err) } if newNetns2.NetID != origNetns1.NetID { t.Fatalf("expected netns2 (%d) to be joined to netns1 (%d)", newNetns2.NetID, origNetns1.NetID) } newNetns1, err := osClient.NetNamespaces().Get("one", metav1.GetOptions{}) if err != nil { t.Fatalf("error getting refetching NetNamespace: %v", err) } if newNetns1.NetID != origNetns1.NetID { t.Fatalf("expected netns1 (%d) to be unchanged (%d)", newNetns1.NetID, origNetns1.NetID) } newNetns1, err = updateNetNamespace(osClient, 
origNetns1, sdnapi.GlobalPodNetwork, "") if err != nil { t.Fatalf("error updating namespace: %v", err) } if newNetns1.NetID != 0 { t.Fatalf("expected netns1 (%d) to be global", newNetns1.NetID) } newNetns2, err = osClient.NetNamespaces().Get("two", metav1.GetOptions{}) if err != nil { t.Fatalf("error getting refetching NetNamespace: %v", err) } if newNetns2.NetID != origNetns1.NetID { t.Fatalf("expected netns2 (%d) to be unchanged (%d)", newNetns2.NetID, origNetns1.NetID) } newNetns1, err = updateNetNamespace(osClient, newNetns1, sdnapi.IsolatePodNetwork, "") if err != nil { t.Fatalf("error updating namespace: %v", err) } if newNetns1.NetID == 0 { t.Fatalf("expected netns1 (%d) to be non-global", newNetns1.NetID) } if newNetns1.NetID == newNetns2.NetID || newNetns1.NetID == origNetns3.NetID { t.Fatalf("expected netns1 (%d) to be unique (not %d, %d)", newNetns1.NetID, newNetns2.NetID, origNetns3.NetID) } }<|fim▁end|>
t.Fatalf("expected unique NetIDs, got %d, %d, %d", origNetns1.NetID, origNetns2.NetID, origNetns3.NetID) } newNetns2, err := updateNetNamespace(osClient, origNetns2, sdnapi.JoinPodNetwork, "one")
<|file_name|>resource_alicloud_ssl_vpn_client_cert_test.go<|end_file_name|><|fim▁begin|>package alicloud import ( "fmt" "testing" "github.com/alibaba/terraform-provider/alicloud/connectivity" "github.com/aliyun/alibaba-cloud-sdk-go/services/vpc" "github.com/hashicorp/terraform/helper/resource" "github.com/hashicorp/terraform/terraform" ) func TestAccAlicloudSslVpnClientCert_basic(t *testing.T) { var sslVpnClientCert vpc.DescribeSslVpnClientCertResponse resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, // module name IDRefreshName: "alicloud_ssl_vpn_client_cert.foo", Providers: testAccProviders, CheckDestroy: testAccCheckSslVpnClientCertDestroy, Steps: []resource.TestStep{ resource.TestStep{ Config: testAccSslVpnClientCertConfig, Check: resource.ComposeTestCheckFunc( testAccCheckSslVpnClientCertExists("alicloud_ssl_vpn_client_cert.foo", &sslVpnClientCert), resource.TestCheckResourceAttr( "alicloud_ssl_vpn_client_cert.foo", "name", "tf-testAccSslVpnClientCertConfig"), resource.TestCheckResourceAttrSet( "alicloud_ssl_vpn_client_cert.foo", "ssl_vpn_server_id"), ), }, }, }) } func TestAccAlicloudSslVpnClientCert_update(t *testing.T) { var sslVpnClientCert vpc.DescribeSslVpnClientCertResponse resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckSslVpnClientCertDestroy, Steps: []resource.TestStep{ resource.TestStep{ Config: testAccSslVpnClientCertConfig, Check: resource.ComposeTestCheckFunc( testAccCheckSslVpnClientCertExists("alicloud_ssl_vpn_client_cert.foo", &sslVpnClientCert), resource.TestCheckResourceAttr( "alicloud_ssl_vpn_client_cert.foo", "name", "tf-testAccSslVpnClientCertConfig"), resource.TestCheckResourceAttrSet( "alicloud_ssl_vpn_client_cert.foo", "ssl_vpn_server_id"), ), }, resource.TestStep{ Config: testAccSslVpnClientCertConfigUpdate, Check: resource.ComposeTestCheckFunc( testAccCheckSslVpnClientCertExists("alicloud_ssl_vpn_client_cert.foo", 
&sslVpnClientCert), resource.TestCheckResourceAttr( "alicloud_ssl_vpn_client_cert.foo", "name", "tf-testAccSslVpnClientCertUpdate"), resource.TestCheckResourceAttrSet( "alicloud_ssl_vpn_client_cert.foo", "ssl_vpn_server_id"), ), }, }, })<|fim▁hole|>} func testAccCheckSslVpnClientCertExists(n string, vpn *vpc.DescribeSslVpnClientCertResponse) resource.TestCheckFunc { return func(s *terraform.State) error { rs, ok := s.RootModule().Resources[n] if !ok { return fmt.Errorf("Not found: %s", n) } if rs.Primary.ID == "" { return fmt.Errorf("No VPN ID is set") } client := testAccProvider.Meta().(*connectivity.AliyunClient) vpnGatewayService := VpnGatewayService{client} instance, err := vpnGatewayService.DescribeSslVpnClientCert(rs.Primary.ID) if err != nil { return err } *vpn = instance return nil } } func testAccCheckSslVpnClientCertDestroy(s *terraform.State) error { client := testAccProvider.Meta().(*connectivity.AliyunClient) vpnGatewayService := VpnGatewayService{client} for _, rs := range s.RootModule().Resources { if rs.Type != "alicloud_ssl_vpn_client_cert" { continue } instance, err := vpnGatewayService.DescribeSslVpnClientCert(rs.Primary.ID) if err != nil { if NotFoundError(err) { continue } return err } if instance.SslVpnClientCertId != "" { return fmt.Errorf("Ssl VPN client cert %s still exist", instance.SslVpnClientCertId) } } return nil } const testAccSslVpnClientCertConfig = ` variable "name" { default = "tf-testAccSslVpnClientCertConfig" } resource "alicloud_vpc" "foo" { cidr_block = "172.16.0.0/12" name = "${var.name}" } data "alicloud_zones" "default" { "available_resource_creation"= "VSwitch" } resource "alicloud_vswitch" "foo" { vpc_id = "${alicloud_vpc.foo.id}" cidr_block = "172.16.0.0/21" availability_zone = "${data.alicloud_zones.default.zones.0.id}" name = "${var.name}" } resource "alicloud_vpn_gateway" "foo" { name = "${var.name}" vpc_id = "${alicloud_vpc.foo.id}" bandwidth = "10" enable_ssl = true instance_charge_type = "PostPaid" description = 
"test_create_description" } resource "alicloud_ssl_vpn_server" "foo" { name = "${var.name}" vpn_gateway_id = "${alicloud_vpn_gateway.foo.id}" client_ip_pool = "192.168.0.0/16" local_subnet = "172.16.0.0/21" protocol = "UDP" cipher = "AES-128-CBC" port = "1194" compress = "false" } resource "alicloud_ssl_vpn_client_cert" "foo" { ssl_vpn_server_id = "${alicloud_ssl_vpn_server.foo.id}" name = "${var.name}" } ` const testAccSslVpnClientCertConfigUpdate = ` variable "name" { default = "tf-testAccSslVpnClientCertUpdate" } resource "alicloud_vpc" "foo" { cidr_block = "172.16.0.0/12" name = "${var.name}" } data "alicloud_zones" "default" { "available_resource_creation"= "VSwitch" } resource "alicloud_vswitch" "foo" { vpc_id = "${alicloud_vpc.foo.id}" cidr_block = "172.16.0.0/21" availability_zone = "${data.alicloud_zones.default.zones.0.id}" name = "${var.name}" } resource "alicloud_vpn_gateway" "foo" { name = "${var.name}" vpc_id = "${alicloud_vpc.foo.id}" bandwidth = "10" enable_ssl = true instance_charge_type = "PostPaid" description = "test_update_description" } resource "alicloud_ssl_vpn_server" "foo" { name = "${var.name}" vpn_gateway_id = "${alicloud_vpn_gateway.foo.id}" client_ip_pool = "192.168.0.0/16" local_subnet = "172.16.0.0/21" protocol = "UDP" cipher = "AES-128-CBC" port = "1194" compress = "false" } resource "alicloud_ssl_vpn_client_cert" "foo" { ssl_vpn_server_id = "${alicloud_ssl_vpn_server.foo.id}" name = "${var.name}" } `<|fim▁end|>
<|file_name|>executeContract.js<|end_file_name|><|fim▁begin|>/** Template Controllers @module Templates */ /** The execute contract template @class [template] elements_executeContract @constructor */ Template['elements_executeContract'].onCreated(function(){ var template = this; // Set Defaults TemplateVar.set('sending', false); // show execute part if its a custom contract if(CustomContracts.findOne({address: template.data.address})) TemplateVar.set('executionVisible', true); // check address for code web3.eth.getCode(template.data.address, function(e, code) { if(!e && code.length > 2) { TemplateVar.set(template, 'hasCode', true); } }); }); Template['elements_executeContract'].helpers({ /** Reruns when the data context changes @method (reactiveContext) */ 'reactiveContext': function() { var contractInstance = web3.eth.contract(this.jsonInterface).at(this.address); var contractFunctions = []; var contractConstants = []; _.each(this.jsonInterface, function(func, i){ func = _.clone(func);<|fim▁hole|> func.contractInstance = contractInstance; func.inputs = _.map(func.inputs, Helpers.createTemplateDataFromInput); if(func.constant){ // if it's a constant contractConstants.push(func); } else { //if its a variable contractFunctions.push(func); } } }); TemplateVar.set('contractConstants', contractConstants); TemplateVar.set('contractFunctions', contractFunctions); } }); Template['elements_executeContract'].events({ /** Select a contract function @event 'change .select-contract-function */ 'change .select-contract-function': function(e, template){ TemplateVar.set('executeData', null); // change the inputs and data field TemplateVar.set('selectedFunction', _.find(TemplateVar.get('contractFunctions'), function(contract){ return contract.name === e.currentTarget.value; })); Tracker.afterFlush(function(){ $('.abi-input').trigger('change'); }); }, /** Click the show hide button @event click .toggle-visibility */ 'click .toggle-visibility': function(){ 
TemplateVar.set('executionVisible', !TemplateVar.get('executionVisible')); } }); /** The contract constants template @class [template] elements_executeContract_constant @constructor */ /** Formats the values for display @method formatOutput */ var formatOutput = function(val) { if(_.isArray(val)) return _.map(val, formatOutput); else { // stringify boolean if(_.isBoolean(val)) val = val ? 'YES' : 'NO'; // convert bignumber objects val = (_.isObject(val) && val.toString) ? val.toString(10) : val; return val; } }; Template['elements_executeContract_constant'].onCreated(function(){ var template = this; // initialize our input data prior to the first call TemplateVar.set('inputs', _.map(template.data.inputs, function(input) { return Helpers.addInputValue([input], input, {})[0]; })); // call the contract functions when data changes and on new blocks this.autorun(function() { // make reactive to the latest block EthBlocks.latest; // get args for the constant function var args = TemplateVar.get('inputs') || []; // add callback args.push(function(e, r) { if(!e) { var outputs = []; // single return value if(template.data.outputs.length === 1) { template.data.outputs[0].value = r; outputs.push(template.data.outputs[0]); // multiple return values } else { outputs = _.map(template.data.outputs, function(output, i) { output.value = r[i]; return output; }); } TemplateVar.set(template, 'outputs', outputs); } }); template.data.contractInstance[template.data.name].apply(null, args); }); }); Template['elements_executeContract_constant'].helpers({ /** Formats the value if its a big number or array @method (value) */ 'value': function() { return _.isArray(this.value) ? 
formatOutput(this.value) : [formatOutput(this.value)]; }, /** Figures out extra data @method (extra) */ 'extra': function() { var data = formatOutput(this); // 1000000000 if (data > 1400000000 && data < 1800000000 && Math.floor(data/1000) != data/1000) { return '(' + moment(data*1000).fromNow() + ')'; } if (data == 'YES') { return '<span class="icon icon-check"></span>'; } else if (data == 'NO') { return '<span class="icon icon-ban"></span>' } return; } }); Template['elements_executeContract_constant'].events({ /** React on user input on the constant functions @event change .abi-input, input .abi-input */ 'change .abi-input, input .abi-input': function(e, template) { var inputs = Helpers.addInputValue(template.data.inputs, this, e.currentTarget); TemplateVar.set('inputs', inputs); } }); /** The contract function template @class [template] elements_executeContract_function @constructor */ Template['elements_executeContract_function'].onCreated(function(){ var template = this; // change the amount when the currency unit is changed template.autorun(function(c){ var unit = EthTools.getUnit(); if(!c.firstRun) { TemplateVar.set('amount', EthTools.toWei(template.find('input[name="amount"]').value.replace(',','.'), unit)); } }); }); Template['elements_executeContract_function'].onRendered(function(){ // Run all inputs through formatter to catch bools this.$('.abi-input').trigger('change'); }); Template['elements_executeContract_function'].helpers({ 'reactiveDataContext': function(){ if(this.inputs.length === 0) TemplateVar.set('executeData', this.contractInstance[this.name].getData()); } }); Template['elements_executeContract_function'].events({ /** Set the amount while typing @event keyup input[name="amount"], change input[name="amount"], input input[name="amount"] */ 'keyup input[name="amount"], change input[name="amount"], input input[name="amount"]': function(e, template){ var wei = EthTools.toWei(e.currentTarget.value.replace(',','.')); TemplateVar.set('amount', wei 
|| '0'); }, /** React on user input on the execute functions @event change .abi-input, input .abi-input */ 'change .abi-input, input .abi-input': function(e, template) { var inputs = Helpers.addInputValue(template.data.inputs, this, e.currentTarget); TemplateVar.set('executeData', template.data.contractInstance[template.data.name].getData.apply(null, inputs)); }, /** Executes a transaction on contract @event click .execute */ 'click .execute': function(e, template){ var to = template.data.contractInstance.address, gasPrice = 50000000000, estimatedGas = undefined, /* (typeof mist == 'undefined')not working */ amount = TemplateVar.get('amount') || 0, selectedAccount = Helpers.getAccountByAddress(TemplateVar.getFrom('.execute-contract select[name="dapp-select-account"]', 'value')), data = TemplateVar.get('executeData'); var latestTransaction = Transactions.findOne({}, {sort: {timestamp: -1}}); if (latestTransaction && latestTransaction.gasPrice) gasPrice = latestTransaction.gasPrice; if(selectedAccount) { console.log('Providing gas: ', estimatedGas ,' + 100000'); if(selectedAccount.balance === '0') return GlobalNotification.warning({ content: 'i18n:wallet.send.error.emptyWallet', duration: 2 }); // The function to send the transaction var sendTransaction = function(estimatedGas){ TemplateVar.set('sending', true); // CONTRACT TX if(contracts['ct_'+ selectedAccount._id]) { // Load the accounts owned by user and sort by balance var accounts = EthAccounts.find({name: {$exists: true}}, {sort: {name: 1}}).fetch(); accounts.sort(Helpers.sortByBalance); // Looks for them among the wallet account owner var fromAccount = _.find(accounts, function(acc){ return (selectedAccount.owners.indexOf(acc.address)>=0); }) contracts['ct_'+ selectedAccount._id].execute.sendTransaction(to || '', amount || '', data || '', { from: fromAccount.address, gasPrice: gasPrice, gas: estimatedGas }, function(error, txHash){ TemplateVar.set(template, 'sending', false); console.log(error, txHash); 
if(!error) { console.log('SEND from contract', amount); addTransactionAfterSend(txHash, amount, selectedAccount.address, to, gasPrice, estimatedGas, data); FlowRouter.go('dashboard'); } else { // EthElements.Modal.hide(); GlobalNotification.error({ content: error.message, duration: 8 }); } }); // SIMPLE TX } else { web3.eth.sendTransaction({ from: selectedAccount.address, to: to, data: data, value: amount, gasPrice: gasPrice, gas: estimatedGas }, function(error, txHash){ TemplateVar.set(template, 'sending', false); console.log(error, txHash); if(!error) { console.log('SEND simple'); addTransactionAfterSend(txHash, amount, selectedAccount.address, to, gasPrice, estimatedGas, data); // FlowRouter.go('dashboard'); GlobalNotification.success({ content: 'i18n:wallet.send.transactionSent', duration: 2 }); } else { // EthElements.Modal.hide(); GlobalNotification.error({ content: error.message, duration: 8 }); } }); } }; sendTransaction(estimatedGas); } } });<|fim▁end|>
// Walk throught the jsonInterface and extract functions and constants if(func.type == 'function') {
<|file_name|>parser.rs<|end_file_name|><|fim▁begin|>#![plugin(peg_syntax_ext)] use std::fmt; use std::collections::HashMap; peg_file! gremlin("gremlin.rustpeg"); pub fn parse(g: &str) -> Result<ParsedGraphQuery, gremlin::ParseError> { let parsed = pre_parse(g); // verify all the steps actually make sense // is it a query to a single vertex or a global query? parsed } pub fn pre_parse(g: &str) -> Result<ParsedGraphQuery, gremlin::ParseError> {<|fim▁hole|>} /* returned from the peg parser we'll need to take each of the steps and use them to construct an actual GraphQuery */ pub struct ParsedGraphQuery { pub steps: Vec<RawStep> } /* scope of the query. determines if we're looking at the entire graph or just from a handful of vertices */ pub enum Scope { Global, Vertex(Vec<i64>), } /* generic step used in ParsedGraphQuery will be turned into specific steps */ #[derive(Debug)] pub struct RawStep { pub name: String, pub args: Vec<Arg>, } #[derive(Debug, Display)] pub enum Arg { Integer(i64), Float(f64), String(String), } impl RawStep { pub fn new(name: String, args: Vec<Arg>) -> RawStep { RawStep{name:name, args:args} } } impl fmt::Display for RawStep { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "RawStep {}", self.name) } }<|fim▁end|>
gremlin::query(g)
<|file_name|>qps_interarrival_test.cc<|end_file_name|><|fim▁begin|>/* * * Copyright 2015 gRPC authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ #include <chrono> #include <iostream> // Use the C histogram rather than C++ to avoid depending on proto #include "test/core/util/histogram.h" #include "test/cpp/qps/interarrival.h" #include "test/cpp/util/test_config.h" using grpc::testing::InterarrivalTimer; using grpc::testing::RandomDistInterface; static void RunTest(RandomDistInterface&& r, int threads, std::string title) { InterarrivalTimer timer; timer.init(r, threads); grpc_histogram* h(grpc_histogram_create(0.01, 60e9)); for (int i = 0; i < 10000000; i++) { for (int j = 0; j < threads; j++) { grpc_histogram_add(h, timer.next(j)); } } std::cout << title << " Distribution" << std::endl;<|fim▁hole|> for (double pct = 0.0; pct < 100.0; pct += 1.0) { std::cout << grpc_histogram_percentile(h, pct) << "," << pct << std::endl; } grpc_histogram_destroy(h); } using grpc::testing::ExpDist; int main(int argc, char** argv) { grpc::testing::InitTest(&argc, &argv, true); RunTest(ExpDist(10.0), 5, std::string("Exponential(10)")); return 0; }<|fim▁end|>
std::cout << "Value, Percentile" << std::endl;
<|file_name|>SzabadsagokatOsszefuggoFelhasznaltSzabadnapReszletekkeKonvertaloTest.java<|end_file_name|><|fim▁begin|>package test.unit.hu.interconnect.hr.module.personaldata.vacations; import static com.google.common.collect.Lists.newArrayList; import static hu.interconnect.hr.backend.api.enumeration.KivetelnapTipus.MUNKANAP; import static hu.interconnect.hr.backend.api.enumeration.KivetelnapTipus.PIHENONAP; import static java.util.Arrays.asList; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasSize; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.List; import org.junit.Test; import hu.interconnect.hr.backend.api.dto.SzabadsagFelhasznalasResponseDTO; import hu.interconnect.hr.domain.Kivetelnap; import hu.interconnect.hr.domain.Szabadsag; import hu.interconnect.hr.module.exceptiondays.KivetelnapokHelper; import hu.interconnect.hr.module.personaldata.vacations.SzabadsagokatOsszefuggoFelhasznaltSzabadnapReszletekkeKonvertalo; import test.builder.KivetelnapBuilder; import test.builder.SzabadsagBuilder; import test.builder.SzemelyitorzsBuilder; import test.matcher.SzabadsagFelhasznalasResponseDTOMatcher; import test.unit.AbstractBackendUnitTest; public class SzabadsagokatOsszefuggoFelhasznaltSzabadnapReszletekkeKonvertaloTest extends AbstractBackendUnitTest { private SzabadsagokatOsszefuggoFelhasznaltSzabadnapReszletekkeKonvertalo konvertalo = new SzabadsagokatOsszefuggoFelhasznaltSzabadnapReszletekkeKonvertalo(new KivetelnapokHelper(new ArrayList<Kivetelnap>())); @Test public void nullIllegalArgumentExceptiontDob() { try { konvertalo.konvertal(null); fail(); } catch (IllegalArgumentException e) { assertEquals(e.getLocalizedMessage(), "A bemeno parameter null!"); } } @Test public void uresListaUresetAdVissza() { List<SzabadsagFelhasznalasResponseDTO> kapottReszletek = konvertalo.konvertal(new 
ArrayList<Szabadsag>()); assertThat(kapottReszletek, hasSize(0)); } @Test public void egyElemuListaEgyelemetAdVissza() { List<SzabadsagFelhasznalasResponseDTO> kapottReszletek = konvertalo.konvertal(newArrayList( new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.03").letrehoz())); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2012.12.03") .veg("2012.12.03") .munkanapokSzama(1); assertThat(kapottReszletek, contains(elvartReszletek)); } @Test public void ketEgymasMellettiElemOsszevonodikHaAKetNapKetEgymastKovetkoHetkoznapok() { List<SzabadsagFelhasznalasResponseDTO> kapottReszletek = konvertalo.konvertal(newArrayList( new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.03").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.04").letrehoz())); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2012.12.03") .veg("2012.12.04") .munkanapokSzama(2); assertThat(kapottReszletek, contains(elvartReszletek)); } @Test public void ketEgymasMellettiElemOsszevonodikHaAKetNapKozottHetvegeVan() { List<SzabadsagFelhasznalasResponseDTO> kapottReszletek = konvertalo.konvertal(newArrayList( new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.07").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.10").letrehoz())); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2012.12.07") .veg("2012.12.10") .munkanapokSzama(2); assertThat(kapottReszletek, contains(elvartReszletek)); } @Test public void csutortokEsPentekOsszevonodikDeAHetvegeNemAdodikHozza() { List<SzabadsagFelhasznalasResponseDTO> kapottReszletek = 
konvertalo.konvertal(newArrayList( new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.06").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.07").letrehoz())); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2012.12.06") .veg("2012.12.07") .munkanapokSzama(2); assertThat(kapottReszletek, contains(elvartReszletek)); } @Test public void hetfotolPentekigOsszevonodikDeAHetvegeNemAdodikHozza() { List<SzabadsagFelhasznalasResponseDTO> kapottReszletek = konvertalo.konvertal(newArrayList( new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.03").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.04").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.05").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.06").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.07").letrehoz())); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2012.12.03") .veg("2012.12.07") .munkanapokSzama(5); assertThat(kapottReszletek, contains(elvartReszletek)); } @Test public void pentekEsKovetkezoHetKedd() { List<SzabadsagFelhasznalasResponseDTO> kapottReszletek = konvertalo.konvertal(newArrayList( new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.07").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.11").letrehoz())); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek1 = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2012.12.07") .veg("2012.12.07") 
.munkanapokSzama(1); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek2 = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2012.12.11") .veg("2012.12.11") .munkanapokSzama(1); assertThat(kapottReszletek, contains(asList(elvartReszletek1, elvartReszletek2))); } @Test public void pentekEsKovetkezoHetKeddEsAHetvegeNormalisAHetfoPedigPihenonap() { KivetelnapokHelper kivetelnapok = new KivetelnapokHelper(newArrayList(new KivetelnapBuilder() .datum("2012.12.10") .tipus(PIHENONAP) .letrehoz())); konvertalo = new SzabadsagokatOsszefuggoFelhasznaltSzabadnapReszletekkeKonvertalo(kivetelnapok); List<SzabadsagFelhasznalasResponseDTO> kapottReszletek = konvertalo.konvertal(newArrayList( new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.07").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.11").letrehoz())); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1)<|fim▁hole|> .kezdet("2012.12.07") .veg("2012.12.11") .munkanapokSzama(2); assertThat(kapottReszletek, contains(elvartReszletek)); } @Test public void bugfix() { KivetelnapokHelper kivetelnapok = new KivetelnapokHelper(newArrayList(new KivetelnapBuilder() .datum("2012.03.16") .tipus(PIHENONAP) .letrehoz())); konvertalo = new SzabadsagokatOsszefuggoFelhasznaltSzabadnapReszletekkeKonvertalo(kivetelnapok); List<SzabadsagFelhasznalasResponseDTO> kapottReszletek = konvertalo.konvertal(newArrayList( new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.01").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.02").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.05").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.06").letrehoz(), new 
SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.07").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.08").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.15").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.19").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.20").letrehoz())); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek1 = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2012.03.01") .veg("2012.03.08") .munkanapokSzama(6); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek2 = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2012.03.15") .veg("2012.03.20") .munkanapokSzama(3); assertThat(kapottReszletek, contains(asList(elvartReszletek1, elvartReszletek2))); } @Test public void vasarnapMunkanappaTeveEsErreSzabadsagKiveve() { KivetelnapokHelper kivetelnapok = new KivetelnapokHelper(newArrayList(new KivetelnapBuilder() .datum("2012.12.23") .tipus(MUNKANAP) .letrehoz())); konvertalo = new SzabadsagokatOsszefuggoFelhasznaltSzabadnapReszletekkeKonvertalo(kivetelnapok); List<SzabadsagFelhasznalasResponseDTO> kapottReszletek = konvertalo.konvertal(newArrayList( new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.12.23").letrehoz())); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2012.12.23") .veg("2012.12.23") .munkanapokSzama(1); assertThat(kapottReszletek, contains(asList(elvartReszletek))); } @Test public void bugfix2() { List<SzabadsagFelhasznalasResponseDTO> eredmeny = konvertalo.konvertal(newArrayList( new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2013.01.15").letrehoz(), new 
SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2013.01.16").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2013.01.17").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2013.01.18").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2013.01.21").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2013.01.22").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2013.01.23").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2013.01.28").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2013.01.29").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2013.01.30").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2013.01.31").letrehoz())); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek1 = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2013.01.15") .veg("2013.01.23") .munkanapokSzama(7); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek2 = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2013.01.28") .veg("2013.01.31") .munkanapokSzama(4); assertThat(eredmeny, contains(asList(elvartReszletek1, elvartReszletek2))); } @Test public void bugfix3() { KivetelnapokHelper kivetelnapok = new KivetelnapokHelper(newArrayList(new KivetelnapBuilder() .datum("2012.03.16") .tipus(PIHENONAP) .letrehoz(), new KivetelnapBuilder() .datum("2012.03.24") .tipus(MUNKANAP) .letrehoz()) ); konvertalo = new SzabadsagokatOsszefuggoFelhasznaltSzabadnapReszletekkeKonvertalo(kivetelnapok); List<SzabadsagFelhasznalasResponseDTO> eredmeny = konvertalo.konvertal(newArrayList( new 
SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.01").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.02").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.05").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.06").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.07").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.08").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.09").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.12").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.13").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.14").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.15").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.19").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.20").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.21").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.22").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.23").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.24").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new 
SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.26").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.27").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.28").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.29").letrehoz(), new SzabadsagBuilder().szemelyitorzs(new SzemelyitorzsBuilder().tsz(1).letrehoz()).nap("2012.03.30").letrehoz())); SzabadsagFelhasznalasResponseDTOMatcher elvartReszletek = new SzabadsagFelhasznalasResponseDTOMatcher() .tsz(1) .kezdet("2012.03.01") .veg("2012.03.30") .munkanapokSzama(22); assertThat(eredmeny, contains(elvartReszletek)); } }<|fim▁end|>
<|file_name|>lto-unwind.rs<|end_file_name|><|fim▁begin|>// run-pass #![allow(unused_variables)] // compile-flags:-C lto -C panic=unwind // needs-unwind // no-prefer-dynamic // ignore-emscripten no processes // ignore-sgx no processes use std::process::Command; use std::env; struct Bomb; impl Drop for Bomb { fn drop(&mut self) { println!("hurray you ran me"); } } fn main() { let mut args = env::args_os(); let me = args.next().unwrap(); if let Some(s) = args.next() {<|fim▁hole|> let _bomb = Bomb; panic!("try to catch me"); } } let s = Command::new(env::args_os().next().unwrap()).arg("foo").output(); let s = s.unwrap(); assert!(!s.status.success()); assert!(String::from_utf8_lossy(&s.stdout).contains("hurray you ran me")); }<|fim▁end|>
if &*s == "foo" {
<|file_name|>uri.hpp<|end_file_name|><|fim▁begin|>#pragma once #include "net/uri.hpp" <|fim▁hole|> using uri = net::uri; }<|fim▁end|>
namespace http {
<|file_name|>mixins.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from __future__ import absolute_import from django.views.generic.base import TemplateResponseMixin from wiki.core.plugins import registry from wiki.conf import settings class ArticleMixin(TemplateResponseMixin): """A mixin that receives an article object as a parameter (usually from a wiki decorator) and puts this information as an instance attribute and in the template context.""" def dispatch(self, request, article, *args, **kwargs): self.urlpath = kwargs.pop('urlpath', None)<|fim▁hole|> self.children_slice = [] if settings.SHOW_MAX_CHILDREN > 0: try: for child in self.article.get_children( max_num=settings.SHOW_MAX_CHILDREN + 1, articles__article__current_revision__deleted=False, user_can_read=request.user): self.children_slice.append(child) except AttributeError as e: raise Exception( "Attribute error most likely caused by wrong MPTT version. Use 0.5.3+.\n\n" + str(e)) return super(ArticleMixin, self).dispatch(request, *args, **kwargs) def get_context_data(self, **kwargs): kwargs['urlpath'] = self.urlpath kwargs['article'] = self.article kwargs['article_tabs'] = registry.get_article_tabs() kwargs['children_slice'] = self.children_slice[:20] kwargs['children_slice_more'] = len(self.children_slice) > 20 kwargs['plugins'] = registry.get_plugins() return kwargs<|fim▁end|>
self.article = article
<|file_name|>borrowed-unique-basic.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // ignore-android: FIXME(#10381) // min-lldb-version: 310 // Gdb doesn't know about UTF-32 character encoding and will print a rust char as only // its numerical value. // compile-flags:-g // === GDB TESTS =================================================================================== // gdb-command:run // gdb-command:print *bool_ref // gdb-check:$1 = true // gdb-command:print *int_ref // gdb-check:$2 = -1 // gdb-command:print *char_ref // gdb-check:$3 = 97 // gdb-command:print/d *i8_ref // gdb-check:$4 = 68 // gdb-command:print *i16_ref<|fim▁hole|> // gdb-command:print *i64_ref // gdb-check:$7 = -64 // gdb-command:print *uint_ref // gdb-check:$8 = 1 // gdb-command:print/d *u8_ref // gdb-check:$9 = 100 // gdb-command:print *u16_ref // gdb-check:$10 = 16 // gdb-command:print *u32_ref // gdb-check:$11 = 32 // gdb-command:print *u64_ref // gdb-check:$12 = 64 // gdb-command:print *f32_ref // gdb-check:$13 = 2.5 // gdb-command:print *f64_ref // gdb-check:$14 = 3.5 // === LLDB TESTS ================================================================================== // lldb-command:type format add -f decimal char // lldb-command:type format add -f decimal 'unsigned char' // lldb-command:run // lldb-command:print *bool_ref // lldb-check:[...]$0 = true // lldb-command:print *int_ref // lldb-check:[...]$1 = -1 // d ebugger:print *char_ref // c heck:[...]$3 = 97 // lldb-command:print *i8_ref // lldb-check:[...]$2 = 68 // lldb-command:print 
*i16_ref // lldb-check:[...]$3 = -16 // lldb-command:print *i32_ref // lldb-check:[...]$4 = -32 // lldb-command:print *i64_ref // lldb-check:[...]$5 = -64 // lldb-command:print *uint_ref // lldb-check:[...]$6 = 1 // lldb-command:print *u8_ref // lldb-check:[...]$7 = 100 // lldb-command:print *u16_ref // lldb-check:[...]$8 = 16 // lldb-command:print *u32_ref // lldb-check:[...]$9 = 32 // lldb-command:print *u64_ref // lldb-check:[...]$10 = 64 // lldb-command:print *f32_ref // lldb-check:[...]$11 = 2.5 // lldb-command:print *f64_ref // lldb-check:[...]$12 = 3.5 #![allow(unused_variables)] fn main() { let bool_box: Box<bool> = box true; let bool_ref: &bool = &*bool_box; let int_box: Box<int> = box -1; let int_ref: &int = &*int_box; let char_box: Box<char> = box 'a'; let char_ref: &char = &*char_box; let i8_box: Box<i8> = box 68; let i8_ref: &i8 = &*i8_box; let i16_box: Box<i16> = box -16; let i16_ref: &i16 = &*i16_box; let i32_box: Box<i32> = box -32; let i32_ref: &i32 = &*i32_box; let i64_box: Box<i64> = box -64; let i64_ref: &i64 = &*i64_box; let uint_box: Box<uint> = box 1; let uint_ref: &uint = &*uint_box; let u8_box: Box<u8> = box 100; let u8_ref: &u8 = &*u8_box; let u16_box: Box<u16> = box 16; let u16_ref: &u16 = &*u16_box; let u32_box: Box<u32> = box 32; let u32_ref: &u32 = &*u32_box; let u64_box: Box<u64> = box 64; let u64_ref: &u64 = &*u64_box; let f32_box: Box<f32> = box 2.5; let f32_ref: &f32 = &*f32_box; let f64_box: Box<f64> = box 3.5; let f64_ref: &f64 = &*f64_box; zzz(); // #break } fn zzz() {()}<|fim▁end|>
// gdb-check:$5 = -16 // gdb-command:print *i32_ref // gdb-check:$6 = -32
<|file_name|>Graphics3dInterface.hh<|end_file_name|><|fim▁begin|>/* * Copyright 2011 Nate Koenig & Andrew Howard * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* Desc: Graphics 3d Interface for Player * Author: Nate Koenig * Date: 30 Jan 2007 */ #ifndef GRAPHICS3DINTERFACE_HH #define GRAPHICS3DINTERFACE_HH #include "GazeboInterface.hh" namespace boost { class recursive_mutex; } namespace libgazebo { // Forward declarations class Graphics3dIface; /// \brief Graphics3d interface class Graphics3dInterface : public GazeboInterface { /// \brief Constructor public: Graphics3dInterface(player_devaddr_t addr, GazeboDriver *driver, ConfigFile *cf, int section); /// \brief Destructor public: virtual ~Graphics3dInterface(); /// \brief Handle all messages. This is called from GazeboDriver public: virtual int ProcessMessage(QueuePointer &respQueue, player_msghdr_t *hdr, void *data); <|fim▁hole|> public: virtual void Update(); /// \brief Open a SHM interface when a subscription is received. /// This is called fromGazeboDriver::Subscribe public: virtual void Subscribe(); /// \brief Close a SHM interface. This is called from /// GazeboDriver::Unsubscribe public: virtual void Unsubscribe(); private: Graphics3dIface *iface; /// \brief Gazebo id. This needs to match and ID in a Gazebo WorldFile private: char *gz_id; private: static boost::recursive_mutex *mutex; }; } #endif<|fim▁end|>
/// \brief Update this interface, publish new info.
<|file_name|>16_UCSC_sources_to_ENCODE.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 # # This file is part of Progesterone pipeline. # # Progesterone pipeline is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Progesterone pipeline is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Progesterone pipeline. If not, see <https://www.gnu.org/licenses/>. # from utils.mysqldb import * import os # UCSC doe not have the pointers directly back to ENCODE, so I found them for ESR1 - by hand # encode_esr1_xps.tsv must contain 3 columns: UCSC id, encode experiment id, and encode file id ######################################### def main(): conf_file = "/home/ivana/.mysql_conf" mapping_file = "encode_esr1_xps.tsv" for dependency in [conf_file, mapping_file]: if not os.path.exists(dependency): print(dependency,"not found") exit() encode_exp_id = {} encode_file_id = {} ucsc_ids = [] with open(mapping_file,"r") as inf: for line in inf: if 'UCSC' in line: continue # header [ucsc, encode_exp, encode_file] = line.split("\t")[:3] ucsc_ids.append(ucsc) encode_exp_id[ucsc] = encode_exp encode_file_id[ucsc] = encode_file ######################### # plug in to local database db = connect_to_mysql(conf_file) cursor = db.cursor() search_db(cursor,"set autocommit=1") switch_to_db(cursor,'progesterone') # this might not be the best idea if the database grows really large # first make sure we have single entry for each of multiple ids for line in search_db(cursor,"select id, external_id from xrefs where xtype='ucsc'"): [xref_id, ucsc_str] = line 
ucsc_ids_stored = ucsc_str.split(",") if len(ucsc_ids_stored) <2: continue for ucsc_id in ucsc_ids_stored: store_or_update(cursor, 'xrefs', {'xtype':'ucsc', 'external_id':ucsc_id}, None) <|fim▁hole|> for line in search_db(cursor,"select id, external_id from xrefs where xtype='ucsc' and external_id not like '%,%'"): [ucsc_xref_id, ucsc_id] = line if not ucsc_id in ucsc_ids: continue encode_file_xref_id = store_or_update(cursor, 'xrefs', {'xtype':'encode', 'external_id': encode_file_id[ucsc_id]}, None) search_db(cursor, "update xrefs set parent_id=%d where id=%d" % (encode_file_xref_id, ucsc_xref_id)) encode_exp_xref_id = store_or_update(cursor, 'xrefs', {'xtype':'encode', 'external_id': encode_exp_id[ucsc_id]}, None) search_db(cursor, "update xrefs set parent_id=%d where id=%d" % (encode_exp_xref_id, encode_file_xref_id)) cursor.close() db.close() return True ######################################### ######################################## if __name__ == '__main__': main()<|fim▁end|>
# now for each single entry, make parent point to encode file, and encode file's parent to encode exp
<|file_name|>objdetect.rs<|end_file_name|><|fim▁begin|>//! Various object detection algorithms, such as Haar feature-based cascade //! classifier for object detection and histogram of oriented gradients (HOG). use super::core::*; use super::errors::*; use super::*; use failure::Error; use std::ffi::CString; use std::os::raw::{c_char, c_double, c_int}; use std::path::Path; use std::vec::Vec; enum CCascadeClassifier {} extern "C" { fn cv_cascade_classifier_new() -> *mut CCascadeClassifier; fn cv_cascade_classifier_load(cc: *mut CCascadeClassifier, p: *const c_char) -> bool; fn cv_cascade_classifier_drop(p: *mut CCascadeClassifier); fn cv_cascade_classifier_detect( cc: *mut CCascadeClassifier, cmat: *mut CMat, vec_of_rect: *mut CVec<Rect>, scale_factor: c_double, min_neighbors: c_int, flags: c_int, min_size: Size2i, max_size: Size2i, ); } /// We can safely send the classifier (a mutable pointer) to a different thread unsafe impl Send for CascadeClassifier {} /// An object detect trait. pub trait ObjectDetect { /// Detects the object inside this image and returns a list of detections /// with their confidence. fn detect(&self, image: &Mat) -> Vec<(Rect, f64)>; } /// Cascade classifier class for object detection. #[derive(Debug)] pub struct CascadeClassifier { inner: *mut CCascadeClassifier, } impl ObjectDetect for CascadeClassifier { fn detect(&self, image: &Mat) -> Vec<(Rect, f64)> { self.detect_multiscale(image) .into_iter() .map(|r| (r, 0f64)) .collect::<Vec<_>>() } } impl CascadeClassifier { /// Creates a cascade classifier, uninitialized. Before use, call load. pub fn new() -> CascadeClassifier { CascadeClassifier { inner: unsafe { cv_cascade_classifier_new() }, } } /// Creates a cascade classifier using the model specified. pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self, Error> { let cc = CascadeClassifier::new(); cc.load(path)?; Ok(cc) } /// Loads the classifier model from a path. 
pub fn load<P: AsRef<Path>>(&self, path: P) -> Result<(), Error> { if let Some(p) = path.as_ref().to_str() { let s = CString::new(p)?; if unsafe { cv_cascade_classifier_load(self.inner, (&s).as_ptr()) } { return Ok(()); } } Err(CvError::InvalidPath(path.as_ref().to_path_buf()).into()) } /// The default detection uses scale factor 1.1, minNeighbors 3, no min size /// or max size. pub fn detect_multiscale(&self, mat: &Mat) -> Vec<Rect> { self.detect_with_params(mat, 1.1, 3, Size2i::default(), Size2i::default()) } /// Detects the object using parameters specified. /// /// * `mat` - Matrix of the type CV_8U containing an image where objects are /// detected. /// * `scale_factor` - Parameter specifying how much the image size is /// reduced at each image scale. /// * `min_neighbors` - Parameter specifying how many neighbors each /// candidate rectangle should have to retain it. /// * `min_size` - Minimum possible object size. Objects smaller than that /// are ignored. /// * `max_size` - Maximum possible object size. Objects larger than that /// are ignored /// /// OpenCV has a parameter (`flags`) that's not used at all. 
pub fn detect_with_params( &self, mat: &Mat, scale_factor: f32, min_neighbors: c_int, min_size: Size2i, max_size: Size2i, ) -> Vec<Rect> { let mut c_result = CVec::<Rect>::default(); unsafe { cv_cascade_classifier_detect( self.inner, mat.inner, &mut c_result, scale_factor as c_double, min_neighbors, 0, min_size, max_size, ) } c_result.unpack() } } impl Drop for CascadeClassifier { fn drop(&mut self) { unsafe { cv_cascade_classifier_drop(self.inner); } } } #[derive(Debug, Clone, Copy)] /// Opaque type for C/C++ SvmDetector object pub enum CSvmDetector {} /// SvmDetector #[derive(Debug)] pub struct SvmDetector { /// Pointer to the inner data structure pub(crate) inner: *mut CSvmDetector, } extern "C" { fn cv_hog_default_people_detector() -> *mut CSvmDetector; fn cv_hog_daimler_people_detector() -> *mut CSvmDetector; fn cv_hog_detector_drop(d: *mut CSvmDetector); } impl SvmDetector { /// The built-in people detector. /// /// The size of the default people detector is 64x128, that mean that the /// people you would want to detect have to be atleast 64x128. pub fn default_people_detector() -> SvmDetector { SvmDetector { inner: unsafe { cv_hog_default_people_detector() }, } } /// Returns the Daimler people detector. pub fn daimler_people_detector() -> SvmDetector { SvmDetector { inner: unsafe { cv_hog_daimler_people_detector() }, } } } impl Drop for SvmDetector { fn drop(&mut self) { unsafe { cv_hog_detector_drop(self.inner); } } } /// Parameters that controls the behavior of HOG. #[derive(Debug, Clone, Copy)] pub struct HogParams { /// Detection window size. Align to block size and block stride. The default /// is 64x128, trained the same as original paper. pub win_size: Size2i, /// Block size in pixels. Align to cell size. Only (16,16) is supported for /// now (at least for GPU). pub block_size: Size2i, /// Block stride. It must be a multiple of cell size. pub block_stride: Size2i, /// Cell size. Only (8, 8) is supported for now. 
pub cell_size: Size2i, /// Number of bins. Only 9 bins per cell are supported for now. pub nbins: c_int, /// Gaussian smoothing window parameter. Default -1 for CPU and 4.0 for GPU. pub win_sigma: f64, /// L2-Hys normalization method shrinkage. Default 0.2. pub l2hys_threshold: f64, /// Flag to specify whether the gamma correction preprocessing is required /// or not. Default false. pub gamma_correction: bool, /// Maximum number of detection window increases (HOG scales). Default: 64. pub nlevels: usize, // ======================================================================= // Functions from detect function // ======================================================================= /// Threshold for the distance between features and SVM classifying /// plane. Usually it is 0 and should be specfied in the detector /// coefficients (as the last free coefficient). But if the free coefficient /// is omitted (which is allowed), you can specify it manually here. pub hit_threshold: f64, /// Window stride. It must be a multiple of block stride. pub win_stride: Size2i, /// Padding pub padding: Size2i, /// Coefficient of the detection window increase. pub scale: f64, /// Coefficient to regulate the similarity threshold. When detected, some /// objects can be covered by many rectangles. 0 means not to perform /// grouping. pub group_threshold: c_int, /// The useMeanShiftGrouping parameter is a boolean indicating whether or /// not mean-shift grouping should be performed to handle potential /// overlapping bounding boxes. While this value should not be set and users /// should employ non-maxima suppression instead, we support setting it as a /// library function. pub use_meanshift_grouping: bool, <|fim▁hole|> /// The `finalThreshold` parameter is mainly used to select the clusters /// that have at least `finalThreshold + 1` rectangles. 
This parameter is /// passed when meanShift is enabled; the function rejects the small /// clusters containing less than or equal to `finalThreshold` rectangles, /// computes the average rectangle size for the rest of the accepted /// clusters and adds those to the output rectangle list. pub final_threshold: f64, } const DEFAULT_WIN_SIGMA: f64 = -1f64; const DEFAULT_NLEVELS: usize = 64; impl Default for HogParams { fn default() -> HogParams { let win_sigma = { if cfg!(feature = "cuda") { 4.0 } else { DEFAULT_WIN_SIGMA } }; HogParams { win_size: Size2i::new(64, 128), block_size: Size2i::new(16, 16), block_stride: Size2i::new(8, 8), cell_size: Size2i::new(8, 8), nbins: 9, win_sigma: win_sigma, l2hys_threshold: 0.2, gamma_correction: false, nlevels: DEFAULT_NLEVELS, hit_threshold: 0f64, win_stride: Size2i::new(8, 8), padding: Size2i::default(), scale: 1.05, group_threshold: 2, final_threshold: 2.0, use_meanshift_grouping: false, } } } enum CHogDescriptor {} /// `HogDescriptor` implements Histogram of Oriented Gradients. #[derive(Debug)] pub struct HogDescriptor { inner: *mut CHogDescriptor, /// Hog parameters. 
pub params: HogParams, } unsafe impl Send for HogDescriptor {} extern "C" { fn cv_hog_new() -> *mut CHogDescriptor; fn cv_hog_drop(hog: *mut CHogDescriptor); fn cv_hog_set_svm_detector(hog: *mut CHogDescriptor, svm: *mut CSvmDetector); fn cv_hog_detect( hog: *mut CHogDescriptor, image: *mut CMat, objs: *mut CVec<Rect>, weights: *mut CVec<c_double>, win_stride: Size2i, padding: Size2i, scale: c_double, final_threshold: c_double, use_means_shift: bool, ); } impl Default for HogDescriptor { fn default() -> HogDescriptor { HogDescriptor { inner: unsafe { cv_hog_new() }, params: HogParams::default(), } } } impl ObjectDetect for HogDescriptor { fn detect(&self, image: &Mat) -> Vec<(Rect, f64)> { let mut detected = CVec::<Rect>::default(); let mut weights = CVec::<c_double>::default(); unsafe { cv_hog_detect( self.inner, image.inner, &mut detected, &mut weights, self.params.win_stride, self.params.padding, self.params.scale, self.params.final_threshold, self.params.use_meanshift_grouping, ) } let results = detected.unpack(); let weights = weights.unpack(); results.into_iter().zip(weights).collect::<Vec<_>>() } } impl HogDescriptor { /// Creates a HogDescriptor with provided parameters. pub fn with_params(params: HogParams) -> HogDescriptor { HogDescriptor { inner: unsafe { cv_hog_new() }, params: params, } } /// Sets the SVM detector. pub fn set_svm_detector(&mut self, detector: SvmDetector) { unsafe { cv_hog_set_svm_detector(self.inner, detector.inner) } } } impl Drop for HogDescriptor { fn drop(&mut self) { unsafe { cv_hog_drop(self.inner) } } }<|fim▁end|>
<|file_name|>main.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
# -*- coding:utf-8 -*- print("Hello")
<|file_name|>JobsAdminEntry.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2016 Singular Studios (a.k.a Atom Tecnologia) - www.opensingular.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at<|fim▁hole|> * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opensingular.requirement.module.admin.healthsystem.extension; import org.apache.wicket.markup.html.panel.Panel; import org.opensingular.requirement.module.admin.healthsystem.panel.JobPanel; public class JobsAdminEntry implements AdministrationEntryExtension { @Override public String name() { return "Jobs"; } @Override public Panel makePanel(String id) { return new JobPanel(id); } }<|fim▁end|>
<|file_name|>mtucicoin_fr.ts<|end_file_name|><|fim▁begin|><TS language="fr" version="2.0"> <context> <name>AddressBookPage</name> <message> <source>Right-click to edit address or label</source> <translation>Faites un clic droit pour modifier l'adresse ou l'étiquette</translation> </message> <message> <source>Create a new address</source> <translation>Créer une nouvelle adresse</translation> </message> <message> <source>&amp;New</source> <translation>&amp;Nouveau</translation> </message> <message> <source>Copy the currently selected address to the system clipboard</source> <translation>Copier l'adresse courante sélectionnée dans le presse-papier</translation> </message> <message> <source>&amp;Copy</source> <translation>&amp;Copier</translation> </message> <message> <source>Delete the currently selected address from the list</source> <translation>Effacer l'adresse actuellement sélectionnée de la liste</translation> </message> <message> <source>&amp;Delete</source> <translation>&amp;Supprimer</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Exporter les données de l'onglet courant vers un fichier</translation> </message> <message> <source>&amp;Export</source> <translation>&amp;Exporter</translation> </message> <message> <source>C&amp;lose</source> <translation>&amp;Fermer</translation> </message> <message> <source>Choose the address to send coins to</source> <translation>Choisir l'adresse à laquelle envoyer de la monnaie</translation> </message> <message> <source>Choose the address to receive coins with</source> <translation>Choisir l'adresse avec laquelle recevoir de la monnaie</translation> </message> <message> <source>C&amp;hoose</source> <translation>C&amp;hoisir</translation> </message> <message> <source>Sending addresses</source> <translation>Adresses d'envoi</translation> </message> <message> <source>Receiving addresses</source> <translation>Adresses de réception</translation> </message> <message> 
<source>These are your Mtucicoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>Ce sont vos adresses Mtucicoin pour l'envoi de paiements. Vérifiez toujours le montant et l'adresse de réception avant l'envoi de monnaies.</translation> </message> <message> <source>These are your Mtucicoin addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source> <translation>Ce sont vos adresses Mtucicoin pour la réception de paiements. Il est recommandé d'utiliser une nouvelle adresse de réception pour chaque transaction.</translation> </message> <message> <source>&amp;Copy Address</source> <translation>&amp;Copier l'adresse</translation> </message> <message> <source>Copy &amp;Label</source> <translation>Copier l'é&amp;tiquette</translation> </message> <message> <source>&amp;Edit</source> <translation>&amp;Modifier</translation> </message> <message> <source>Export Address List</source> <translation>Exporter la liste d'adresses</translation> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Valeurs séparées par des virgules (*.csv)</translation> </message> <message> <source>Exporting Failed</source> <translation>L'exportation a échoué</translation> </message> <message> <source>There was an error trying to save the address list to %1. Please try again.</source> <translation>Une erreur est survenue lors de l'enregistrement de la liste d'adresses vers %1. 
Essayez à nouveau.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <source>Label</source> <translation>Étiquette</translation> </message> <message> <source>Address</source> <translation>Adresse</translation> </message> <message> <source>(no label)</source> <translation>(aucune étiquette)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <source>Passphrase Dialog</source> <translation>Dialogue de phrase de passe</translation> </message> <message> <source>Enter passphrase</source> <translation>Saisir la phrase de passe</translation> </message> <message> <source>New passphrase</source> <translation>Nouvelle phrase de passe</translation> </message> <message> <source>Repeat new passphrase</source> <translation>Répéter la phrase de passe</translation> </message> <message> <source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source> <translation>Utiliser pour désactiver le mode d'envoi trivial de paiement lorsque le compte système est compromis. 
N'assure pas une sécurité efficace.</translation> </message> <message> <source>For anonymization only</source> <translation>Pour anonymisation uniquement</translation> </message> <message> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;ten or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Saisir la nouvelle phrase de passe pour le portefeuille.&lt;br/&gt;Veuillez utiliser une phrase de passe de &lt;b&gt;dix caractères aléatoires ou plus&lt;/b&gt;, ou de &lt;b&gt;huit mots ou plus&lt;/b&gt;.</translation> </message> <message> <source>Encrypt wallet</source> <translation>Chiffrer le portefeuille</translation> </message> <message> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Cette opération nécessite votre phrase de passe pour déverrouiller le portefeuille.</translation> </message> <message> <source>Unlock wallet</source> <translation>Déverrouiller le portefeuille</translation> </message> <message> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Cette opération nécessite votre phrase de passe pour déchiffrer le portefeuille.</translation> </message> <message> <source>Decrypt wallet</source> <translation>Déchiffrer le portefeuille</translation> </message> <message> <source>Change passphrase</source> <translation>Changer le mot de passe</translation> </message> <message> <source>Enter the old and new passphrase to the wallet.</source> <translation>Saisir l’ancienne phrase de passe pour le portefeuille ainsi que la nouvelle.</translation> </message> <message> <source>Confirm wallet encryption</source> <translation>Confirmer le chiffrement du portefeuille</translation> </message> <message> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR MTUCICOIN&lt;/b&gt;!</source> <translation>Attention : Si vous chiffrez votre 
portefeuille et perdez votre phrase de passe, vous &lt;b&gt;PERDREZ TOUS VOS MTUCICOIN&lt;/b&gt; !</translation> </message> <message> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Êtes-vous sûr de vouloir chiffrer votre portefeuille ?</translation> </message> <message> <source>Wallet encrypted</source> <translation>Portefeuille chiffré</translation> </message> <message> <source>Mtucicoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your mtucicoins from being stolen by malware infecting your computer.</source> <translation>Mtucicoin va à présent se fermer pour terminer le chiffrement. N'oubliez pas que le chiffrement de votre portefeuille n'est pas une protection totale contre le vol par des logiciels malveillants qui infecteraient votre ordinateur.</translation> </message> <message> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>IMPORTANT : Toute sauvegarde précédente de votre fichier de portefeuille devrait être remplacée par le nouveau fichier de portefeuille chiffré. Pour des raisons de sécurité, les sauvegardes précédentes de votre fichier de portefeuille non chiffré deviendront inutilisables dès que vous commencerez à utiliser le nouveau portefeuille chiffré.</translation> </message> <message> <source>Wallet encryption failed</source> <translation>Le chiffrement du portefeuille a échoué</translation> </message> <message> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Le chiffrement du portefeuille a échoué en raison d'une erreur interne. 
Votre portefeuille n'a pas été chiffré.</translation> </message> <message> <source>The supplied passphrases do not match.</source> <translation>Les phrases de passe saisies ne correspondent pas.</translation> </message> <message> <source>Wallet unlock failed</source> <translation>Le déverrouillage du portefeuille a échoué</translation> </message> <message> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>La phrase de passe saisie pour déchiffrer le portefeuille était incorrecte.</translation> </message> <message> <source>Wallet decryption failed</source> <translation>Le déchiffrage du portefeuille a échoué</translation> </message> <message> <source>Wallet passphrase was successfully changed.</source> <translation>La phrase de passe du portefeuille a été modifiée avec succès.</translation> </message> <message> <source>Warning: The Caps Lock key is on!</source> <translation>Attention : la touche Verr. Maj. est activée !</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <source>Mtucicoin Core</source> <translation>Mtucicoin Core</translation> </message> <message> <source>Wallet</source> <translation>Portefeuille</translation> </message> <message> <source>Node</source> <translation>Nœud</translation> </message> <message> <source>&amp;Overview</source> <translation>&amp;Vue d'ensemble</translation> </message> <message> <source>Show general overview of wallet</source> <translation>Afficher une vue d’ensemble du portefeuille</translation> </message> <message> <source>&amp;Send</source> <translation>&amp;Envoyer</translation> </message> <message> <source>Send coins to a Mtucicoin address</source> <translation>Envoyer des pièces sur une adresse Mtucicoin</translation> </message> <message> <source>&amp;Receive</source> <translation>&amp;Recevoir</translation> </message> <message> <source>Request payments (generates QR codes and mtucicoin: URIs)</source> <translation>Demande de paiements (Générer des 
QR code et des URIs mtucicoin)</translation> </message> <message> <source>&amp;Transactions</source> <translation>&amp;Transactions</translation> </message> <message> <source>Browse transaction history</source> <translation>Parcourir l'historique des transactions</translation> </message> <message> <source>E&amp;xit</source> <translation>Q&amp;uitter</translation> </message> <message> <source>Quit application</source> <translation>Quitter l’application</translation> </message> <message> <source>&amp;About Mtucicoin Core</source> <translation>À propos du noyau Mtucicoin</translation> </message> <message> <source>Show information about Mtucicoin Core</source> <translation>Affichez des informations à propos de Mtucicoin Core</translation> </message> <message> <source>About &amp;Qt</source> <translation>À propos de &amp;Qt</translation> </message> <message> <source>Show information about Qt</source> <translation>Afficher des informations sur Qt</translation> </message> <message> <source>&amp;Options...</source> <translation>&amp;Options...</translation> </message> <message> <source>Modify configuration options for Mtucicoin</source> <translation>Modifier les options de configuration pour Mtucicoin</translation> </message> <message> <source>&amp;Show / Hide</source> <translation>&amp;Afficher / Cacher</translation> </message> <message> <source>Show or hide the main Window</source> <translation>Afficher ou masquer la fenêtre principale</translation> </message> <message> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Chiffrer le portefeuille...</translation> </message> <message> <source>Encrypt the private keys that belong to your wallet</source> <translation>Chiffrer les clefs privées de votre portefeuille</translation> </message> <message> <source>&amp;Backup Wallet...</source> <translation>Sauvegarder le &amp;portefeuille...</translation> </message> <message> <source>Backup wallet to another location</source> <translation>Sauvegarder le portefeuille vers un 
autre emplacement</translation> </message> <message> <source>&amp;Change Passphrase...</source> <translation>&amp;Changer la phrase de passe...</translation> </message> <message> <source>Change the passphrase used for wallet encryption</source> <translation>Modifier la phrase de passe utilisée pour le chiffrement du portefeuille</translation> </message> <message> <source>&amp;Unlock Wallet...</source> <translation>&amp;Déverrouiller le portefeuille</translation> </message> <message> <source>Unlock wallet</source> <translation>Déverrouiller le portefeuille</translation> </message> <message> <source>&amp;Lock Wallet</source> <translation>&amp;Vérouiller le portefeuille</translation> </message> <message> <source>Sign &amp;message...</source> <translation>&amp;Signer le message...</translation> </message> <message> <source>Sign messages with your Mtucicoin addresses to prove you own them</source> <translation>Signer les messages avec votre adresses Mtucicoin pour prouver que vous êtes le propriétaire</translation> </message> <message> <source>&amp;Verify message...</source> <translation>&amp;Vérifier un message...</translation> </message> <message> <source>Verify messages to ensure they were signed with specified Mtucicoin addresses</source> <translation>Vérifier les messages pour vous assurer qu'ils ont été signés avec les adresses Mtucicoin spécifiées</translation> </message> <message> <source>&amp;Information</source> <translation>&amp;Informations</translation> </message> <message> <source>Show diagnostic information</source> <translation>Voir les informaion de diagnostique</translation> </message> <message> <source>&amp;Debug console</source> <translation>&amp;Console de débogage</translation> </message> <message> <source>Open debugging console</source> <translation>Ouvrir la console de débogage</translation> </message> <message> <source>&amp;Network Monitor</source> <translation>&amp;Moniteur réseau</translation> </message> <message> <source>Show network 
monitor</source> <translation>Voir le moniteur réseau</translation> </message> <message> <source>&amp;Peers list</source> <translation>Et la liste des pairs</translation> </message> <message> <source>Show peers info</source> <translation>Voir les infos des pairs</translation> </message> <message> <source>Wallet &amp;Repair</source> <translation>Portefeuille et Réparation</translation> </message> <message> <source>Show wallet repair options</source> <translation>Afficher les options de réparation du portefeuille</translation> </message> <message> <source>Open &amp;Configuration File</source> <translation>Ouvrir Fichier de &amp;Configuration</translation> </message> <message> <source>Open configuration file</source> <translation>Ouvrir fichier de configuration</translation> </message> <message> <source>Show Automatic &amp;Backups</source> <translation>Afficher les sauvegardes automatiques</translation> </message> <message> <source>Show automatically created wallet backups</source> <translation>Afficher automatiquement les sauvegardes de portefeuille créés </translation> </message> <message> <source>&amp;Sending addresses...</source> <translation>Adresses d'&amp;envoi...</translation> </message> <message> <source>Show the list of used sending addresses and labels</source> <translation>Afficher la liste d'adresses d'envoi et d'étiquettes utilisées</translation> </message> <message> <source>&amp;Receiving addresses...</source> <translation>Adresses de &amp;réception...</translation> </message> <message> <source>Show the list of used receiving addresses and labels</source> <translation>Afficher la liste d'adresses de réception et d'étiquettes utilisées</translation> </message> <message> <source>Open &amp;URI...</source> <translation>Ouvrir un &amp;URI...</translation> </message> <message> <source>Open a mtucicoin: URI or payment request</source> <translation>Ouvrir une URI ou demande de paiement mtucicoin</translation> </message> <message> <source>&amp;Command-line 
options</source> <translation>Options de ligne de &amp;commande</translation> </message> <message> <source>Mtucicoin Core client</source> <translation>Client Mtucicoin Core </translation> </message> <message numerus="yes"> <source>Processed %n blocks of transaction history.</source> <translation><numerusform> Traités %n blocs de l'historique des transactions.</numerusform><numerusform> Traités %n blocs de l'historique des transactions.</numerusform></translation> </message> <message> <source>Synchronizing additional data: %p%</source> <translation>Synchronisation des données additionnelles: %p%</translation> </message> <message> <source>Show the Mtucicoin Core help message to get a list with possible Mtucicoin command-line options</source> <translation>Afficher le message d'aide de Mtucicoin Core pour obtenir une liste des options de ligne de commande Bitcoin possibles.</translation> </message> <message> <source>&amp;File</source> <translation>&amp;Fichier</translation> </message> <message> <source>&amp;Settings</source> <translation>&amp;Réglages</translation> </message> <message> <source>&amp;Tools</source> <translation>&amp;Outils</translation> </message> <message> <source>&amp;Help</source> <translation>&amp;Aide</translation> </message> <message> <source>Tabs toolbar</source> <translation>Barre d'outils des onglets</translation> </message> <message numerus="yes"> <source>%n active connection(s) to Mtucicoin network</source> <translation><numerusform>%n connexion active au réseau Mtucicoin </numerusform><numerusform>%n connexions actives au réseau Mtucicoin </numerusform></translation> </message> <message> <source>Synchronizing with network...</source> <translation>Synchronisation avec le réseau en cours...</translation> </message> <message> <source>Importing blocks from disk...</source> <translation>Importation des blocs depuis le disque...</translation> </message> <message> <source>Reindexing blocks on disk...</source> <translation>Réindexation des blocs sur 
le disque...</translation> </message> <message> <source>No block source available...</source> <translation>Aucune source de blocs disponible...</translation> </message> <message> <source>Up to date</source> <translation>À jour</translation> </message> <message numerus="yes"> <source>%n hour(s)</source> <translation><numerusform>%n heures</numerusform><numerusform>%n heures</numerusform></translation> </message> <message numerus="yes"> <source>%n day(s)</source> <translation><numerusform>%n jours</numerusform><numerusform>%n jours</numerusform></translation> </message> <message numerus="yes"> <source>%n week(s)</source> <translation><numerusform>%n semaines</numerusform><numerusform>%n semaines</numerusform></translation> </message> <message> <source>%1 and %2</source> <translation>%1 et %2</translation> </message> <message numerus="yes"> <source>%n year(s)</source> <translation><numerusform>%n année(s)</numerusform><numerusform>%n années</numerusform></translation> </message> <message> <source>%1 behind</source> <translation>%1 en retard</translation> </message> <message> <source>Catching up...</source> <translation>Rattrapage en cours...</translation> </message> <message> <source>Last received block was generated %1 ago.</source> <translation>Le dernier bloc reçu avait été généré il y a %1.</translation> </message> <message> <source>Transactions after this will not yet be visible.</source> <translation>Les transactions après ceci ne sont pas encore visibles.</translation> </message> <message> <source>Error</source> <translation>Erreur</translation> </message> <message> <source>Warning</source> <translation>Avertissement</translation> </message> <message> <source>Information</source> <translation>Information</translation> </message> <message> <source>Sent transaction</source> <translation>Transaction envoyée</translation> </message> <message> <source>Incoming transaction</source> <translation>Transaction entrante</translation> </message> <message> <source>Date: %1 
Amount: %2 Type: %3 Address: %4 </source> <translation>Date : %1 Montant : %2 Type : %3 Adresse : %4 </translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Le portefeuille est &lt;b&gt;chiffré&lt;/b&gt; et est actuellement &lt;b&gt;déverrouillé&lt;/b&gt;</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt; for anonimization only</source> <translation>Le portefeuille est &lt;b&gt;chiffré&lt;/b&gt; et est actuellement &lt;b&gt;déverrouillé&lt;/b&gt; seulement pour l'anonymisation</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Le portefeuille est &lt;b&gt;chiffré&lt;/b&gt; et actuellement &lt;b&gt;verrouillé&lt;/b&gt;</translation> </message> </context> <context> <name>ClientModel</name> <message> <source>Total: %1 (DS compatible: %2 / Enabled: %3)</source> <translation>Total: %1 (Compatible DS: %2 / Actifs: %3)</translation> </message> <message> <source>Network Alert</source> <translation>Alerte réseau</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <source>Quantity:</source> <translation>Quantité :</translation> </message> <message> <source>Bytes:</source> <translation>Octets :</translation> </message> <message> <source>Amount:</source> <translation>Montant :</translation> </message> <message> <source>Priority:</source> <translation>Priorité :</translation> </message> <message> <source>Fee:</source> <translation>Frais :</translation> </message> <message> <source>Coin Selection</source> <translation>Sélection de la Monnaie</translation> </message> <message> <source>Dust:</source> <translation>Poussière:</translation> </message> <message> <source>After Fee:</source> <translation>Après les frais :</translation> </message> <message> <source>Change:</source> 
<translation>Monnaie :</translation> </message> <message> <source>(un)select all</source> <translation>Tout (dé)sélectionner</translation> </message> <message> <source>Tree mode</source> <translation>Mode arborescence</translation> </message> <message> <source>List mode</source> <translation>Mode liste</translation> </message> <message> <source>(1 locked)</source> <translation>(1 verrouillé)</translation> </message> <message> <source>Amount</source> <translation>Montant</translation> </message> <message> <source>Received with label</source> <translation>Reçu avec étiquette</translation> </message> <message> <source>Received with address</source> <translation>Reçu avec adresse</translation> </message> <message> <source>DS Rounds</source> <translation>Cycles DS</translation> </message> <message> <source>Date</source> <translation>Date</translation> </message> <message> <source>Confirmations</source> <translation>Confirmations</translation> </message> <message> <source>Confirmed</source> <translation>Confirmée</translation> </message> <message> <source>Priority</source> <translation>Priorité</translation> </message> <message> <source>Copy address</source> <translation>Copier l’adresse</translation> </message> <message> <source>Copy label</source> <translation>Copier l’étiquette</translation> </message> <message> <source>Copy amount</source> <translation>Copier le montant</translation> </message> <message> <source>Copy transaction ID</source> <translation>Copier l'ID de la transaction</translation> </message> <message> <source>Lock unspent</source> <translation>Verrouiller ce qui n'est pas dépensé</translation> </message> <message> <source>Unlock unspent</source> <translation>Déverrouiller ce qui n'est pas dépensé</translation> </message> <message> <source>Copy quantity</source> <translation>Copier la quantité</translation> </message> <message> <source>Copy fee</source> <translation>Copier les frais</translation> </message> <message> <source>Copy after fee</source> 
<translation>Copier le montant après les frais</translation> </message> <message> <source>Copy bytes</source> <translation>Copier les octets</translation> </message> <message> <source>Copy priority</source> <translation>Copier la priorité</translation> </message> <message> <source>Copy dust</source> <translation>Copier poussière</translation> </message> <message> <source>Copy change</source> <translation>Copier la monnaie</translation> </message> <message> <source>Non-anonymized input selected. &lt;b&gt;Darksend will be disabled.&lt;/b&gt;&lt;br&gt;&lt;br&gt;If you still want to use Darksend, please deselect all non-nonymized inputs first and then check Darksend checkbox again.</source> <translation>Entrée non-anonymisées sélectionnée. &lt;b&gt; Darksend sera désactivé. &lt;/ b&gt; &lt;br&gt; Si vous voulez continuer à utiliser Darksend, veuillez désélectionner toutes les entrées non-anonymisées d'abord, puis vérifier à nouveau la case Darksend.</translation> </message> <message> <source>highest</source> <translation>la plus élevée</translation> </message> <message> <source>higher</source> <translation>plus élevée</translation> </message> <message> <source>high</source> <translation>élevée</translation> </message> <message> <source>medium-high</source> <translation>moyennement-élevée</translation> </message> <message> <source>Can vary +/- %1 duff(s) per input.</source> <translation>Peut varier de +/- %1 duff(s) par entrée.</translation> </message> <message> <source>n/a</source> <translation>n/a</translation> </message> <message> <source>medium</source> <translation>moyenne</translation> </message> <message> <source>low-medium</source> <translation>moyennement-basse</translation> </message> <message> <source>low</source> <translation>basse</translation> </message> <message> <source>lower</source> <translation>plus basse</translation> </message> <message> <source>lowest</source> <translation>la plus basse</translation> </message> <message> <source>(%1 
locked)</source> <translation>(%1 verrouillé)</translation> </message> <message> <source>none</source> <translation>aucun</translation> </message> <message> <source>yes</source> <translation>oui</translation> </message> <message> <source>no</source> <translation>non</translation> </message> <message> <source>This label turns red, if the transaction size is greater than 1000 bytes.</source> <translation>Cette étiquette devient rouge si la taille de la transaction est plus grande que 1 000 octets.</translation> </message> <message> <source>This means a fee of at least %1 per kB is required.</source> <translation>Cela signifie qu'une taxe d'au moins %1 par ko est nécessaire </translation> </message> <message> <source>Can vary +/- 1 byte per input.</source> <translation>Peut varier +/- 1 octet par entrée.</translation> </message> <message> <source>Transactions with higher priority are more likely to get included into a block.</source> <translation>Les transactions à priorité plus haute sont plus à même d'être incluses dans un bloc.</translation> </message> <message> <source>This label turns red, if the priority is smaller than "medium".</source> <translation>Cette étiquette devient rouge si la priorité est plus basse que « moyenne »</translation> </message> <message> <source>This label turns red, if any recipient receives an amount smaller than %1.</source> <translation>Cette étiquette devient rouge si un destinataire reçoit un montant inférieur à %1.</translation> </message> <message> <source>(no label)</source> <translation>(aucune étiquette)</translation> </message> <message> <source>change from %1 (%2)</source> <translation>monnaie de %1 (%2)</translation> </message> <message> <source>(change)</source> <translation>(monnaie)</translation> </message> </context> <context> <name>DarksendConfig</name> <message> <source>Configure Darksend</source> <translation>Configurer Darksend</translation> </message> <message> <source>Basic Privacy</source> 
<translation>Confidentialité normale</translation> </message> <message> <source>High Privacy</source> <translation>Confidentialité élevée</translation> </message> <message> <source>Maximum Privacy</source> <translation>Confidentialité maximale</translation> </message> <message> <source>Please select a privacy level.</source> <translation>Veuillez choisir un niveau de confidentialité.</translation> </message> <message> <source>Use 2 separate masternodes to mix funds up to 1000 MTUCICOIN</source> <translation>Utiliser 2 masternodes pour mélanger jusqu'à 1000 MTUCICOIN</translation> </message> <message> <source>Use 8 separate masternodes to mix funds up to 1000 MTUCICOIN</source> <translation>Utiliser 8 masternodes pour mélanger jusqu'à 1000 MTUCICOIN</translation> </message> <message> <source>Use 16 separate masternodes</source> <translation>Utiliser 16 masternodes</translation> </message> <message> <source>This option is the quickest and will cost about ~0.025 MTUCICOIN to anonymize 1000 MTUCICOIN</source> <translation>Cette option est la plus rapide et coûtera environ 0,025 MTUCICOIN pour anonymiser 1000 MTUCICOIN</translation> </message> <message> <source>This option is moderately fast and will cost about 0.05 MTUCICOIN to anonymize 1000 MTUCICOIN</source> <translation>Cette option est un peu moins rapide et coûtera environ 0,05 MTUCICOIN pour anonymiser 1000 MTUCICOIN</translation> </message> <message> <source>0.1 MTUCICOIN per 1000 MTUCICOIN you anonymize.</source> <translation>0,1 MTUCICOIN par 1000 MTUCICOIN anonymisés.</translation> </message> <message> <source>This is the slowest and most secure option. Using maximum anonymity will cost</source> <translation>Cette option est le plus lente et la plus sécurisée. 
Utiliser l'anonymisation maximale coûtera</translation> </message> <message> <source>Darksend Configuration</source> <translation>Configuration de Darksend</translation> </message> <message> <source>Darksend was successfully set to basic (%1 and 2 rounds). You can change this at any time by opening Mtucicoin's configuration screen.</source> <translation>Darksend est réglé avec succès sur normal (%1 and 2 rounds). Vous pouvez changer cela à tout moment en ouvrant la fenêtre de configuration du Mtucicoin.</translation> </message> <message> <source>Darksend was successfully set to high (%1 and 8 rounds). You can change this at any time by opening Mtucicoin's configuration screen.</source> <translation>Darksend est réglé avec succès sur haut (%1 and 8 rounds). Vous pouvez changer cela à tout moment en ouvrant la fenêtre de configuration du Mtucicoin.</translation> </message> <message> <source>Darksend was successfully set to maximum (%1 and 16 rounds). You can change this at any time by opening Mtucicoin's configuration screen.</source> <translation>Darksend est réglé avec succès sur maximum (%1 and 16 rounds). Vous pouvez changer cela à tout moment en ouvrant la fenêtre de configuration du Mtucicoin.</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <source>Edit Address</source> <translation>Modifier l'adresse</translation> </message> <message> <source>&amp;Label</source> <translation>&amp;Étiquette</translation> </message> <message> <source>The label associated with this address list entry</source> <translation>L'étiquette associée à cette entrée de la liste d'adresses</translation> </message> <message> <source>&amp;Address</source> <translation>&amp;Adresse</translation> </message> <message> <source>The address associated with this address list entry. This can only be modified for sending addresses.</source> <translation>L'adresse associée à cette entrée de la liste d'adresses. 
Ceci ne peut être modifié que pour les adresses d'envoi.</translation> </message> <message> <source>New receiving address</source> <translation>Nouvelle adresse de réception</translation> </message> <message> <source>New sending address</source> <translation>Nouvelle adresse d’envoi</translation> </message> <message> <source>Edit receiving address</source> <translation>Modifier l’adresse de réception</translation> </message> <message> <source>Edit sending address</source> <translation>Modifier l’adresse d'envoi</translation> </message> <message> <source>The entered address "%1" is not a valid Mtucicoin address.</source> <translation>L'adresse entrée "%1" n'est pas une adresse Mtucicoin valide.</translation> </message> <message> <source>The entered address "%1" is already in the address book.</source> <translation>L’adresse fournie « %1 » est déjà présente dans le carnet d'adresses.</translation> </message> <message> <source>Could not unlock wallet.</source> <translation>Impossible de déverrouiller le portefeuille.</translation> </message> <message> <source>New key generation failed.</source> <translation>Échec de génération de la nouvelle clef.</translation> </message> </context> <context> <name>FreespaceChecker</name> <message> <source>A new data directory will be created.</source> <translation>Un nouveau répertoire de données sera créé.</translation> </message> <message> <source>name</source> <translation>nom</translation> </message> <message> <source>Directory already exists. Add %1 if you intend to create a new directory here.</source> <translation>Le répertoire existe déjà. 
Ajoutez %1 si vous voulez créer un nouveau répertoire ici.</translation> </message> <message> <source>Path already exists, and is not a directory.</source> <translation>Le chemin existe déjà et n'est pas un répertoire.</translation> </message> <message> <source>Cannot create data directory here.</source> <translation>Impossible de créer un répertoire de données ici.</translation> </message> </context> <context> <name>HelpMessageDialog</name> <message> <source>Mtucicoin Core</source> <translation>Mtucicoin Core</translation> </message> <message> <source>version</source> <translation>version</translation> </message> <message> <source>(%1-bit)</source> <translation>(%1-bit)</translation> </message> <message> <source>About Mtucicoin Core</source> <translation>A propos de Mtucicoin Core</translation> </message> <message> <source>Command-line options</source> <translation>Options de ligne de commande</translation> </message> <message> <source>Usage:</source> <translation>Utilisation :</translation> </message> <message> <source>command-line options</source> <translation>options de ligne de commande</translation> </message> <message> <source>UI options</source> <translation>Options de l'interface utilisateur</translation> </message> <message> <source>Choose data directory on startup (default: 0)</source> <translation>Choisir un répertoire de données au démarrage (par défaut : 0)</translation> </message> <message> <source>Set language, for example "de_DE" (default: system locale)</source> <translation>Définir la langue, par exemple « fr_CA » (par défaut : la langue du système)</translation> </message> <message> <source>Start minimized</source> <translation>Démarrer minimisé</translation> </message> <message> <source>Set SSL root certificates for payment request (default: -system-)</source> <translation>Définir les certificats SSL racine pour les requêtes de paiement (par défaut : -système-)</translation> </message> <message> <source>Show splash screen on startup (default: 
1)</source> <translation>Afficher l'écran d'accueil au démarrage (par défaut : 1)</translation> </message> </context> <context> <name>Intro</name> <message> <source>Welcome</source> <translation>Bienvenue</translation> </message> <message> <source>Welcome to Mtucicoin Core.</source> <translation>Bienvenue à Mtucicoin Core</translation> </message> <message> <source>As this is the first time the program is launched, you can choose where Mtucicoin Core will store its data.</source> <translation>Comme il s'agit du premier lancement du logiciel, vous pouvez choisir l'emplacement où Mtucicoin Core sauvegardera ses données.</translation> </message> <message> <source>Mtucicoin Core will download and store a copy of the Mtucicoin block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source> <translation>Mtucicoin Core téléchargera et sauvegardera une copie de la chaîne de blocs Mtucicoin. Au moins %1Go de données seront sauvegardées dans ce répertoire, et cette taille augmentera avec le temps. 
Le portefeuille sera aussi sauvegardé dans ce répertoire.</translation> </message> <message> <source>Use the default data directory</source> <translation>Utiliser le répertoire de données par défaut</translation> </message> <message> <source>Use a custom data directory:</source> <translation>Utiliser un répertoire de données personnalisé :</translation> </message> <message> <source>Mtucicoin Core</source> <translation>Mtucicoin Core</translation> </message> <message> <source>Error: Specified data directory "%1" cannot be created.</source> <translation>Erreur: Le répertoire de données spécifié « %1 » ne peut pas être créé.</translation> </message> <message> <source>Error</source> <translation>Erreur</translation> </message> <message> <source>%1 GB of free space available</source> <translation>%1 Go d'espace libre disponible</translation> </message> <message> <source>(of %1 GB needed)</source> <translation>( de %1 Go nécessaire)</translation> </message> </context> <context> <name>OpenURIDialog</name> <message> <source>Open URI</source> <translation>Ouvrir un URI</translation> </message> <message> <source>Open payment request from URI or file</source> <translation>Ouvrir une demande de paiement à partir d'un URI ou d'un fichier</translation> </message> <message> <source>URI:</source> <translation>URI :</translation> </message> <message> <source>Select payment request file</source> <translation>Choisir le fichier de demande de paiement</translation> </message> <message> <source>Select payment request file to open</source> <translation>Choisir le fichier de demande de paiement à ouvrir</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <source>Options</source> <translation>Options</translation> </message> <message> <source>&amp;Main</source> <translation>Réglages &amp;principaux</translation> </message> <message> <source>Automatically start Mtucicoin after logging in to the system.</source> <translation>Démarrer Mtucicoin automatiquement 
au démarrage du système.</translation> </message> <message> <source>&amp;Start Mtucicoin on system login</source> <translation>&amp;Démarrer Mtucicoin au démarrage du système</translation> </message> <message> <source>Size of &amp;database cache</source> <translation>Taille du cache de la base de &amp;données</translation> </message> <message> <source>MB</source> <translation>Mo</translation> </message> <message> <source>Number of script &amp;verification threads</source> <translation>Nombre d'exétrons de &amp;vérification de script</translation> </message> <message> <source>(0 = auto, &lt;0 = leave that many cores free)</source> <translation>(0 = auto, &lt; 0 = laisser ce nombre de cœurs inutilisés)</translation> </message> <message> <source>Darksend rounds to use</source> <translation>Nombre de cycles Darksend à effectuer</translation> </message> <message> <source>This amount acts as a threshold to turn off Darksend once it's reached.</source> <translation>Ce montant est le seuil pour désactiver Darksend dès qu'il est atteint.</translation> </message> <message> <source>Amount of Mtucicoin to keep anonymized</source> <translation>Nombre de Mtucicoin à conserver anonymisés</translation> </message> <message> <source>W&amp;allet</source> <translation>&amp;Portefeuille</translation> </message> <message> <source>If you disable the spending of unconfirmed change, the change from a transaction&lt;br/&gt;cannot be used until that transaction has at least one confirmation.&lt;br/&gt;This also affects how your balance is computed.</source> <translation>Si vous désactivez la dépense de la monnaie non confirmée, la monnaie d'une transaction&lt;br/&gt;ne peut pas être utilisée tant que cette transaction n'a pas reçu au moins une confirmation.&lt;br/&gt;Ceci affecte aussi comment votre solde est calculé.</translation> </message> <message> <source>Accept connections from outside</source> <translation>Accepter les connexions provenant de l'extérieur</translation> </message> 
<message> <source>Allow incoming connections</source> <translation>Autoriser les connexions entrantes</translation> </message> <message> <source>Connect to the Mtucicoin network through a SOCKS5 proxy.</source> <translation>Se connecter au réseau Mtucicoin à travers un proxy SOCKS5.</translation> </message> <message> <source>&amp;Connect through SOCKS5 proxy (default proxy):</source> <translation>&amp;Connectez par SOCKS5 (proxy par défaut):</translation> </message> <message> <source>Expert</source> <translation>Expert</translation> </message> <message> <source>This setting determines the amount of individual masternodes that an input will be anonymized through.&lt;br/&gt;More rounds of anonymization gives a higher degree of privacy, but also costs more in fees.</source> <translation>Ce paramètre détermine le nombre de masternodes uniques par lesquels l'anonymisation sera effectuée.&lt;br/&gt;Plus le nombre de cycles d'anonymisation est important, plus le degré de confidentialité est élevé, mais les frais associés sont d'autant plus importants.</translation> </message> <message> <source>Whether to show coin control features or not.</source> <translation>Afficher ou non les fonctions de contrôle des pièces.</translation> </message> <message> <source>Enable coin &amp;control features</source> <translation>Activer les fonctions de &amp;contrôle des pièces </translation> </message> <message> <source>&amp;Spend unconfirmed change</source> <translation>&amp;Dépenser la monnaie non confirmée</translation> </message> <message> <source>&amp;Network</source> <translation>&amp;Réseau</translation> </message> <message> <source>Automatically open the Mtucicoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Ouvrir automatiquement le port client Mtucicoin sur le routeur. 
Cela ne fonctionne que sur les routeurs supportant et ayant activé UPnP.</translation> </message> <message> <source>Map port using &amp;UPnP</source> <translation>Mapper le port avec l'&amp;UPnP</translation> </message> <message> <source>Proxy &amp;IP:</source> <translation>&amp;IP du serveur mandataire :</translation> </message> <message> <source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source> <translation>Adresse IP du mandataire (par ex. IPv4 : 127.0.0.1 / IPv6 : ::1)</translation> </message> <message> <source>&amp;Port:</source> <translation>&amp;Port :</translation> </message> <message> <source>Port of the proxy (e.g. 9050)</source> <translation>Port du serveur mandataire (par ex. 9050)</translation> </message> <message> <source>&amp;Window</source> <translation>&amp;Fenêtre</translation> </message> <message> <source>Show only a tray icon after minimizing the window.</source> <translation>Afficher uniquement une icône système après minimisation.</translation> </message> <message> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimiser dans la barre système au lieu de la barre des tâches</translation> </message> <message> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimiser au lieu de quitter l'application lorsque la fenêtre est fermée. 
Si cette option est activée, l'application ne pourra être fermée qu'en sélectionnant Quitter dans le menu.</translation> </message> <message> <source>M&amp;inimize on close</source> <translation>M&amp;inimiser lors de la fermeture</translation> </message> <message> <source>&amp;Display</source> <translation>&amp;Affichage</translation> </message> <message> <source>User Interface &amp;language:</source> <translation>&amp;Langue de l'interface utilisateur :</translation> </message> <message> <source>The user interface language can be set here. This setting will take effect after restarting Mtucicoin.</source> <translation>La langue de l'interface utilisateur peut être modifiée ici. Ce paramètre sera pris en compte au redémarrage de Mtucicoin.</translation> </message> <message> <source>Language missing or translation incomplete? Help contributing translations here: https://www.transifex.com/projects/p/mtucicoin/</source> <translation>Langage manquant ou traduction incomplète ? Participez aux traductions ici : https://www.transifex.com/projects/p/mtucicoin/</translation> </message> <message> <source>User Interface Theme:</source> <translation>Thème d'Interface de l'utilisateur :</translation> </message> <message> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Unité d'affichage des montants :</translation> </message> <message> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Choisissez la sous-unité par défaut pour l'affichage dans l'interface et lors de l'envoi de pièces.</translation> </message> <message> <source>Decimal digits</source> <translation>Nombre de décimales</translation> </message> <message> <source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source> <translation>URL de tiers (par ex. 
un explorateur de blocs) apparaissant dans l'onglet des transactions comme éléments du menu contextuel. %s dans l'URL est remplacé par le hachage de la transaction. Les URL multiples sont séparées par une barre verticale |.</translation> </message> <message> <source>Third party transaction URLs</source> <translation>URL de transaction d'un tiers</translation> </message> <message> <source>Active command-line options that override above options:</source> <translation>Options actives de ligne de commande qui annulent les options ci-dessus :</translation> </message> <message> <source>Reset all client options to default.</source> <translation>Réinitialiser toutes les options du client aux valeurs par défaut.</translation> </message> <message> <source>&amp;Reset Options</source> <translation>&amp;Réinitialisation des options</translation> </message> <message> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <source>&amp;Cancel</source> <translation>A&amp;nnuler</translation> </message> <message> <source>default</source> <translation>par défaut</translation> </message> <message> <source>none</source> <translation>aucune</translation> </message> <message> <source>Confirm options reset</source> <translation>Confirmer la réinitialisation des options</translation> </message> <message> <source>Client restart required to activate changes.</source> <translation>Le redémarrage du client est nécessaire pour activer les changements.</translation> </message> <message> <source>Client will be shutdown, do you want to proceed?</source> <translation>Le client sera arrêté, voulez-vous continuer?</translation> </message> <message> <source>This change would require a client restart.</source> <translation>Ce changement nécessite un redémarrage du client.</translation> </message> <message> <source>The supplied proxy address is invalid.</source> <translation>L'adresse de serveur mandataire fournie est invalide.</translation> </message> </context> <context> 
<name>OverviewPage</name> <message> <source>Form</source> <translation>Formulaire</translation> </message> <message> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Mtucicoin network after a connection is established, but this process has not completed yet.</source> <translation>L'information affichée peut être obsolète. Votre portefeuille se synchronise automatiquement avec le réseau Mtucicoin lorsque la connection est établie, mais le process n'est pas encore terminé.</translation> </message> <message> <source>Available:</source> <translation>Disponible :</translation> </message> <message> <source>Your current spendable balance</source> <translation>Votre solde actuel pouvant être dépensé</translation> </message> <message> <source>Pending:</source> <translation>En attente :</translation> </message> <message> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source> <translation>Total des transactions qui doivent encore être confirmées et qu'il n'est pas encore possible de dépenser</translation> </message> <message> <source>Immature:</source> <translation>Immature :</translation> </message> <message> <source>Mined balance that has not yet matured</source> <translation>Le solde généré n'est pas encore mûr</translation> </message> <message> <source>Balances</source> <translation>soldes</translation> </message> <message> <source>Unconfirmed transactions to watch-only addresses</source> <translation>Transactions non confirmés d'adresses en lecture seule</translation> </message> <message> <source>Mined balance in watch-only addresses that has not yet matured</source> <translation>Solde miné pour les adresses en lecture seule qui n'ont pas encore mûri</translation> </message> <message> <source>Total:</source> <translation>Total :</translation> </message> <message> <source>Your current total balance</source> <translation>Votre solde total 
actuel</translation> </message> <message> <source>Current total balance in watch-only addresses</source> <translation>Solde total actuel pour les adresses en lecture seule</translation> </message> <message> <source>Watch-only:</source> <translation>Lecture seule:</translation> </message> <message> <source>Your current balance in watch-only addresses</source> <translation>Votre solde actuel pour adresses en lecture seule</translation> </message> <message> <source>Spendable:</source> <translation>Disponible:</translation> </message> <message> <source>Status:</source> <translation>Status :</translation> </message> <message> <source>Enabled/Disabled</source> <translation>Activé/Désactivé</translation> </message> <message> <source>Completion:</source> <translation>Complétude :</translation> </message> <message> <source>Darksend Balance:</source> <translation>Balance Darksend :</translation> </message> <message> <source>Amount and Rounds:</source> <translation>Montant et Cycles :</translation> </message> <message> <source>0 MTUCICOIN / 0 Rounds</source> <translation>0 MTUCICOIN / 0 Cycles</translation> </message> <message> <source>Submitted Denom:</source> <translation>Denom soumis :</translation> </message> <message> <source>n/a</source> <translation>n/a</translation> </message> <message> <source>Darksend</source> <translation>Darksend</translation> </message> <message> <source>Recent transactions</source> <translation>Transactions récentes</translation> </message> <message> <source>Start/Stop Mixing</source> <translation>Démarrer/Arrêter le mélange</translation> </message> <message> <source>The denominations you submitted to the Masternode.&lt;br&gt;To mix, other users must submit the exact same denominations.</source> <translation>Les dénominations que vous avez soumises à la Masternode.&lt;br&gt;Pour mélanger, d'autres utilisateurs doivent soumettre les mêmes dénominations.</translation> </message> <message> <source>(Last Message)</source> <translation>(Dernier 
Message)</translation> </message> <message> <source>Try to manually submit a Darksend request.</source> <translation>Essayer de soumettre manuellement une requête Darksend.</translation> </message> <message> <source>Try Mix</source> <translation>Essayer le mélange</translation> </message> <message> <source>Reset the current status of Darksend (can interrupt Darksend if it's in the process of Mixing, which can cost you money!)</source> <translation>Réinitialiser le statut de Darksend (peut interrompre Darksend si le process de mélange est en cours, ce qui peut vous coûter de l'argent !)</translation> </message> <message> <source>Reset</source> <translation>Réinitialiser</translation> </message> <message> <source>out of sync</source> <translation>désynchronisé</translation> </message> <message> <source>Disabled</source> <translation>Désactivé</translation> </message> <message> <source>Start Darksend Mixing</source> <translation>Démarrer le mélange Darksend</translation> </message> <message> <source>Stop Darksend Mixing</source> <translation>Arrêter le mélange Darksend</translation> </message> <message> <source>No inputs detected</source> <translation>Aucune entrée détectée</translation> </message> <message numerus="yes"> <source>%n Rounds</source> <translation><numerusform>%n Cycle</numerusform><numerusform>%n Cycles</numerusform></translation> </message> <message> <source>Not enough compatible inputs to anonymize &lt;span style='color:red;'&gt;%1&lt;/span&gt;,&lt;br&gt;will anonymize &lt;span style='color:red;'&gt;%2&lt;/span&gt; instead</source> <translation>Pas assez d'entrées compatibles pour anonymiser &lt;span style='color:red;'&gt;%1&lt;/span&gt;, &lt;br&gt;nous allons anonymiser &lt;span style='color:red;'&gt;%2&lt;/span&gt; à la place</translation> </message> <message> <source>Overall progress</source> <translation>Progrès global</translation> </message> <message> <source>Denominated</source> <translation>Dénommées</translation> </message> <message> 
<source>Anonymized</source> <translation>Anonymisés</translation> </message> <message numerus="yes"> <source>Denominated inputs have %5 of %n rounds on average</source> <translation><numerusform>Les entrées dénommées ont %5 sur %n cycles en moyenne</numerusform><numerusform>Les entrées dénommées ont %5 sur %n cycles en moyenne</numerusform></translation> </message> <message> <source>Found enough compatible inputs to anonymize %1</source> <translation>Assez d'entrées compatibles trouvées pour anonymiser %1</translation> </message> <message> <source>Mixed</source> <translation>Mélangés</translation> </message> <message> <source>Enabled</source> <translation>Activé</translation> </message> <message> <source>Last Darksend message: </source> <translation>Dernier message de Darksend: </translation> </message> <message> <source>N/A</source> <translation>N.D.</translation> </message> <message> <source>Darksend was successfully reset.</source> <translation>Darksend est réinitialisé avec succès.</translation> </message> <message> <source>If you don't want to see internal Darksend fees/transactions select "Most Common" as Type on the "Transactions" tab.</source> <translation>Pour ne pas voir les transactions/frais Darksend internes sélectionnez "Les plus Communes" comme Type dans l'onglet "Transactions"</translation> </message> <message> <source>Darksend requires at least %1 to use.</source> <translation>Darksend nécessite au moins %1 pour l'utiliser.</translation> </message> <message> <source>Wallet is locked and user declined to unlock. Disabling Darksend.</source> <translation>Le portefeuille est verrouillé et l'utilisateur a refusé de le débloquer. 
Désactivation de Darksend.</translation> </message> </context> <context> <name>PaymentServer</name> <message> <source>Payment request error</source> <translation>Erreur de demande de paiement</translation> </message> <message> <source>Cannot start mtucicoin: click-to-pay handler</source> <translation>Impossible de démarrer mtucicoin: click-to-pay le gestionnaire</translation> </message> <message> <source>URI handling</source> <translation>Gestion des URIs</translation> </message> <message> <source>Payment request fetch URL is invalid: %1</source> <translation>L'URL de récupération de la demande de paiement est invalide : %1</translation> </message> <message> <source>Payment request file handling</source> <translation>Gestion des fichiers de demande de paiement</translation> </message> <message> <source>Invalid payment address %1</source> <translation>Adresse de paiement %1 invalide</translation> </message> <message> <source>URI cannot be parsed! This can be caused by an invalid Mtucicoin address or malformed URI parameters.</source> <translation>L'URI ne peut être analysé ! Ceci peut être causé par une adresse Mtucicoin invalide ou par des paramètres d'URI mal composé.</translation> </message> <message> <source>Payment request file cannot be read! This can be caused by an invalid payment request file.</source> <translation>Le fichier de demande de paiement ne peut pas être lu ou traité ! 
Ceci peut être causé par un fichier de demande de paiement invalide.</translation> </message> <message> <source>Payment request rejected</source> <translation>La demande de paiement a été rejetée</translation> </message> <message> <source>Payment request network doesn't match client network.</source> <translation>Le réseau de la demande de paiement ne correspond pas au réseau du client.</translation> </message> <message> <source>Payment request has expired.</source> <translation>La demande de paiement a expiré.</translation> </message> <message> <source>Payment request is not initialized.</source> <translation>La demande de paiement n'est pas initialisée.</translation> </message> <message> <source>Unverified payment requests to custom payment scripts are unsupported.</source> <translation>Les demandes de paiements non vérifiées à des scripts de paiement personnalisés ne sont pas prises en charge.</translation> </message> <message> <source>Requested payment amount of %1 is too small (considered dust).</source> <translation>Le paiement demandé d'un montant de %1 est trop faible (considéré comme de la poussière).</translation> </message> <message> <source>Refund from %1</source> <translation>Remboursement de %1</translation> </message> <message> <source>Payment request %1 is too large (%2 bytes, allowed %3 bytes).</source> <translation>La demande de paiement %1 est trop volumineuse (%2 octets sur %3 permis)</translation> </message> <message> <source>Payment request DoS protection</source> <translation>Protection DoS de la demande de paiement</translation> </message> <message> <source>Error communicating with %1: %2</source> <translation>Erreur de communication avec %1 : %2</translation> </message> <message> <source>Payment request cannot be parsed!</source> <translation>La demande de paiement ne peut pas être analysée !</translation> </message> <message> <source>Bad response from server %1</source> <translation>Mauvaise réponse du serveur %1</translation> </message> 
<message> <source>Network request error</source> <translation>Erreur de demande réseau</translation> </message> <message> <source>Payment acknowledged</source> <translation>Le paiement a été confirmé</translation> </message> </context> <context> <name>PeerTableModel</name> <message> <source>Address/Hostname</source> <translation>Adresse/Nom d'hôte</translation> </message> <message> <source>User Agent</source> <translation>Agent de l'utilisateur</translation> </message> <message> <source>Ping Time</source> <translation>Temps de Ping</translation> </message> </context> <context> <name>QObject</name> <message> <source>Amount</source> <translation>Montant</translation> </message> <message> <source>Enter a Mtucicoin address (e.g. %1)</source> <translation>Entrez une adresse Mtucicoin (e.g. %1)</translation> </message> <message> <source>%1 d</source> <translation>%1 j</translation> </message> <message> <source>%1 h</source> <translation>%1 h</translation> </message> <message> <source>%1 m</source> <translation>%1 m</translation> </message> <message> <source>%1 s</source> <translation>%1 s</translation> </message> <message> <source>NETWORK</source> <translation>RÉSEAU</translation> </message> <message> <source>UNKNOWN</source> <translation>INCONNU</translation> </message> <message> <source>None</source> <translation>Aucun</translation> </message> <message> <source>N/A</source> <translation>N.D.</translation> </message> <message> <source>%1 ms</source> <translation>%1 ms</translation> </message> </context> <context> <name>QRImageWidget</name> <message> <source>&amp;Save Image...</source> <translation>&amp;Sauvegarder l'image...</translation> </message> <message> <source>&amp;Copy Image</source> <translation>&amp;Copier l'image</translation> </message> <message> <source>Save QR Code</source> <translation>Sauvegarder le code QR</translation> </message> <message> <source>PNG Image (*.png)</source> <translation>Image PNG (*.png)</translation> </message> </context> <context> 
<name>RPCConsole</name> <message> <source>Tools window</source> <translation>Fenêtre des outils</translation> </message> <message> <source>&amp;Information</source> <translation>&amp;Informations</translation> </message> <message> <source>General</source> <translation>Général</translation> </message> <message> <source>Name</source> <translation>Nom</translation> </message> <message> <source>Client name</source> <translation>Nom du client</translation> </message> <message> <source>N/A</source> <translation>N.D.</translation> </message> <message> <source>Number of connections</source> <translation>Nombre de connexions</translation> </message> <message> <source>Open the Mtucicoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Ouvrir le fichier de debug Mtucicoin depuis le répertoire de données actuel. Ceci peut prendre plusieurs secondes pour un fichier de debug imposant.</translation> </message> <message> <source>&amp;Open</source> <translation>&amp;Ouvrir</translation> </message> <message> <source>Startup time</source> <translation>Heure de démarrage</translation> </message> <message> <source>Network</source> <translation>Réseau</translation> </message> <message> <source>Last block time</source> <translation>Horodatage du dernier bloc</translation> </message> <message> <source>Debug log file</source> <translation>Journal de débogage</translation> </message> <message> <source>Using OpenSSL version</source> <translation>Version d'OpenSSL utilisée</translation> </message> <message> <source>Build date</source> <translation>Date de compilation</translation> </message> <message> <source>Current number of blocks</source> <translation>Nombre actuel de blocs</translation> </message> <message> <source>Client version</source> <translation>Version du client</translation> </message> <message> <source>Using BerkeleyDB version</source> <translation>Version BerkeleyDB utilisée</translation> </message> <message> 
<source>Block chain</source> <translation>Chaîne de blocs</translation> </message> <message> <source>Number of Masternodes</source> <translation>Nombre de Masternodes</translation> </message> <message> <source>&amp;Console</source> <translation>&amp;Console</translation> </message> <message> <source>Clear console</source> <translation>Nettoyer la console</translation> </message> <message> <source>&amp;Network Traffic</source> <translation>Trafic &amp;réseau</translation> </message> <message> <source>&amp;Clear</source> <translation>&amp;Nettoyer</translation> </message> <message> <source>Totals</source> <translation>Totaux</translation> </message> <message> <source>Received</source> <translation>Reçus</translation> </message> <message> <source>Sent</source> <translation>Envoyés</translation> </message> <message> <source>&amp;Peers</source> <translation>Liste des &amp;Pairs</translation> </message> <message> <source>Select a peer to view detailed information.</source> <translation>Choisir un pair pour voir les informations détaillées.</translation> </message> <message> <source>Direction</source> <translation>Direction</translation> </message> <message> <source>Version</source> <translation>Version</translation> </message> <message> <source>User Agent</source> <translation>Agent de l'utilisateur</translation> </message> <message> <source>Services</source> <translation>Services</translation> </message> <message> <source>Starting Height</source> <translation>Hauteur de Démarrage</translation> </message> <message> <source>Sync Height</source> <translation>Hauteur de Synchro</translation> </message> <message> <source>Ban Score</source> <translation>Score d'interdiction</translation> </message> <message> <source>Connection Time</source> <translation>Temps de Connexion</translation> </message> <message> <source>Last Send</source> <translation>Dernier Envoi</translation> </message> <message> <source>Last Receive</source> <translation>Dernière Reception</translation> 
</message> <message> <source>Bytes Sent</source> <translation>Octets Envoyés</translation> </message> <message> <source>Bytes Received</source> <translation>Octets Reçus</translation> </message> <message> <source>Ping Time</source> <translation>Temps de Ping</translation> </message> <message> <source>&amp;Wallet Repair</source> <translation>&amp;Réparation de Portefeuille</translation> </message> <message> <source>Salvage wallet</source> <translation>Sauvetage de portefeuille</translation> </message> <message> <source>Rescan blockchain files</source> <translation>Scanner à nouveau les fichiers de la chaîne de blocs</translation> </message> <message> <source>Recover transactions 1</source> <translation>Récupérer les transactions 1</translation> </message> <message> <source>Recover transactions 2</source> <translation>Récupérer les transactions 2</translation> </message> <message> <source>Upgrade wallet format</source> <translation>Mise à jour du format du portefeuille</translation> </message> <message> <source>The buttons below will restart the wallet with command-line options to repair the wallet, fix issues with corrupt blockhain files or missing/obsolete transactions.</source> <translation>Les boutons ci-dessous redémarreront le portefeuille avec des paramètres de ligne de commande pour réparer le portefeuille, corriger des problèmes de fichiers corrompus de chaine de blocs ou de transactions manquantes ou obsolètes</translation> </message> <message> <source>-salvagewallet: Attempt to recover private keys from a corrupt wallet.dat.</source> <translation>-salvagewallet: Tenter de récupérer les clés privées d'un wallet.dat corrompu</translation> </message> <message> <source>-rescan: Rescan the block chain for missing wallet transactions.</source> <translation>-rescan: Réanalyser la chaine de blocs pour les transactions de portefeuille manquantes</translation> </message> <message> <source>-zapwallettxes=1: Recover transactions from blockchain (keep meta-data, e.g. 
account owner).</source> <translation>-zapwallettxes=1: Récupère les transactions depuis la chaine de blocs (en gardant les méta-données, ex. le nom du compte).</translation> </message> <message> <source>-zapwallettxes=2: Recover transactions from blockchain (drop meta-data).</source> <translation>-zapwallettxes=2: Récupère les transactions depuis la chaine de blocs (sans garder les méta-données).</translation> </message> <message> <source>-upgradewallet: Upgrade wallet to latest format on startup. (Note: this is NOT an update of the wallet itself!)</source> <translation>-upgradewallet: Mise à jour du format du fichier wallet.dat vers la dernière version au démarrage. (Note: ce n'est PAS une mise à jour du logiciel portefeuille!)</translation> </message> <message> <source>Wallet repair options.</source> <translation>Options de réparation du portefeuille.</translation> </message> <message> <source>Rebuild index</source> <translation>Reconstruire l'index</translation> </message> <message> <source>-reindex: Rebuild block chain index from current blk000??.dat files.</source> <translation>-reindex: Reconstruire l'index de la chaine de blocs à partir des fichiers blk000??.dat actuels.</translation> </message> <message> <source>In:</source> <translation>Entrant :</translation> </message> <message> <source>Out:</source> <translation>Sortant :</translation> </message> <message> <source>Welcome to the Mtucicoin RPC console.</source> <translation>Bienvenue sur la console RPC de Mtucicoin.</translation> </message> <message> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Utiliser les touches de curseur pour naviguer dans l'historique et &lt;b&gt;Ctrl-L&lt;/b&gt; pour effacer l'écran.</translation> </message> <message> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Taper &lt;b&gt;help&lt;/b&gt; pour afficher une vue générale des commandes 
disponibles.</translation> </message> <message> <source>%1 B</source> <translation>%1 o</translation> </message> <message> <source>%1 KB</source> <translation>%1 Ko</translation> </message> <message> <source>%1 MB</source> <translation>%1 Mo</translation> </message> <message> <source>%1 GB</source> <translation>%1 Go</translation> </message> <message> <source>via %1</source> <translation>via %1</translation> </message> <message> <source>never</source> <translation>jamais</translation> </message> <message> <source>Inbound</source> <translation>Arrivant</translation> </message> <message> <source>Outbound</source> <translation>Sortant</translation> </message> <message> <source>Unknown</source> <translation>Inconnus</translation> </message> <message> <source>Fetching...</source> <translation>Récupération...</translation> </message> </context> <context> <name>ReceiveCoinsDialog</name> <message> <source>Reuse one of the previously used receiving addresses.&lt;br&gt;Reusing addresses has security and privacy issues.&lt;br&gt;Do not use this unless re-generating a payment request made before.</source> <translation>Réutilise une adresse de réception précédemment utilisée.&lt;br&gt;Réutiliser une adresse pose des problèmes de sécurité et de vie privée.&lt;br&gt;N'utilisez pas cette option sauf si vous générez à nouveau une demande de paiement déjà faite.</translation> </message> <message> <source>R&amp;euse an existing receiving address (not recommended)</source> <translation>Ré&amp;utiliser une adresse de réception existante (non recommandé)</translation> </message> <message> <source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the Mtucicoin network.</source> <translation>Un message optionnel à joindre à la demande de paiement, qui sera affiché quand la demande sera ouverte. 
Note : Ce message ne sera pas envoyé avec le paiement à travers le réseau Mtucicoin.</translation> </message> <message> <source>&amp;Message:</source> <translation>M&amp;essage :</translation> </message> <message> <source>An optional label to associate with the new receiving address.</source> <translation>Une étiquette optionnelle à associer à la nouvelle adresse de réception.</translation> </message> <message> <source>An optional message to attach to the payment request, which will be displayed when the request is opened.&lt;br&gt;Note: The message will not be sent with the payment over the Mtucicoin network.</source> <translation>Un message optionnel à joindre à la demande de paiement, qui sera affiché quand la demande sera ouverte.&lt;br&gt;Note : Ce message ne sera pas envoyé avec le paiement à travers le réseau Mtucicoin.</translation> </message> <message> <source>Use this form to request payments. All fields are &lt;b&gt;optional&lt;/b&gt;.</source> <translation>Utiliser ce formulaire pour demander des paiements. Tous les champs sont &lt;b&gt;optionnels&lt;/b&gt;.</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Étiquette :</translation> </message> <message> <source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source> <translation>Un montant optionnel à demander. 
Laisser ceci vide ou à zéro pour ne pas demander de montant spécifique.</translation> </message> <message> <source>&amp;Amount:</source> <translation>&amp;Montant :</translation> </message> <message> <source>&amp;Request payment</source> <translation>&amp;Demande de paiement</translation> </message> <message> <source>Clear all fields of the form.</source> <translation>Effacer tous les champs du formulaire.</translation> </message> <message> <source>Clear</source> <translation>Effacer</translation> </message> <message> <source>Requested payments history</source> <translation>Historique des paiements demandés</translation> </message> <message> <source>Show the selected request (does the same as double clicking an entry)</source> <translation>Afficher la demande choisie (identique à un double-clic sur une entrée)</translation> </message> <message> <source>Show</source> <translation>Afficher</translation> </message> <message> <source>Remove the selected entries from the list</source> <translation>Enlever les entrées sélectionnées de la liste</translation> </message> <message> <source>Remove</source> <translation>Enlever</translation> </message> <message> <source>Copy label</source> <translation>Copier l’étiquette</translation> </message> <message> <source>Copy message</source> <translation>Copier le message</translation> </message> <message> <source>Copy amount</source> <translation>Copier le montant</translation> </message> </context> <context> <name>ReceiveRequestDialog</name> <message> <source>QR Code</source> <translation>Code QR</translation> </message> <message> <source>Copy &amp;URI</source> <translation>Copier l'&amp;URI</translation> </message> <message> <source>Copy &amp;Address</source> <translation>Copier l'&amp;adresse</translation> </message> <message> <source>&amp;Save Image...</source> <translation>&amp;Sauvegarder l'image...</translation> </message> <message> <source>Request payment to %1</source> <translation>Demande de paiement à %1</translation> 
</message> <message> <source>Payment information</source> <translation>Informations de paiement</translation> </message> <message> <source>URI</source> <translation>URI</translation> </message> <message> <source>Address</source> <translation>Adresse</translation> </message> <message> <source>Amount</source> <translation>Montant</translation> </message> <message> <source>Label</source> <translation>Étiquette</translation> </message> <message> <source>Message</source> <translation>Message</translation> </message> <message> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>L'URI résultant est trop long, essayez de réduire le texte d'étiquette / de message.</translation> </message> <message> <source>Error encoding URI into QR Code.</source> <translation>Erreur d'encodage de l'URI en code QR.</translation> </message> </context> <context> <name>RecentRequestsTableModel</name> <message> <source>Date</source> <translation>Date</translation> </message> <message> <source>Label</source> <translation>Étiquette</translation> </message> <message> <source>Message</source> <translation>Message</translation> </message> <message> <source>Amount</source> <translation>Montant</translation> </message> <message> <source>(no label)</source> <translation>(pas d'étiquette)</translation> </message> <message> <source>(no message)</source> <translation>(pas de message)</translation> </message> <message> <source>(no amount)</source> <translation>(aucun montant)</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <source>Send Coins</source> <translation>Envoyer des pièces</translation> </message> <message> <source>Coin Control Features</source> <translation>Fonctions de contrôle des pièces</translation> </message> <message> <source>Inputs...</source> <translation>Entrées...</translation> </message> <message> <source>automatically selected</source> <translation>choisi automatiquement</translation> </message> 
<message> <source>Insufficient funds!</source> <translation>Fonds insuffisants !</translation> </message> <message> <source>Quantity:</source> <translation>Quantité :</translation> </message> <message> <source>Bytes:</source> <translation>Octets :</translation> </message> <message> <source>Amount:</source> <translation>Montant :</translation> </message> <message> <source>Priority:</source> <translation>Priorité :</translation> </message> <message> <source>medium</source> <translation>moyen</translation> </message> <message> <source>Fee:</source> <translation>Frais :</translation> </message> <message> <source>Dust:</source> <translation>Poussière:</translation> </message> <message> <source>no</source> <translation>non</translation> </message> <message> <source>After Fee:</source> <translation>Après les frais :</translation> </message> <message> <source>Change:</source> <translation>Monnaie :</translation> </message> <message> <source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source> <translation>Si ceci est actif mais l'adresse de monnaie rendue est vide ou invalide, la monnaie sera envoyée vers une adresse nouvellement générée.</translation> </message> <message> <source>Custom change address</source> <translation>Adresse personnalisée de monnaie rendue</translation> </message> <message> <source>Transaction Fee:</source> <translation>Frais de Transaction:</translation> </message> <message> <source>Choose...</source> <translation>Choisissez...</translation> </message> <message> <source>collapse fee-settings</source> <translation>replier les paramètres de frais</translation> </message> <message> <source>Minimize</source> <translation>Minimiser</translation> </message> <message> <source>If the custom fee is set to 1000 duffs and the transaction is only 250 bytes, then "per kilobyte" only pays 250 duffs in fee,&lt;br /&gt;while "at least" pays 1000 duffs. 
For transactions bigger than a kilobyte both pay by kilobyte.</source> <translation>Si les frais personnalisés sont à 1000 duffs et que la transaction fait seulement 250 octets, alors "par kilooctet" payera seulement 250 duffs de frais,&lt;br /&gt;alors que "au moins" payera 1000 duffs. Pour les transactions de plus d'un kilooctet les deux payeront par kilooctet.</translation> </message> <message> <source>If the custom fee is set to 1000 duffs and the transaction is only 250 bytes, then "per kilobyte" only pays 250 duffs in fee,&lt;br /&gt;while "total at least" pays 1000 duffs. For transactions bigger than a kilobyte both pay by kilobyte.</source> <translation>Si les frais personnalisés sont à 1000 duffs et que la transaction fait seulement 250 octets, alors "par kilooctet" payera seulement 250 duffs de frais,&lt;br /&gt;alors que "total au moins" payera 1000 duffs. Pour les transactions de plus d'un kilooctet les deux payeront par kilooctet.</translation> </message> <message> <source>Paying only the minimum fee is just fine as long as there is less transaction volume than space in the blocks.&lt;br /&gt;But be aware that this can end up in a never confirming transaction once there is more demand for mtucicoin transactions than the network can process.</source> <translation>Payer les frais minimums fonctionne tant qu'il y a moins de volume de transactions que de place dans les blocs.&lt;br/&gt;Mais soyez conscients que ceci peut amener a des transactions qui ne seront jamais confirmées lorsqu'il y aura plus de demande que la capacité du réseau.</translation> </message> <message> <source>per kilobyte</source> <translation>par kilooctet</translation> </message> <message> <source>total at least</source> <translation>total au moins</translation> </message> <message> <source>(read the tooltip)</source> <translation>(lisez l'infobulle)</translation> </message> <message> <source>Recommended:</source> <translation>Recommandé:</translation> </message> <message> 
<source>Custom:</source> <translation>Personnalisé:</translation> </message> <message> <source>(Smart fee not initialized yet. This usually takes a few blocks...)</source> <translation>(Les frais intelligents ne sont pas encore initialisés . Ceci nécessite quelques blocs généralement...)</translation> </message> <message> <source>Confirmation time:</source> <translation>Temps de Confirmation:</translation> </message> <message> <source>normal</source> <translation>normal</translation> </message> <message> <source>fast</source> <translation>rapide</translation> </message> <message> <source>Send as zero-fee transaction if possible</source> <translation>Envoyé en tant que transaction sans frais si possible</translation> </message> <message> <source>(confirmation may take longer)</source> <translation>(la confirmation pourra prendre plus de temps)</translation> </message> <message> <source>Confirm the send action</source> <translation>Confirmer l’action d'envoi</translation> </message> <message> <source>S&amp;end</source> <translation>E&amp;nvoyer</translation> </message> <message> <source>Clear all fields of the form.</source> <translation>Effacer tous les champs du formulaire.</translation> </message> <message> <source>Clear &amp;All</source> <translation>&amp;Tout nettoyer</translation> </message> <message> <source>Send to multiple recipients at once</source> <translation>Envoyer à plusieurs destinataires à la fois</translation> </message> <message> <source>Add &amp;Recipient</source> <translation>Ajouter un &amp;destinataire</translation> </message> <message> <source>Darksend</source> <translation>Darksend</translation> </message> <message> <source>InstantX</source> <translation>InstantX</translation> </message> <message> <source>Balance:</source> <translation>Solde :</translation> </message> <message> <source>Copy quantity</source> <translation>Copier la quantité</translation> </message> <message> <source>Copy amount</source> <translation>Copier le 
montant</translation> </message> <message> <source>Copy fee</source> <translation>Copier les frais</translation> </message> <message> <source>Copy after fee</source> <translation>Copier le montant après les frais</translation> </message> <message> <source>Copy bytes</source> <translation>Copier les octets</translation> </message> <message> <source>Copy priority</source> <translation>Copier la priorité</translation> </message> <message> <source>Copy dust</source> <translation>Copier poussière</translation> </message> <message> <source>Copy change</source> <translation>Copier la monnaie</translation> </message> <message> <source>using</source> <translation>utiliser</translation> </message> <message> <source>anonymous funds</source> <translation>fonds anonymisés</translation> </message> <message> <source>(darksend requires this amount to be rounded up to the nearest %1).</source> <translation>(darksend nécessite que ce montant soit arrondi au plus proche de %1).</translation> </message> <message> <source>any available funds (not recommended)</source> <translation>tout fonds disponible (non recommandé)</translation> </message> <message> <source>and InstantX</source> <translation>et InstantX</translation> </message> <message> <source>%1 to %2</source> <translation>%1 à %2</translation> </message> <message> <source>Are you sure you want to send?</source> <translation>Êtes-vous sûr de vouloir envoyer ?</translation> </message> <message> <source>are added as transaction fee</source> <translation>ajouté en tant que frais de transaction</translation> </message> <message> <source>Total Amount = &lt;b&gt;%1&lt;/b&gt;&lt;br /&gt;= %2</source> <translation>Montant Total = &lt;b&gt;%1&lt;/b&gt;&lt;br /&gt;= %2</translation> </message> <message> <source>Confirm send coins</source> <translation>Confirmer l’envoi des pièces</translation> </message> <message> <source>A fee %1 times higher than %2 per kB is considered an insanely high fee.</source> <translation>Des frais %1 plus 
grands que %2 par koctets sont considérés comme excessifs.</translation> </message> <message numerus="yes"> <source>Estimated to begin confirmation within %n block(s).</source> <translation><numerusform>Le début de confirmation est estimé dans %n bloc.</numerusform><numerusform>Le début de confirmation est estimé dans les %n blocs.</numerusform></translation> </message> <message> <source>The recipient address is not valid, please recheck.</source> <translation>L'adresse du destinataire n’est pas valide, veuillez la vérifier.</translation> </message> <message> <source>&lt;b&gt;(%1 of %2 entries displayed)&lt;/b&gt;</source> <translation>&lt;b&gt;(%1 sur %2 entrées affichées)&lt;/b&gt;</translation> </message> <message> <source>The amount to pay must be larger than 0.</source> <translation>Le montant à payer doit être supérieur à 0.</translation> </message> <message> <source>The amount exceeds your balance.</source> <translation>Le montant dépasse votre solde.</translation> </message> <message> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Le montant dépasse votre solde lorsque les frais de transaction de %1 sont inclus.</translation> </message> <message> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Adresse identique trouvée, il n'est possible d'envoyer qu'une fois à chaque adresse par opération d'envoi.</translation> </message> <message> <source>Transaction creation failed!</source> <translation>La création de la transaction a échoué !</translation> </message> <message> <source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>La transaction a été rejetée ! 
Ceci peut arriver si certaines pièces de votre portefeuille étaient déjà dépensées, par exemple si vous avez utilisé une copie de wallet.dat et que des pièces ont été dépensées dans la copie sans être marquées comme telles ici.</translation> </message> <message> <source>Error: The wallet was unlocked only to anonymize coins.</source> <translation>Erreur: Le portefeuille a été déverrouillé seulement pour l'anonymisation des pièces.</translation> </message> <message> <source>Pay only the minimum fee of %1</source> <translation>Payer seulement les frais minimum de %1</translation> </message> <message> <source>Warning: Invalid Mtucicoin address</source> <translation>Attention: adresse Mtucicoin invalide</translation> </message> <message> <source>Warning: Unknown change address</source> <translation>Attention : adresse de monnaie rendue inconnue</translation> </message> <message> <source>(no label)</source> <translation>(pas d'étiquette)</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <source>This is a normal payment.</source> <translation>Ceci est un paiement normal.</translation> </message> <message> <source>Pay &amp;To:</source> <translation>&amp;Payer à :</translation> </message> <message> <source>The Mtucicoin address to send the payment to</source> <translation>L'adresse Mtucicoin à laquelle envoyer de la monnaie</translation> </message> <message> <source>Choose previously used address</source> <translation>Choisir une adresse déjà utilisée</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Coller l'adresse depuis le presse-papier</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>Remove this entry</source> <translation>Enlever cette entrée</translation> </message> <message> <source>&amp;Label:</source> <translation>É&amp;tiquette 
:</translation> </message> <message> <source>Enter a label for this address to add it to the list of used addresses</source> <translation>Saisir une étiquette pour cette adresse afin de l'ajouter à la liste d'adresses utilisées</translation> </message> <message> <source>A&amp;mount:</source> <translation>&amp;Montant :</translation> </message> <message> <source>Message:</source> <translation>Message :</translation> </message> <message> <source>A message that was attached to the mtucicoin: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the Mtucicoin network.</source> <translation>Un message qui était joint au Mtucicoin : URI qui sera sauvegardée avec la transaction pour référence. Note : Ce message ne sera pas envoyé à travers le réseau Mtucicoin.</translation> </message> <message> <source>This is an unverified payment request.</source> <translation>Ceci est une demande de paiement non vérifiée.</translation> </message> <message> <source>Pay To:</source> <translation>Payer à :</translation> </message> <message> <source>Memo:</source> <translation>Mémo :</translation> </message> <message> <source>This is a verified payment request.</source> <translation>Ceci est une demande de paiement vérifiée.</translation> </message> <message> <source>Enter a label for this address to add it to your address book</source> <translation>Saisir une étiquette pour cette adresse afin de l’ajouter à votre carnet d’adresses</translation> </message> </context> <context> <name>ShutdownWindow</name> <message> <source>Mtucicoin Core is shutting down...</source> <translation>Arrêt de Mtucicoin Core...</translation> </message> <message> <source>Do not shut down the computer until this window disappears.</source> <translation>Ne pas fermer l'ordinateur jusqu'à la disparition de cette fenêtre.</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <source>Signatures - Sign / Verify a Message</source> 
<translation>Signatures - Signer / Vérifier un message</translation> </message> <message> <source>&amp;Sign Message</source> <translation>&amp;Signer un message</translation> </message> <message> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Vous pouvez signer des messages avec vos adresses pour prouver que vous les détenez. Faites attention de ne pas signer de vague car des attaques d'hameçonnage peuvent essayer d'usurper votre identité par votre signature. Ne signez que des déclarations entièrement détaillées et avec lesquelles vous serez d'accord.</translation> </message> <message> <source>The Mtucicoin address to sign the message with</source> <translation>L'adresse Mtucicoin avec laquelle signer le message</translation> </message> <message> <source>Choose previously used address</source> <translation>Choisir une adresse précédemment utilisée</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Coller une adresse depuis le presse-papier</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>Enter the message you want to sign here</source> <translation>Saisir ici le message que vous désirez signer</translation> </message> <message> <source>Signature</source> <translation>Signature</translation> </message> <message> <source>Copy the current signature to the system clipboard</source> <translation>Copier la signature actuelle dans le presse-papier</translation> </message> <message> <source>Sign the message to prove you own this Mtucicoin address</source> <translation>Signer le message pour prouver que vous possédez cette adresse Mtucicoin</translation> </message> 
<message> <source>Sign &amp;Message</source> <translation>Signer le &amp;message</translation> </message> <message> <source>Reset all sign message fields</source> <translation>Réinitialiser tous les champs de signature de message</translation> </message> <message> <source>Clear &amp;All</source> <translation>&amp;Tout nettoyer</translation> </message> <message> <source>&amp;Verify Message</source> <translation>&amp;Vérifier un message</translation> </message> <message> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Saisir ci-dessous l'adresse de signature, le message (assurez-vous d'avoir copié exactement les retours à la ligne, les espaces, tabulations etc.) et la signature pour vérifier le message. Faire attention à ne pas déduire davantage de la signature que ce qui est contenu dans le message signé lui-même pour éviter d'être trompé par une attaque d'homme du milieu.</translation> </message> <message> <source>The Mtucicoin address the message was signed with</source> <translation>L'adresse Mtucicoin avec laquelle le message a été signé</translation> </message> <message> <source>Verify the message to ensure it was signed with the specified Mtucicoin address</source> <translation>Vérifier le message pour s'assurer qu'il a été signé avec l'adresse Mtucicoin spécifiée</translation> </message> <message> <source>Verify &amp;Message</source> <translation>Vérifier le &amp;message</translation> </message> <message> <source>Reset all verify message fields</source> <translation>Réinitialiser tous les champs de vérification de message</translation> </message> <message> <source>Click "Sign Message" to generate signature</source> <translation>Cliquez sur « Signer le message » pour générer la signature</translation> 
</message> <message> <source>The entered address is invalid.</source> <translation>L'adresse saisie est invalide.</translation> </message> <message> <source>Please check the address and try again.</source> <translation>Veuillez vérifier l'adresse et réessayer.</translation> </message> <message> <source>The entered address does not refer to a key.</source> <translation>L'adresse saisie ne fait pas référence à une clef.</translation> </message> <message> <source>Wallet unlock was cancelled.</source> <translation>Le déverrouillage du portefeuille a été annulé.</translation> </message> <message> <source>Private key for the entered address is not available.</source> <translation>La clef privée pour l'adresse indiquée n'est pas disponible.</translation> </message> <message> <source>Message signing failed.</source> <translation>La signature du message a échoué.</translation> </message> <message> <source>Message signed.</source> <translation>Le message a été signé.</translation> </message> <message> <source>The signature could not be decoded.</source> <translation>La signature n'a pu être décodée.</translation> </message> <message> <source>Please check the signature and try again.</source> <translation>Veuillez vérifier la signature et réessayer.</translation> </message> <message> <source>The signature did not match the message digest.</source> <translation>La signature ne correspond pas à l'empreinte du message.</translation> </message> <message> <source>Message verification failed.</source> <translation>Échec de la vérification du message.</translation> </message> <message> <source>Message verified.</source> <translation>Message vérifié.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <source>Mtucicoin Core</source> <translation>Mtucicoin Core</translation> </message> <message> <source>Version %1</source> <translation>Version %1</translation> </message> <message> <source>The Bitcoin Core developers</source> <translation>Les développeurs 
Bitcoin Core</translation> </message> <message> <source>The Mtucicoin Core developers</source> <translation>Les développeurs Mtucicoin Core</translation> </message> <message> <source>[testnet]</source> <translation>[testnet]</translation> </message> </context> <context> <name>TrafficGraphWidget</name> <message> <source>KB/s</source> <translation>Ko/s</translation> </message> </context> <context> <name>TransactionDesc</name> <message numerus="yes"> <source>Open for %n more block(s)</source> <translation><numerusform>Ouvert pour %n bloc de plus</numerusform><numerusform>Ouvert pour %n blocs de plus</numerusform></translation> </message> <message> <source>Open until %1</source> <translation>Ouvert jusqu'à %1</translation> </message> <message> <source>conflicted</source> <translation>en conflit</translation> </message> <message> <source>%1/offline (verified via instantx)</source> <translation>%1/déconnecté (vérifié avec instantx)</translation> </message> <message> <source>%1/confirmed (verified via instantx)</source> <translation>%1/confirmé (verifié avec instantx)</translation> </message> <message> <source>%1 confirmations (verified via instantx)</source> <translation>%1 confirmations (verifié avec instantx)</translation> </message> <message> <source>%1/offline</source> <translation>%1/déconnecté</translation> </message> <message> <source>%1/unconfirmed</source> <translation>%1/non confirmée</translation> </message> <message> <source>%1 confirmations</source> <translation>%1 confirmations</translation> </message> <message> <source>%1/offline (InstantX verification in progress - %2 of %3 signatures)</source> <translation>%1/déconnecté (vérification d'InstantX en cours - %2 sur %3 signatures)</translation> </message> <message> <source>%1/confirmed (InstantX verification in progress - %2 of %3 signatures )</source> <translation>%1/confirmé (vérification d'InstantX en cours - %2 sur %3 signatures)</translation> </message> <message> <source>%1 confirmations (InstantX 
verification in progress - %2 of %3 signatures)</source> <translation>%1 confirmations (vérification d'InstantX en cours - %2 sur %3 signatures)</translation> </message> <message> <source>%1/offline (InstantX verification failed)</source> <translation>%1/déconnecté (La vérification d'InstantX a échoué)</translation> </message> <message> <source>%1/confirmed (InstantX verification failed)</source> <translation>%1/confirmé (La vérification d'InstantX a échoué)</translation> </message> <message> <source>Status</source> <translation>État</translation> </message> <message> <source>, has not been successfully broadcast yet</source> <translation>, n’a pas encore été diffusée avec succès</translation> </message> <message numerus="yes"> <source>, broadcast through %n node(s)</source> <translation><numerusform>, diffusée à travers %n nœud</numerusform><numerusform>, diffusée à travers %n nœuds</numerusform></translation> </message> <message> <source>Date</source> <translation>Date</translation> </message> <message> <source>Source</source> <translation>Source</translation> </message> <message> <source>Generated</source> <translation>Généré</translation> </message> <message> <source>From</source> <translation>De</translation> </message> <message> <source>unknown</source> <translation>inconnu</translation> </message> <message> <source>To</source> <translation>À</translation> </message> <message> <source>own address</source> <translation>votre propre adresse</translation> </message> <message> <source>watch-only</source> <translation>lecture seule</translation> </message> <message> <source>label</source> <translation>étiquette</translation> </message> <message> <source>Credit</source> <translation>Crédit</translation> </message> <message numerus="yes"> <source>matures in %n more block(s)</source> <translation><numerusform>arrive à maturité dans %n bloc de plus</numerusform><numerusform>arrive à maturité dans %n blocs de plus</numerusform></translation> </message> <message> 
<source>not accepted</source> <translation>refusé</translation> </message> <message> <source>Debit</source> <translation>Débit</translation> </message> <message> <source>Total debit</source> <translation>Total débit</translation> </message> <message> <source>Total credit</source> <translation>Total crédit</translation> </message> <message> <source>Transaction fee</source> <translation>Frais de transaction</translation> </message> <message> <source>Net amount</source> <translation>Montant net</translation> </message> <message> <source>Message</source> <translation>Message</translation> </message> <message> <source>Comment</source> <translation>Commentaire</translation> </message> <message> <source>Transaction ID</source> <translation>ID de la transaction</translation> </message> <message> <source>Merchant</source> <translation>Marchand</translation> </message> <message> <source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Les pièces générées doivent mûrir pendant %1 blocs avant de pouvoir être dépensées. Lorsque vous avez généré ce bloc, il a été diffusé sur le réseau pour être ajouté à la chaîne de blocs. S’il échoue a intégrer la chaîne, son état sera modifié en « non accepté » et il ne sera pas possible de le dépenser. 
Ceci peut arriver occasionnellement si un autre nœud génère un bloc à quelques secondes du votre.</translation> </message> <message> <source>Debug information</source> <translation>Informations de débogage</translation> </message> <message> <source>Transaction</source> <translation>Transaction</translation> </message> <message> <source>Inputs</source> <translation>Entrées</translation> </message> <message> <source>Amount</source> <translation>Montant</translation> </message> <message> <source>true</source> <translation>vrai</translation> </message> <message> <source>false</source> <translation>faux</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <source>Transaction details</source> <translation>Détails de la transaction</translation> </message> <message> <source>This pane shows a detailed description of the transaction</source> <translation>Ce panneau affiche une description détaillée de la transaction</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <source>Date</source> <translation>Date</translation> </message> <message> <source>Type</source> <translation>Type</translation> </message> <message> <source>Address</source> <translation>Adresse</translation> </message> <message numerus="yes"> <source>Open for %n more block(s)</source> <translation><numerusform>Ouvert pour %n bloc de plus</numerusform><numerusform>Ouvert pour %n blocs de plus</numerusform></translation> </message> <message> <source>Open until %1</source> <translation>Ouvert jusqu'à %1</translation> </message> <message> <source>Offline</source> <translation>Déconnecté</translation> </message> <message> <source>Unconfirmed</source> <translation>Non confirmé</translation> </message> <message> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation>Confirmation (%1 sur %2 confirmations recommandées)</translation> </message> <message> <source>Confirmed (%1 confirmations)</source> 
<translation>Confirmée (%1 confirmations)</translation> </message> <message> <source>Conflicted</source> <translation>En conflit</translation> </message> <message> <source>Immature (%1 confirmations, will be available after %2)</source> <translation>Immature (%1 confirmations, sera disponible après %2)</translation> </message> <message> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Ce bloc n’a été reçu par aucun autre nœud et ne sera probablement pas accepté !</translation> </message> <message> <source>Generated but not accepted</source> <translation>Généré mais pas accepté</translation> </message> <message> <source>Received with</source> <translation>Reçue avec</translation> </message> <message> <source>Received from</source> <translation>Reçue de</translation> </message> <message> <source>Received via Darksend</source> <translation>Reçu par Darksend</translation> </message> <message> <source>Sent to</source> <translation>Envoyée à</translation> </message> <message> <source>Payment to yourself</source> <translation>Paiement à vous-même</translation> </message> <message> <source>Mined</source> <translation>Miné</translation> </message> <message> <source>Darksend Denominate</source> <translation>Dénomination Darksend</translation> </message> <message> <source>Darksend Collateral Payment</source> <translation>Paiement Darksend Collatéral</translation> </message> <message> <source>Darksend Make Collateral Inputs</source> <translation>Darksend Création d'Entrées Collatérales</translation> </message> <message> <source>Darksend Create Denominations</source> <translation>Darksend Création de Dénominations</translation> </message> <message> <source>Darksent</source> <translation>Darksent</translation> </message> <message> <source>watch-only</source> <translation>lecture seule</translation> </message> <message> <source>(n/a)</source> <translation>(n.d)</translation> </message> <message> <source>Transaction 
status. Hover over this field to show number of confirmations.</source> <translation>État de la transaction. Laissez le pointeur de la souris sur ce champ pour voir le nombre de confirmations.</translation> </message> <message> <source>Date and time that the transaction was received.</source> <translation>Date et heure de réception de la transaction.</translation> </message> <message> <source>Type of transaction.</source> <translation>Type de transaction.</translation> </message> <message> <source>Whether or not a watch-only address is involved in this transaction.</source> <translation>Si une adresse en lecture seule est impliquée dans cette transaction.</translation> </message> <message> <source>Destination address of transaction.</source> <translation>L’adresse de destination de la transaction.</translation> </message> <message> <source>Amount removed from or added to balance.</source> <translation>Montant ajouté ou enlevé au solde.</translation> </message> </context> <context> <name>TransactionView</name> <message> <source>All</source> <translation>Toutes</translation> </message> <message> <source>Today</source> <translation>Aujourd’hui</translation> </message> <message> <source>This week</source> <translation>Cette semaine</translation> </message> <message> <source>This month</source> <translation>Ce mois-ci</translation> </message> <message> <source>Last month</source> <translation>Le mois dernier</translation> </message> <message> <source>This year</source> <translation>Cette année</translation> </message> <message> <source>Range...</source> <translation>Intervalle...</translation> </message> <message> <source>Most Common</source> <translation>Les Plus Courants</translation> </message> <message> <source>Received with</source> <translation>Reçue avec</translation> </message> <message> <source>Sent to</source> <translation>Envoyée à</translation> </message> <message> <source>Darksent</source> <translation>Darksent</translation> </message> <message> 
<source>Darksend Make Collateral Inputs</source> <translation>Darksend Création d'Entrées Collatérales</translation> </message> <message> <source>Darksend Create Denominations</source> <translation>Darksend Création de Dénominations</translation> </message> <message> <source>Darksend Denominate</source> <translation>Dénomination Darksend</translation> </message> <message> <source>Darksend Collateral Payment</source> <translation>Paiement Darksend Collatéral</translation> </message> <message> <source>To yourself</source> <translation>À vous-même</translation> </message> <message> <source>Mined</source> <translation>Miné</translation> </message> <message> <source>Other</source> <translation>Autres</translation> </message> <message> <source>Enter address or label to search</source> <translation>Saisir une adresse ou une étiquette à rechercher</translation> </message> <message> <source>Min amount</source> <translation>Montant min.</translation> </message> <message> <source>Copy address</source> <translation>Copier l’adresse</translation> </message> <message> <source>Copy label</source> <translation>Copier l’étiquette</translation> </message> <message> <source>Copy amount</source> <translation>Copier le montant</translation> </message> <message> <source>Copy transaction ID</source> <translation>Copier l'ID de la transaction</translation> </message> <message> <source>Edit label</source> <translation>Modifier l’étiquette</translation> </message> <message> <source>Show transaction details</source> <translation>Afficher les détails de la transaction</translation> </message> <message> <source>Export Transaction History</source> <translation>Exporter l'historique des transactions</translation> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Valeurs séparées par des virgules (*.csv)</translation> </message> <message> <source>Confirmed</source> <translation>Confirmée</translation> </message> <message> <source>Watch-only</source> 
<translation>Lecture seule</translation> </message> <message> <source>Date</source> <translation>Date</translation> </message> <message> <source>Type</source> <translation>Type</translation> </message> <message> <source>Label</source> <translation>Étiquette</translation> </message> <message> <source>Address</source> <translation>Adresse</translation> </message> <message> <source>ID</source> <translation>ID</translation> </message> <message> <source>Exporting Failed</source> <translation>L'exportation a échoué</translation> </message> <message> <source>There was an error trying to save the transaction history to %1.</source> <translation>Une erreur est survenue lors de l'enregistrement de l'historique des transactions vers %1.</translation> </message> <message> <source>Exporting Successful</source> <translation>Exportation réussie</translation> </message> <message> <source>The transaction history was successfully saved to %1.</source> <translation>L'historique des transactions a été sauvegardée avec succès vers %1.</translation> </message> <message> <source>Range:</source> <translation>Intervalle :</translation> </message> <message> <source>to</source> <translation>à</translation> </message> </context> <context> <name>UnitDisplayStatusBarControl</name> <message> <source>Unit to show amounts in. Click to select another unit.</source> <translation>Unité utilisée pour montrer les montants. Cliquez pour choisir une autre unité.</translation> </message> </context> <context> <name>WalletFrame</name> <message> <source>No wallet has been loaded.</source> <translation>Aucun portefeuille de chargé.</translation> </message> </context> <context> <name>WalletModel</name> <message> <source>Send Coins</source> <translation>Envoyer des pièces</translation> </message> <message> <source>InstantX doesn't support sending values that high yet. Transactions are currently limited to %1 MTUCICOIN.</source> <translation>InstantX ne supporte pas des transferts aussi élevés. 
Les transactions sont pour le moment limitées à %1 MTUCICOIN.</translation> </message> </context> <context> <name>WalletView</name> <message> <source>&amp;Export</source> <translation>&amp;Exporter</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Exporter les données de l'onglet courant vers un fichier</translation> </message> <message> <source>Selected amount:</source> <translation>Montant sélectionné:</translation> </message> <message> <source>Backup Wallet</source> <translation>Sauvegarder le portefeuille</translation> </message> <message> <source>Wallet Data (*.dat)</source> <translation>Données de portefeuille (*.dat)</translation> </message> <message> <source>Backup Failed</source> <translation>Échec de la sauvegarde</translation> </message> <message> <source>There was an error trying to save the wallet data to %1.</source> <translation>Une erreur est survenue lors de l'enregistrement des données de portefeuille vers %1.</translation> </message> <message> <source>Backup Successful</source> <translation>Sauvegarde réussie</translation> </message> <message> <source>The wallet data was successfully saved to %1.</source> <translation>Les données de portefeuille ont été enregistrées avec succès vers %1.</translation> </message> </context> <context> <name>mtucicoin-core</name> <message> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Se lier à l'adresse donnée et toujours l'écouter. Utilisez la notation [host]:port pour l'IPv6</translation> </message> <message> <source>Cannot obtain a lock on data directory %s. Mtucicoin Core is probably already running.</source> <translation>Impossible d’obtenir un verrou sur le répertoire de données %s. 
Mtucicoin Core fonctionne probablement déjà.</translation> </message> <message> <source>Darksend uses exact denominated amounts to send funds, you might simply need to anonymize some more coins.</source> <translation>Darksend utilise les montants dénominés exacts pour envoyer des fonds, vous pourriez simplement avoir besoin d'anonymiser plus de pièces.</translation> </message> <message> <source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source> <translation>Passer en mode de test de régression qui utilise une chaîne spéciale dans laquelle les blocs sont résolus instantanément.</translation> </message> <message> <source>Error: Listening for incoming connections failed (listen returned error %s)</source> <translation>Erreur: L'écoute de connections entrantes a échouée (erreur retournée: %s)</translation> </message> <message> <source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source> <translation>Exécuter une commande lorsqu'une alerte pertinente est reçue ou si nous voyons une bifurcation vraiment étendue (%s dans la commande est remplacé par le message)</translation> </message> <message> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Exécuter la commande lorsqu'une transaction de portefeuille change (%s dans la commande est remplacée par TxID)</translation> </message> <message> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Exécuter la commande lorsque le meilleur bloc change (%s dans cmd est remplacé par le hachage du bloc)</translation> </message> <message> <source>In this mode -genproclimit controls how many blocks are generated immediately.</source> <translation>Dans ce mode -genproclimit contrôle combien de blocs sont générés immédiatement.</translation> </message> <message> <source>InstantX requires inputs 
with at least 6 confirmations, you might need to wait a few minutes and try again.</source> <translation>InstantX nécessite des entrées avec au moins 6 confirmations, vous devriez attendre quelques minutes avant de réessayer.</translation> </message> <message> <source>Name to construct url for KeePass entry that stores the wallet passphrase</source> <translation>Nom pour construire l'URL pour l'entrée KeePass qui conserve la phrase de passe du portefeuille</translation> </message> <message> <source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source> <translation>Requête pour adresses de pairs via recherche DNS, si peu d'adresses (par défaut: 1 sauf si -connect)</translation> </message> <message> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source> <translation>Définir la taille maximale en octets des transactions prioritaires/à frais modiques (par défaut : %d)</translation> </message> <message> <source>Set the number of script verification threads (%u to %d, 0 = auto, &lt;0 = leave that many cores free, default: %d)</source> <translation>Définir le nombre d'exétrons de vérification des scripts (%u à %d, 0 = auto, &lt; 0 = laisser ce nombre de cœurs inutilisés, par défaut : %d)</translation> </message> <message> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation>Ceci est une pré-version de test - l'utiliser à vos risques et périls - ne pas l'utiliser pour miner ou pour des applications marchandes</translation> </message> <message> <source>Unable to bind to %s on this computer. Mtucicoin Core is probably already running.</source> <translation>Impossible de se lier à %s sur cet ordinateur. 
Mtucicoin Core fonctionne probablement déjà.</translation> </message> <message> <source>Unable to locate enough Darksend denominated funds for this transaction.</source> <translation>Impossible de localiser suffisamment de fonds Darksend dénominés pour cette transaction.</translation> </message> <message> <source>Unable to locate enough Darksend non-denominated funds for this transaction that are not equal 1000 MTUCICOIN.</source> <translation>Impossible de localiser suffisamment de fonds non-dénominés Darksend pour cette transaction qui ne sont pas égaux à 1000 MTUCICOIN.</translation> </message> <message> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Attention : -paytxfee est réglée sur un montant très élevé ! Il s'agit des frais de transaction que vous payerez si vous envoyez une transaction.</translation> </message> <message> <source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source> <translation>Attention : Le réseau ne semble pas totalement d'accord ! Quelques mineurs semblent éprouver des difficultés.</translation> </message> <message> <source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Attention : Nous ne semblons pas être en accord complet avec nos pairs ! Vous pourriez avoir besoin d'effectuer une mise à niveau, ou d'autres nœuds du réseau pourraient avoir besoin d'effectuer une mise à niveau.</translation> </message> <message> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Avertissement : une erreur est survenue lors de la lecture de wallet.dat ! 
Toutes les clefs ont été lues correctement mais les données de transaction ou les entrées du carnet d'adresses sont peut-être incorrectes ou manquantes.</translation> </message> <message> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Avertissement : wallet.dat corrompu, données récupérées ! Le fichier wallet.dat original a été enregistré en tant que wallet.{timestamp}.bak dans %s ; si votre solde ou transactions sont incorrects vous devriez effectuer une restauration depuis une sauvegarde.</translation> </message> <message> <source>You must specify a masternodeprivkey in the configuration. Please see documentation for help.</source> <translation>Vous devez définir masternodeprivkey dans la configuration. Veuillez consulter la documentation pour plus d'aide.</translation> </message> <message> <source>(default: 1)</source> <translation>(par défaut : 1)</translation> </message> <message> <source>Accept command line and JSON-RPC commands</source> <translation>Accepter les commandes de JSON-RPC et de la ligne de commande</translation> </message> <message> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Accepter les connexions entrantes (par défaut : 1 si aucun -proxy ou -connect )</translation> </message> <message> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Ajouter un nœud auquel se connecter et tenter de garder la connexion ouverte</translation> </message> <message> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Autoriser les recherches DNS pour -addnode, -seednode et -connect</translation> </message> <message> <source>Already have that input.</source> <translation>Entrée déjà présente.</translation> </message> <message> <source>Attempt to recover private keys from 
a corrupt wallet.dat</source> <translation>Tenter de récupérer les clefs privées d'un wallet.dat corrompu</translation> </message> <message> <source>Block creation options:</source> <translation>Options de création de bloc :</translation> </message> <message> <source>Can't denominate: no compatible inputs left.</source> <translation>Impossible de dénominer : aucune entrée compatible restante.</translation> </message> <message> <source>Cannot downgrade wallet</source> <translation>Impossible de revenir à une version inférieure du portefeuille</translation> </message> <message> <source>Cannot resolve -bind address: '%s'</source> <translation>Impossible de résoudre l'adresse -bind : « %s »</translation> </message> <message> <source>Cannot resolve -externalip address: '%s'</source> <translation>Impossible de résoudre l'adresse -externalip : « %s »</translation> </message> <message> <source>Cannot write default address</source> <translation>Impossible d'écrire l'adresse par défaut</translation> </message> <message> <source>Collateral not valid.</source> <translation>Collatéral invalide.</translation> </message> <message> <source>Connect only to the specified node(s)</source> <translation>Ne se connecter qu'au(x) nœud(s) spécifié(s)</translation> </message> <message> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Se connecter à un nœud pour obtenir des adresses de pairs puis se déconnecter</translation> </message> <message> <source>Connection options:</source> <translation>Options de connexion :</translation> </message> <message> <source>Corrupted block database detected</source> <translation>Base corrompue de données des blocs détectée</translation> </message> <message> <source>Darksend options:</source> <translation>Options Darksend :</translation> </message> <message> <source>Debugging/Testing options:</source> <translation>Options de test/de débogage :</translation> </message> <message> <source>Discover own IP address 
(default: 1 when listening and no -externalip)</source> <translation>Découvrir sa propre adresse IP (par défaut : 1 lors de l'écoute et si aucun -externalip)</translation> </message> <message> <source>Do not load the wallet and disable wallet RPC calls</source> <translation>Ne pas charger le portefeuille et désactiver les appels RPC</translation> </message> <message> <source>Do you want to rebuild the block database now?</source> <translation>Voulez-vous reconstruire la base de données des blocs maintenant ?</translation> </message> <message> <source>Done loading</source> <translation>Chargement terminé</translation> </message> <message> <source>Entries are full.</source> <translation>Les entrées sont pleines.</translation> </message> <message> <source>Error initializing block database</source> <translation>Erreur lors de l'initialisation de la base de données des blocs</translation> </message> <message> <source>Error initializing wallet database environment %s!</source> <translation>Erreur lors de l'initialisation de l'environnement de la base de données du portefeuille %s !</translation> </message> <message> <source>Error loading block database</source> <translation>Erreur du chargement de la base de données des blocs</translation> </message> <message> <source>Error loading wallet.dat</source> <translation>Erreur lors du chargement de wallet.dat</translation> </message> <message> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Erreur lors du chargement de wallet.dat : portefeuille corrompu</translation> </message> <message> <source>Error opening block database</source> <translation>Erreur lors de l'ouverture de la base de données des blocs</translation> </message> <message> <source>Error reading from database, shutting down.</source> <translation>Erreur à la lecture de la base de données, arrêt en cours.</translation> </message> <message> <source>Error recovering public key.</source> <translation>Erreur à la récupération de la clé 
publique.</translation> </message> <message> <source>Error</source> <translation>Erreur</translation> </message> <message> <source>Error: Disk space is low!</source> <translation>Erreur : l'espace disque est faible !</translation> </message> <message> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Erreur : Portefeuille verrouillé, impossible de créer la transaction !</translation> </message> <message> <source>Error: You already have pending entries in the Darksend pool</source> <translation>Erreur : Vous avez déjà des entrées en attente dans la pool Darksend</translation> </message> <message> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Échec de l'écoute sur un port quelconque. Utilisez -listen=0 si vous voulez ceci.</translation> </message> <message> <source>Failed to read block</source> <translation>La lecture du bloc a échoué</translation> </message> <message> <source>If &lt;category&gt; is not supplied, output all debugging information.</source> <translation>Si &lt;category&gt; n'est pas indiqué, extraire toutes les données de débogage.</translation> </message> <message> <source>(1 = keep tx meta data e.g. account owner and payment request information, 2 = drop tx meta data)</source> <translation>(1 = garder les méta-données de tx, par ex nom de compte et infos de paiements, 2 = supprimer méta-données)</translation> </message> <message> <source>Allow JSON-RPC connections from specified source. Valid for &lt;ip&gt; are a single IP (e.g. 1.2.3.4), a network/netmask (e.g. 1.2.3.4/255.255.255.0) or a network/CIDR (e.g. 1.2.3.4/24). This option can be specified multiple times</source> <translation>Permettre connections JSON-RPC depuis la source spécifiée. Valide pour &lt;ip&gt; sont: une IP seule (ex. 1.2.3.4), un réseau/masque (ex. 1.2.3.4/255.255.255.0) ou un réseau/CIDR (ex. 1.2.3.4/24). 
Ce paramétre peut être utilisé plusieurs fois.</translation> </message> <message> <source>An error occurred while setting up the RPC address %s port %u for listening: %s</source> <translation>Une erreur est survenue lors du réglage RPC avec l'adresse %s port %u pour écouter: %s</translation> </message> <message> <source>Bind to given address and whitelist peers connecting to it. Use [host]:port notation for IPv6</source> <translation>Se lier à l'adresse donnée et mettre les pairs qui se connectent en liste blanche. Utilisez la notation [hôte]:port pour l'IPv6</translation> </message> <message> <source>Bind to given address to listen for JSON-RPC connections. Use [host]:port notation for IPv6. This option can be specified multiple times (default: bind to all interfaces)</source> <translation>Se lier à l'adresse indiquée pour écouter des connections JSON-RPC. Utilisez la notation [hôte]:port pour l'IPv6. Ce paramètre peut être utilisée à plusieurs reprises (par défaut: se lie a toutes les interfaces)</translation> </message> <message> <source>Change automatic finalized budget voting behavior. mode=auto: Vote for only exact finalized budget match to my generated budget. (string, default: auto)</source> <translation>Change le comportement d'un vote de budget finalisé automatique. mode=auto: Vote uniquement pour le budget finalisé qui correspond a mon budget généré. 
(string, par défaut : auto)</translation> </message> <message> <source>Continuously rate-limit free transactions to &lt;n&gt;*1000 bytes per minute (default:%u)</source> <translation>Limiter continuellement les transactions gratuites à &lt;n&gt;*1000 octets par minute (par défaut : %u)</translation> </message> <message> <source>Create new files with system default permissions, instead of umask 077 (only effective with disabled wallet functionality)</source> <translation>Créer les nouveaux fichiers avec les permissions systèmes par défaut, au lieu du umask 077 (utile seulement si le wallet est désactivé)</translation> </message> <message> <source>Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup</source> <translation>Effacer toutes les transactions du portefeuille et récupère celle qui font partie de la chaine de blocs via -rescan au démarrage</translation> </message> <message> <source>Disable all Mtucicoin specific functionality (Masternodes, Darksend, InstantX, Budgeting) (0-1, default: %u)</source> <translation>Désactivez toutes les fonctionnalités liées à Mtucicoin (Masternode, Darksend, InstantX, Budgetisation) (0-1, par défaut: %u)</translation> </message> <message> <source>Distributed under the MIT software license, see the accompanying file COPYING or &lt;http://www.opensource.org/licenses/mit-license.php&gt;.</source> <translation>Distribué sous la licence logicielle MIT, voir le fichier joint COPYING ou &lt;http://www.opensource.org/licenses/mit-license.php&gt;.</translation> </message> <message> <source>Enable instantx, show confirmations for locked transactions (bool, default: %s)</source> <translation>Activer instantx, montrer les confirmations pour les transactions verrouillées (bool, par defaut: %s)</translation> </message> <message> <source>Enable use of automated darksend for funds stored in this wallet (0-1, default: %u)</source> <translation>Activer l'utilisation automatique de Darksend pour 
les fonds stockés dans ce portefeuille (0-1, défaut: %u)</translation> </message> <message> <source>Error: Unsupported argument -socks found. Setting SOCKS version isn't possible anymore, only SOCKS5 proxies are supported.</source> <translation>Erreur: Paramètre obsolète -socks. Il n'est plus possible d'indiquer la version SOCKS, seul les proxy SOCKS5 sont supportés.</translation> </message> <message> <source>Fees (in MTUCICOIN/Kb) smaller than this are considered zero fee for relaying (default: %s)</source> <translation>Les frais (en MTUCICOIN/ko) inférieurs à ce seuil sont considérés comme nuls pour le relayage (par défaut : %s)</translation> </message> <message> <source>Fees (in MTUCICOIN/Kb) smaller than this are considered zero fee for transaction creation (default: %s)</source> <translation>Les frais (en MTUCICOIN/ko) inférieurs à ce seuil sont considérés comme nuls pour la création de transactions (par défaut : %s)</translation> </message> <message> <source>Flush database activity from memory pool to disk log every &lt;n&gt; megabytes (default: %u)</source> <translation>Purger l’activité de la base de données de la zone de mémoire vers le journal sur disque tous les &lt;n&gt; mégaoctets (par défaut : %u)</translation> </message> <message> <source>Found unconfirmed denominated outputs, will wait till they confirm to continue.</source> <translation>Détection de sorties dénominées non confirmées, attente de leur confirmation pour continuer.</translation> </message> <message> <source>How thorough the block verification of -checkblocks is (0-4, default: %u)</source> <translation>Degré de profondeur de la vérification des blocs -checkblocks (0-4, par défaut : %u)</translation> </message> <message> <source>If paytxfee is not set, include enough fee so transactions begin confirmation on average within n blocks (default: %u)</source> <translation>Si paytxfee n'est pas indiqué, inclure assez de frais pour que les transactions commencent leur confirmation en moyenne 
dans les n blocs (par défaut : %u)</translation> </message> <message> <source>Invalid amount for -maxtxfee=&lt;amount&gt;: '%s' (must be at least the minrelay fee of %s to prevent stuck transactions)</source> <translation>Montant invalide pour -maxtxfee=&lt;montant&gt; : « %s » (doit être au moins du montant de frais minrelay de %s pour éviter des transactions bloquées)</translation> </message> <message> <source>Log transaction priority and fee per kB when mining blocks (default: %u)</source> <translation>Lors du minage, journaliser la priorité des transactions et les frais par ko (par défaut : %u) </translation> </message> <message> <source>Maintain a full transaction index, used by the getrawtransaction rpc call (default: %u)</source> <translation>Maintenir un index complet des transactions, utilisé par l'appel rpc getrawtransaction (par défaut : %u)</translation> </message> <message> <source>Maximum size of data in data carrier transactions we relay and mine (default: %u)</source> <translation>Taille maximale des données dans les transactions support de données que l'on relaye et mine (par défaut : %u)</translation> </message> <message> <source>Maximum total fees to use in a single wallet transaction, setting too low may abort large transactions (default: %s)</source> <translation>Frais totaux maximum pour une transaction portefeuille unique, si trop bas, risque d'annulation pour transactions trop volumineuses (par défaut : %s)</translation> </message> <message> <source>Number of seconds to keep misbehaving peers from reconnecting (default: %u)</source> <translation>Délai en secondes de refus de reconnexion pour les pairs présentant un mauvais comportement (par défaut : %u)</translation> </message> <message> <source>Output debugging information (default: %u, supplying &lt;category&gt; is optional)</source> <translation>Extraire les informations de débogage (par défaut : %u, fournir &lt;category&gt; est optionnel)</translation> </message> <message> 
<source>Provide liquidity to Darksend by infrequently mixing coins on a continual basis (0-100, default: %u, 1=very frequent, high fees, 100=very infrequent, low fees)</source> <translation>Fournir des liquidités à Darksend en mélangeant occasionnellement mais régulièrement des pièces (0-100, par défaut : %u, 1=très fréquent, frais élevés, 100=très rare, frais bas)</translation> </message> <message> <source>Require high priority for relaying free or low-fee transactions (default:%u)</source> <translation>Priorité haute requise pour relayer les transactions à frais modiques ou nuls (par défaut : %u)</translation> </message> <message> <source>Send trace/debug info to console instead of debug.log file (default: %u)</source> <translation>Envoyer les informations de débogage/trace vers la console au lieu du fichier debug.log (par défaut: %u)</translation> </message> <message> <source>Set the number of threads for coin generation if enabled (-1 = all cores, default: %d)</source> <translation>Définir la limite processeur définissant quand la génération est en fonction (-1 = illimité, par défaut : %d)</translation> </message> <message> <source>Show N confirmations for a successfully locked transaction (0-9999, default: %u)</source> <translation>Afficher N confirmations pour une transaction verrouillée réussie (0-9999, par défaut : %u)</translation> </message> <message> <source>This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit &lt;https://www.openssl.org/&gt; and cryptographic software written by Eric Young and UPnP software written by Thomas Bernard.</source> <translation>Ce produit comprend des logiciels développés par le projet OpenSSL afin d'être utilisés dans la boîte à outils OpenSSL &lt;https://www.openssl.org/&gt;, un logiciel de chiffrement écrit par Eric Young et un logiciel UPnP développé par Thomas Bernard.</translation> </message> <message> <source>To use mtucicoind, or the -server option to mtucicoin-qt, you must set an 
rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=mtucicoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s "Mtucicoin Alert" [email protected] </source> <translation>Pour utiliser mtucicoind, ou le paramètre -server de mtucicoin-qt, vous devez définir un rpc mot de passe dans le fichier de configuration: %s Il est recommandé que vous utilisiez ce mot de passe aléatoire: rpcuser=mtucicoinrpc rpcpassword=%s (Vous ne devez pas vous souvenir de ce mot de passe) Le nom d'utilisateur et le mot de passe NE DOIVENT PAS être équivalent. Si le fichier n'existe pas, créé le avec les permissions de lecture uniquement pour le propriétaire. Il est recommandé de régler alertnotify pour que vous soyez averti des problèmes; Pour exemple: alertnotify=echo %%s | mail -s "Alerte Mtucicoin" [email protected] </translation> </message> <message> <source>Unable to locate enough funds for this transaction that are not equal 1000 MTUCICOIN.</source> <translation>Impossible de localiser suffisamment de fonds pour cette transaction qui ne sont pas égaux à 1000 MTUCICOIN.</translation> </message> <message> <source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: %s)</source> <translation>Utiliser un serveur proxy SOCKS5 séparé pour atteindre les pairs par les services cachés de Tor (par défaut : %s)</translation> </message> <message> <source>Warning: -maxtxfee is set very high! Fees this large could be paid on a single transaction.</source> <translation>Attention : -maxtxfee est réglée sur un montant très élevé ! 
Il s'agit des frais de transaction que vous payerez si vous envoyez une transaction.</translation> </message> <message> <source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Mtucicoin Core will not work properly.</source> <translation>Attention : Veuillez vérifier que la date et l'heure de votre ordinateur sont justes ! Si votre horloge n'est pas à l'heure, Mtucicoin Core ne fonctionnera pas correctement.</translation> </message> <message> <source>Whitelist peers connecting from the given netmask or IP address. Can be specified multiple times.</source> <translation>Pairs en liste blanche qui se connectent via le masque réseau ou adresse IP. Peut être spécifié de multiples fois.</translation> </message> <message> <source>Whitelisted peers cannot be DoS banned and their transactions are always relayed, even if they are already in the mempool, useful e.g. for a gateway</source> <translation>Pairs en liste blanche ne peuvent être bannis pour DoS et leurs transactions sont toujours relayées, même si elles sont déjà en mémoire, utile par ex. 
pour une passerelle</translation> </message> <message> <source>(9999 could be used only on mainnet)</source> <translation>(9999 n'est utilisable que sur mainnet)</translation> </message> <message> <source>(default: %s)</source> <translation>(par défaut: %s)</translation> </message> <message> <source>&lt;category&gt; can be: </source> <translation>&lt;category&gt; peut être : </translation> </message> <message> <source>Accept public REST requests (default: %u)</source> <translation>Accepter les requetes REST publiques (par défaut: %u)</translation> </message> <message> <source>Acceptable ciphers (default: %s)</source> <translation>Chiffrements acceptables (par défaut: %s)</translation> </message> <message> <source>Always query for peer addresses via DNS lookup (default: %u)</source> <translation>Toujours requêter via recherche DNS pour des adresses de pairs (par défaut: %u)</translation> </message> <message> <source>Cannot resolve -whitebind address: '%s'</source> <translation>Impossible de résoudre l'adresse -whitebind : « %s »</translation> </message> <message> <source>Connect through SOCKS5 proxy</source> <translation>Connexion à travers un serveur mandataire SOCKS5</translation> </message> <message> <source>Connect to KeePassHttp on port &lt;port&gt; (default: %u)</source> <translation>Connecter à KeePassHttp sur le port &lt;port&gt; (par défaut: %u)</translation> </message> <message> <source>Copyright (C) 2009-%i The Bitcoin Core Developers</source> <translation>Copyright (C) 2009-%i The Bitcoin Core Developers</translation> </message> <message> <source>Copyright (C) 2014-%i The Mtucicoin Core Developers</source> <translation>Copyright (C) 2014-%i The Mtucicoin Core Developers</translation> </message> <message> <source>Could not parse -rpcbind value %s as network address</source> <translation>Impossible d'analyser la valeur -rpcbind %s en tant qu'adresse réseau</translation> </message> <message> <source>Darksend is idle.</source> <translation>Darksend est 
inactif.</translation> </message> <message> <source>Darksend request complete:</source> <translation>Requête Darksend complète :</translation> </message> <message> <source>Darksend request incomplete:</source> <translation>Requête Darksend incomplète.</translation> </message> <message> <source>Disable safemode, override a real safe mode event (default: %u)</source> <translation>Désactiver le mode sans échec, passer outre un événement sans échec réel (par défaut : %u)</translation> </message> <message> <source>Enable the client to act as a masternode (0-1, default: %u)</source> <translation>Autoriser le client à agir en tant que masternode (0-1, par défaut : %u)</translation> </message> <message> <source>Error connecting to Masternode.</source> <translation>Erreur de connexion au masternode.</translation> </message> <message> <source>Error loading wallet.dat: Wallet requires newer version of Mtucicoin Core</source> <translation>Erreur au chargement de wallet.dat : le Portefeuille nécessite une nouvelle version de Mtucicoin Core</translation> </message> <message> <source>Error: A fatal internal error occured, see debug.log for details</source> <translation>Erreur: Une erreur interne fatale est survenue, voir debug.log pour les détails</translation> </message> <message> <source>Error: Can't select current denominated inputs</source> <translation>Erreur: Impossible de selectionner les entrées denommées</translation> </message> <message> <source>Error: Unsupported argument -tor found, use -onion.</source> <translation>Erreur: Paramètre -tor non supporté, utilisez -onion.</translation> </message> <message> <source>Fee (in MTUCICOIN/kB) to add to transactions you send (default: %s)</source> <translation>Frais (en MTUCICOIN/ko) à ajouter aux transactions que vous envoyez (par défaut: %s)</translation> </message> <message> <source>Finalizing transaction.</source> <translation>Finalisation de la transaction.</translation> </message> <message> <source>Force safe mode 
(default: %u)</source> <translation>Forcer le mode sans échec (par défaut : %u)</translation> </message> <message> <source>Found enough users, signing ( waiting %s )</source> <translation>Nombre suffisant d'utilisateurs trouvé, signature ( attente %s )</translation> </message> <message> <source>Found enough users, signing ...</source> <translation>Nombre suffisant d'utilisateurs trouvé, signature ...</translation> </message> <message> <source>Generate coins (default: %u)</source> <translation>Générer des pièces (défaut : %u)</translation> </message> <message> <source>How many blocks to check at startup (default: %u, 0 = all)</source> <translation>Nombre de blocs à vérifier au démarrage (par défaut : %u, 0 = tous)</translation> </message> <message> <source>Importing...</source> <translation>Importation...</translation> </message> <message> <source>Imports blocks from external blk000??.dat file</source> <translation>Importe des blocs depuis un fichier blk000??.dat externe</translation> </message> <message> <source>Include IP addresses in debug output (default: %u)</source> <translation>Inclure les adresses IP dans la sortie debug (par défaut: %u)</translation> </message> <message> <source>Incompatible mode.</source> <translation>Mode incompatible.</translation> </message> <message> <source>Incompatible version.</source> <translation>Version incompatible.</translation> </message> <message> <source>Incorrect or no genesis block found. Wrong datadir for network?</source> <translation>Bloc de genèse incorrect ou introuvable. Mauvais répertoire de données pour le réseau ?</translation> </message> <message> <source>Information</source> <translation>Informations</translation> </message> <message> <source>Initialization sanity check failed. Mtucicoin Core is shutting down.</source> <translation>Les tests de cohérences lors de l'initialisation ont échoués. 
Mtucicoin Core est en cours de fermeture.</translation> </message> <message> <source>Input is not valid.</source> <translation>L'entrée est invalide.</translation> </message> <message> <source>InstantX options:</source> <translation>Options InstantX :</translation> </message> <message> <source>Insufficient funds.</source> <translation>Fonds insuffisants</translation> </message> <message> <source>Invalid -onion address: '%s'</source> <translation>Adresse -onion invalide : « %s »</translation> </message> <message> <source>Invalid -proxy address: '%s'</source> <translation>Adresse -proxy invalide : « %s »</translation> </message> <message> <source>Invalid amount for -maxtxfee=&lt;amount&gt;: '%s'</source> <translation>Montant invalide pour -maxtxfee=&lt;montant&gt; : « %s »</translation> </message> <message> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: '%s'</source> <translation>Montant invalide pour -minrelayfee=&lt;montant&gt; : « %s »</translation> </message> <message> <source>Invalid amount for -mintxfee=&lt;amount&gt;: '%s'</source> <translation>Montant invalide pour -mintxfee=&lt;montant&gt; : « %s »</translation> </message> <message> <source>Invalid amount for -paytxfee=&lt;amount&gt;: '%s' (must be at least %s)</source> <translation>Montant invalide pour -paytxfee=&lt;montant&gt; : « %s » (minimum possible: %s)</translation> </message> <message> <source>Invalid amount for -paytxfee=&lt;amount&gt;: '%s'</source> <translation>Montant invalide pour -paytxfee=&lt;montant&gt; : « %s »</translation> </message> <message> <source>Last successful Darksend action was too recent.</source> <translation>La dernière action Darksend réussie est trop récente.</translation> </message> <message> <source>Limit size of signature cache to &lt;n&gt; entries (default: %u)</source> <translation>Limiter la taille du cache des signatures à &lt;n&gt; entrées (par défaut : %u)</translation> </message> <message> <source>Listen for JSON-RPC connections on &lt;port&gt; 
(default: %u or testnet: %u)</source> <translation>Écouter les connexions JSON-RPC sur &lt;port&gt; (par défaut : %u ou testnet : %u)</translation> </message> <message> <source>Listen for connections on &lt;port&gt; (default: %u or testnet: %u)</source> <translation>Écouter les connexions sur &lt;port&gt; (par défaut: %u ou testnet: %u)</translation> </message> <message> <source>Loading budget cache...</source> <translation>Chargement du cache de budget...</translation> </message> <message> <source>Loading masternode cache...</source> <translation>Chargement du cache de masternode...</translation> </message> <message> <source>Loading masternode payment cache...</source> <translation>Chargement du cache de paiement masternode...</translation> </message> <message> <source>Lock is already in place.</source> <translation>Verrou déjà en place.</translation> </message> <message> <source>Lock masternodes from masternode configuration file (default: %u)</source> <translation>Verrouiller les masternodes depuis le fichier de configuration masternode (par défaut : %u)</translation> </message> <message> <source>Maintain at most &lt;n&gt; connections to peers (default: %u)</source> <translation>Garder au plus &lt;n&gt; connexions avec les pairs (par défaut : %u)</translation> </message> <message> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: %u)</source> <translation>Tampon maximal de réception par connexion, &lt;n&gt;*1000 octets (par défaut : %u)</translation> </message> <message> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: %u)</source> <translation>Tampon maximal d'envoi par connexion, &lt;n&gt;*1000 octets (par défaut : %u)</translation> </message> <message> <source>Mixing in progress...</source> <translation>Mélange en cours...</translation> </message> <message> <source>Need to specify a port with -whitebind: '%s'</source> <translation>Un port doit être spécifié avec -whitebind: '%s'</translation> </message> 
<message> <source>No Masternodes detected.</source> <translation>Aucun Masternode détecté.</translation> </message> <message> <source>No compatible Masternode found.</source> <translation>Aucun Masternode compatible trouvé.</translation> </message> <message> <source>Not in the Masternode list.</source> <translation>Absent de la liste des Masternodes.</translation> </message> <message> <source>Number of automatic wallet backups (default: 10)</source> <translation>Nombre de sauvegarde automatique de portefeuille (par défaut : 10)</translation> </message> <message> <source>Only accept block chain matching built-in checkpoints (default: %u)</source> <translation>N'accepter qu'une chaîne de blocs correspondant aux points de vérification intégrés (par défaut : %u)</translation> </message> <message> <source>Only connect to nodes in network &lt;net&gt; (ipv4, ipv6 or onion)</source> <translation>Se connecter uniquement aux nœuds du réseau &lt;net&gt; (IPv4, IPv6 ou onion)</translation> </message> <message> <source>Prepend debug output with timestamp (default: %u)</source> <translation>Ajouter l'horodatage au début de la sortie de débogage (par défaut : %u)</translation> </message> <message> <source>Run a thread to flush wallet periodically (default: %u)</source> <translation>Exécuter une tâche pour purger le portefeuille périodiquement (par défaut : %u) </translation> </message> <message> <source>Send trace/debug info to debug.log file (default: %u)</source> <translation>Envoyer les informations de débogage/trace au fichier debug.log (par défaut: %u)</translation> </message> <message> <source>Send transactions as zero-fee transactions if possible (default: %u)</source> <translation>N'envoyer que des transactions sans frais si possible (par défaut : %u)</translation> </message> <message> <source>Server certificate file (default: %s)</source> <translation>Fichier de certification du serveur (par défaut : %s)</translation> </message> <message> <source>Server private key 
(default: %s)</source> <translation>Clef privée du serveur (par défaut : %s)</translation> </message> <message> <source>Set external address:port to get to this masternode (example: %s)</source> <translation>Définir une adresse:port externe pour accéder à ce masternode (exemple : %s)</translation> </message> <message> <source>Set key pool size to &lt;n&gt; (default: %u)</source> <translation>Définir la taille de la réserve de clefs à &lt;n&gt; (par défaut : %u)</translation> </message> <message> <source>Set minimum block size in bytes (default: %u)</source> <translation>Définir la taille de bloc minimale en octets (par défaut : %u)</translation> </message> <message> <source>Set the number of threads to service RPC calls (default: %d)</source> <translation>Définir le nombre de fils d’exécution pour desservir les appels RPC (par défaut : %d)</translation> </message> <message> <source>Sets the DB_PRIVATE flag in the wallet db environment (default: %u)</source> <translation>Définit le drapeau DB_PRIVATE dans l'environnement de la BD du portefeuille (par défaut : %u)</translation> </message> <message> <source>Signing timed out.</source> <translation>Signature expirée.</translation> </message> <message> <source>Specify configuration file (default: %s)</source> <translation>Définir le fichier de configuration (par défaut : %s)</translation> </message> <message> <source>Specify connection timeout in milliseconds (minimum: 1, default: %d)</source> <translation>Spécifier le délai d'expiration de la connexion en millisecondes (minimum : 1, par défaut : %d)</translation> </message> <message> <source>Specify masternode configuration file (default: %s)</source> <translation>Définir le fichier de configuration du masternode (par défaut : %s)</translation> </message> <message> <source>Specify pid file (default: %s)</source> <translation>Définir le fichier pid (défaut : %s)</translation> </message> <message> <source>Spend unconfirmed change when sending transactions (default: 
%u)</source> <translation>Dépenser la monnaie non confirmée lors de l'envoi de transactions (par défaut : %u)</translation> </message> <message> <source>Stop running after importing blocks from disk (default: %u)</source> <translation>Arrêter après l'importation des blocs du disque (par défaut : %u)</translation> </message> <message> <source>Submitted following entries to masternode: %u / %d</source> <translation>Les entrées suivantes ont été envoyées au masternode: %u / %d</translation> </message> <message> <source>Submitted to masternode, waiting for more entries ( %u / %d ) %s</source> <translation>Envoyé au masternode, en attente d'entrées supplémentaires ( %u / %d ) %s</translation> </message> <message> <source>Submitted to masternode, waiting in queue %s</source> <translation>Soumis au masternode, dans la file d'attente %s</translation> </message> <message> <source>Synchronization failed</source> <translation>La synchronisation a échoué</translation> </message> <message> <source>Synchronization finished</source> <translation>La synchronisation est terminée</translation> </message> <message> <source>Synchronizing budgets...</source> <translation>Synchronisation des budgets...</translation> </message> <message> <source>Synchronizing masternode winners...</source> <translation>Synchronisation des masternodes vainqueurs...</translation> </message> <message> <source>Synchronizing masternodes...</source> <translation>Synchronisation des masternodes...</translation> </message> <message> <source>Synchronizing sporks...</source> <translation>Synchronisation des sporks...</translation> </message> <message> <source>This is not a Masternode.</source> <translation>Ceci n'est pas un masternode.</translation> </message> <message> <source>Threshold for disconnecting misbehaving peers (default: %u)</source> <translation>Seuil de déconnexion des pairs présentant un mauvais comportement (par défaut : %u)</translation> </message> <message> <source>Use KeePass 2 integration using KeePassHttp plugin (default: %u)</source> 
<translation>Utiliser l'intégration KeePass 2 en utilisant le greffon KeePassHttp (par défaut : %u)</translation> </message> <message> <source>Use N separate masternodes to anonymize funds (2-8, default: %u)</source> <translation>Utiliser N masternodes différents pour anonymiser les fonds (2-8, par défaut : %u)</translation> </message> <message> <source>Use UPnP to map the listening port (default: %u)</source> <translation>Utiliser l'UPnP pour mapper le port d'écoute (par défaut : %u)</translation> </message> <message> <source>Wallet needed to be rewritten: restart Mtucicoin Core to complete</source> <translation>Le portefeuille devait être réécrit : redémarrer Mtucicoin Core pour terminer l'opération.</translation> </message> <message> <source>Warning: Unsupported argument -benchmark ignored, use -debug=bench.</source> <translation>Attention : l'argument obsolète -benchmark a été ignoré, utiliser -debug=bench</translation> </message> <message> <source>Warning: Unsupported argument -debugnet ignored, use -debug=net.</source> <translation>Attention : l'argument obsolète -debugnet a été ignoré, utiliser -debug=net</translation> </message> <message> <source>Will retry...</source> <translation>Va réessayer ...</translation> </message> <message> <source>Invalid masternodeprivkey. Please see documenation.</source> <translation>masternodeprivkey invalide. 
Veuillez vous référer à la documentation.</translation> </message> <message> <source>(must be 9999 for mainnet)</source> <translation>(doit être 9999 pour mainnet)</translation> </message> <message> <source>Can't find random Masternode.</source> <translation>Masternode aléatoire introuvable.</translation> </message> <message> <source>Can't mix while sync in progress.</source> <translation>Ne peux pas mélanger pendant la synchronisation.</translation> </message> <message> <source>Could not parse masternode.conf</source> <translation>Impossible d'analyser masternode.conf</translation> </message> <message> <source>Invalid netmask specified in -whitelist: '%s'</source> <translation>Masque de réseau inconnu spécifié sur -whitelist : « %s »</translation> </message> <message> <source>Invalid port detected in masternode.conf</source> <translation>Port non valide détecté dans masternode.conf</translation> </message> <message> <source>Invalid private key.</source> <translation>Clé privée invalide.</translation> </message> <message> <source>Invalid script detected.</source> <translation>Script invalide détecté.</translation> </message> <message> <source>KeePassHttp id for the established association</source> <translation>Id KeePassHttp pour l'association établie</translation> </message> <message> <source>KeePassHttp key for AES encrypted communication with KeePass</source> <translation>Clé KeePassHttp pour la communication chiffrée AES avec KeePass</translation> </message> <message> <source>Keep N MTUCICOIN anonymized (default: %u)</source> <translation>Maintenir N mtucicoin anonymisé en permanence (défaut: %u)</translation> </message> <message> <source>Keep at most &lt;n&gt; unconnectable transactions in memory (default: %u)</source> <translation>Garder au plus &lt;n&gt; transactions sans connexion en mémoire (par défaut : %u)</translation> </message> <message> <source>Last Darksend was too recent.</source> <translation>Le dernier Darksend est trop récent.</translation> 
</message> <message> <source>Line: %d</source> <translation>Ligne: %d</translation> </message> <message> <source>Loading addresses...</source> <translation>Chargement des adresses...</translation> </message> <message> <source>Loading block index...</source> <translation>Chargement de l’index des blocs...</translation> </message> <message> <source>Loading wallet... (%3.2f %%)</source> <translation>Chargement du portefeuille... (%3.2f %%)</translation> </message> <message> <source>Loading wallet...</source> <translation>Chargement du portefeuille...</translation> </message> <message> <source>Masternode options:</source> <translation>Options Masternode :</translation> </message> <message> <source>Masternode queue is full.</source> <translation>La file d'attente du masternode est pleine.</translation> </message> <message> <source>Masternode:</source> <translation>Masternode :</translation> </message> <message> <source>Missing input transaction information.</source> <translation>Informations de transaction entrante manquantes.</translation> </message> <message> <source>No funds detected in need of denominating.</source> <translation>Aucuns fonds détectés nécessitant une dénomination.</translation> </message> <message> <source>No matching denominations found for mixing.</source> <translation>Pas de dénominations équivalentes trouvées pour le mélange.</translation> </message> <message> <source>Node relay options:</source> <translation>Options de noeud de relais:</translation> </message> <message> <source>Non-standard public key detected.</source> <translation>Clé publique non standard détectée.</translation> </message> <message> <source>Not compatible with existing transactions.</source> <translation>Non compatible avec les transactions existantes.</translation> </message> <message> <source>Not enough file descriptors available.</source> <translation>Pas assez de descripteurs de fichiers de disponibles.</translation> </message> <message> <source>Options:</source> 
<translation>Options :</translation> </message> <message> <source>Password for JSON-RPC connections</source> <translation>Mot de passe pour les connexions JSON-RPC</translation> </message> <message> <source>RPC SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation>Options RPC SSL : (voir le wiki Bitcoin pour les instructions de configuration de SSL)</translation> </message> <message> <source>RPC server options:</source> <translation>Options du serveur RPC :</translation> </message> <message> <source>RPC support for HTTP persistent connections (default: %d)</source> <translation>Support RPC pour connections HTTP persistantes (par défaut : %d)</translation> </message> <message> <source>Randomly drop 1 of every &lt;n&gt; network messages</source> <translation>Abandonner aléatoirement 1 message du réseau sur &lt;n&gt;</translation> </message> <message> <source>Randomly fuzz 1 of every &lt;n&gt; network messages</source> <translation>Tester aléatoirement 1 message du réseau sur &lt;n&gt;</translation> </message> <message> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Reconstruire l'index de la chaîne de blocs à partir des fichiers blk000??.dat courants</translation> </message> <message> <source>Receive and display P2P network alerts (default: %u)</source> <translation>Recevoir et afficher les alertes réseau P2P (par défaut : %u)</translation> </message> <message> <source>Relay and mine data carrier transactions (default: %u)</source> <translation>Relayer et miner les transactions de support de données (par défaut : %u)</translation> </message> <message> <source>Relay non-P2SH multisig (default: %u)</source> <translation>Relayer les multisig non-P2SH (par défaut : %u)</translation> </message> <message> <source>Rescan the block chain for missing wallet transactions</source> <translation>Réanalyser la chaîne de blocs pour les transactions de portefeuille manquantes</translation> </message> <message> 
<source>Rescanning...</source> <translation>Nouvelle analyse...</translation> </message> <message> <source>Run in the background as a daemon and accept commands</source> <translation>Fonctionner en arrière-plan en tant que démon et accepter les commandes</translation> </message> <message> <source>Session not complete!</source> <translation>Session incomplète!</translation> </message> <message> <source>Session timed out.</source> <translation>Session expirée.</translation> </message> <message> <source>Set database cache size in megabytes (%d to %d, default: %d)</source> <translation>Définir la taille du cache de la base de données en mégaoctets (%d to %d, default: %d)</translation> </message> <message> <source>Set maximum block size in bytes (default: %d)</source> <translation>Définir la taille minimale de bloc en octets (par défaut : %d)</translation> </message> <message> <source>Set the masternode private key</source> <translation>Définir la clé privée du masternode</translation> </message> <message> <source>Show all debugging options (usage: --help -help-debug)</source> <translation>Montrer toutes les options de débogage (utilisation : --help --help-debug)</translation> </message> <message> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Réduire le fichier debug.log lors du démarrage du client (par défaut : 1 lorsque -debug n'est pas présent)</translation> </message> <message> <source>Signing failed.</source> <translation>La signature a échoué.</translation> </message> <message> <source>Signing transaction failed</source> <translation>La signature de la transaction a échoué</translation> </message> <message> <source>Specify data directory</source> <translation>Spécifier le répertoire de données</translation> </message> <message> <source>Specify wallet file (within data directory)</source> <translation>Spécifiez le fichier de portefeuille (dans le répertoire de données)</translation> </message> <message> 
<source>Specify your own public address</source> <translation>Spécifier votre propre adresse publique</translation> </message> <message> <source>Synchronization pending...</source> <translation>Synchronisation en suspens...</translation> </message> <message> <source>This help message</source> <translation>Ce message d'aide</translation> </message> <message> <source>This is experimental software.</source> <translation>Ceci est un logiciel expérimental.</translation> </message> <message> <source>This is intended for regression testing tools and app development.</source> <translation>Ceci est à l'intention des outils de test de régression et du développement applicatif.</translation> </message> <message> <source>Transaction amount too small</source> <translation>Montant de la transaction trop bas</translation> </message> <message> <source>Transaction amounts must be positive</source> <translation>Les montants de transaction doivent être positifs</translation> </message> <message> <source>Transaction created successfully.</source> <translation>Transaction créée avec succès.</translation> </message> <message> <source>Transaction fees are too high.</source> <translation>Les frais de transaction sont trop élevés.</translation> </message> <message> <source>Transaction not valid.</source> <translation>Transaction invalide.</translation> </message> <message> <source>Transaction too large for fee policy</source> <translation>La transaction est trop volumineuse pour les règles de frais en vigueur</translation> </message> <message> <source>Transaction too large</source> <translation>Transaction trop volumineuse</translation> </message> <message> <source>Transmitting final transaction.</source> <translation>Transmission de la transaction finale.</translation> </message> <message> <source>Unable to bind to %s on this computer (bind returned error %s)</source> <translation>Impossible de se lier à %s sur cet ordinateur (erreur bind retournée %s)</translation> </message> <message> 
<source>Unable to sign spork message, wrong key?</source> <translation>Impossible de signer le message spork, mauvaise clé?</translation> </message> <message> <source>Unknown network specified in -onlynet: '%s'</source> <translation>Réseau inconnu spécifié sur -onlynet : « %s »</translation> </message> <message> <source>Unknown state: id = %u</source> <translation>État inconnu: id = %u</translation> </message> <message> <source>Upgrade wallet to latest format</source> <translation>Mettre à niveau le portefeuille vers le format le plus récent</translation> </message> <message> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Utiliser OpenSSL (https) pour les connexions JSON-RPC</translation> </message> <message> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Utiliser l'UPnP pour mapper le port d'écoute (par défaut : 1 lors de l'écoute)</translation> </message> <message> <source>Use the test network</source> <translation>Utiliser le réseau de test</translation> </message> <message> <source>Username for JSON-RPC connections</source> <translation>Nom d'utilisateur pour les connexions JSON-RPC</translation> </message> <message> <source>Value more than Darksend pool maximum allows.</source> <translation>Valeur supérieure au maximum autorisé par le pool.</translation> </message> <message> <source>Verifying blocks...</source> <translation>Vérification des blocs en cours...</translation> </message> <message> <source>Verifying wallet...</source> <translation>Vérification du portefeuille en cours...</translation> </message> <message> <source>Wallet %s resides outside data directory %s</source> <translation>Le portefeuille %s réside en dehors du répertoire de données %s</translation> </message> <message> <source>Wallet is locked.</source> <translation>Le Portefeuille est verrouillé.</translation> </message> <message> <source>Wallet options:</source> <translation>Options du portefeuille :</translation> 
</message> <message> <source>Wallet window title</source> <translation>Titre de la fenêtre du portefeuille</translation> </message> <message> <source>Warning</source> <translation>Avertissement</translation> </message> <message> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Avertissement : cette version est obsolète, une mise à niveau est nécessaire !</translation> </message> <message> <source>You need to rebuild the database using -reindex to change -txindex</source> <translation>Vous devez reconstruire la base de données en utilisant -reindex afin de modifier -txindex</translation> </message> <message> <source>Your entries added successfully.</source> <translation>Vos entrées ajoutées avec succès.</translation> </message> <message> <source>Your transaction was accepted into the pool!</source> <translation>Votre transaction a été acceptée dans la pool!</translation> </message> <message> <source>Zapping all transactions from wallet...</source> <translation>Supprimer toutes les transactions du portefeuille...</translation> </message> <message> <source>on startup</source> <translation>au démarrage</translation> </message> <message> <source>wallet.dat corrupt, salvage failed</source> <translation>wallet.dat corrompu, la récupération a échoué</translation> </message> </context> </TS><|fim▁end|>
</message> <message> <source>Synchronization failed</source>
<|file_name|>job_cache_anonymous_houseprint.py<|end_file_name|><|fim▁begin|><|fim▁hole|>""" Script to cache anonymous houseprint data into hp_anonymous.pkl Created on 05/07/2014 by Roel De Coninck """ import os, sys import inspect script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) # add the path to opengrid to sys.path sys.path.append(os.path.join(script_dir, os.pardir, os.pardir)) from opengrid.library.houseprint import Houseprint ############################################################################## hp = Houseprint() all_sensordata = hp.get_all_fluksosensors() print('Sensor data fetched') hp.save('/usr/local/src/opengrid/scripts/hp_anonymous.pkl') hp.save('/var/www/private/hp_anonymous.pkl')<|fim▁end|>
# -*- coding: utf-8 -*-
<|file_name|>angular2-socket.io.ts<|end_file_name|><|fim▁begin|>import { Injectable } from 'angular2/core';<|fim▁hole|> @Injectable() export class NgSocketIO { socket: any; constructor() { this.socket = io.connect(); } public on(event : string) { return Rx.Observable.fromEvent(this.socket, event); } }<|fim▁end|>
import * as Rx from 'rxjs/Rx'; declare var io;
<|file_name|>polySum.py<|end_file_name|><|fim▁begin|>''' Week-2:Exercise-grader-polysum A regular polygon has n number of sides. Each side has length s. The area of a regular polygon is: (0.25∗n∗s^2)/tan(π/n) The perimeter of a polygon is: length of the boundary of the polygon Write a function called polysum that takes 2 arguments, n and s. This function should sum the area and square of the perimeter of the regular polygon. The function returns the sum, rounded to 4 decimal places. ''' #code import math<|fim▁hole|>def polysum(n,s): ''' Input: n - number of sides(should be an integer) s- length of each sides(can be an intger or a float) Output: Returns Sum of area and the square of the perimeter of the regular polygon(gives a float) ''' #Code def areaOfPolygon(n,s): #Pi = 3.1428 area = (0.25 * n * s ** 2)/math.tan(math.pi/n) return area def perimeterOfPolygon(n,s): perimeter = n * s return perimeter sum = areaOfPolygon(n,s) + (perimeterOfPolygon(n,s) ** 2) return round(sum,4)<|fim▁end|>
<|file_name|>uritests.cpp<|end_file_name|><|fim▁begin|>#include "uritests.h" #include "../guiutil.h" #include "../walletmodel.h" #include <QUrl> void URITests::uriTests() { SendCoinsRecipient rv; QUrl uri; uri.setUrl(QString("mehcoin:LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9?req-dontexist=")); QVERIFY(!GUIUtil::parseBitcoinURI(uri, &rv)); uri.setUrl(QString("mehcoin:LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9?dontexist=")); QVERIFY(GUIUtil::parseBitcoinURI(uri, &rv)); QVERIFY(rv.address == QString("LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9")); QVERIFY(rv.label == QString()); QVERIFY(rv.amount == 0); uri.setUrl(QString("mehcoin:LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9?label=Wikipedia Example Address")); QVERIFY(GUIUtil::parseBitcoinURI(uri, &rv)); QVERIFY(rv.address == QString("LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9")); QVERIFY(rv.label == QString("Wikipedia Example Address")); QVERIFY(rv.amount == 0); uri.setUrl(QString("mehcoin:LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9?amount=0.001")); QVERIFY(GUIUtil::parseBitcoinURI(uri, &rv)); QVERIFY(rv.address == QString("LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9")); QVERIFY(rv.label == QString()); QVERIFY(rv.amount == 100000); uri.setUrl(QString("mehcoin:LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9?amount=1.001")); QVERIFY(GUIUtil::parseBitcoinURI(uri, &rv)); QVERIFY(rv.address == QString("LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9")); QVERIFY(rv.label == QString()); QVERIFY(rv.amount == 100100000); uri.setUrl(QString("mehcoin:LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9?amount=100&label=Wikipedia Example")); QVERIFY(GUIUtil::parseBitcoinURI(uri, &rv)); QVERIFY(rv.address == QString("LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9")); QVERIFY(rv.amount == 10000000000LL); QVERIFY(rv.label == QString("Wikipedia Example")); uri.setUrl(QString("mehcoin:LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9?message=Wikipedia Example Address")); QVERIFY(GUIUtil::parseBitcoinURI(uri, &rv)); QVERIFY(rv.address == QString("LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9")); QVERIFY(rv.label == QString()); 
QVERIFY(GUIUtil::parseBitcoinURI("mehcoin://LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9?message=Wikipedia Example Address", &rv)); QVERIFY(rv.address == QString("LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9")); QVERIFY(rv.label == QString());<|fim▁hole|> // We currently don't implement the message parameter (ok, yea, we break spec...) uri.setUrl(QString("mehcoin:LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9?req-message=Wikipedia Example Address")); QVERIFY(!GUIUtil::parseBitcoinURI(uri, &rv)); uri.setUrl(QString("mehcoin:LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9?amount=1,000&label=Wikipedia Example")); QVERIFY(!GUIUtil::parseBitcoinURI(uri, &rv)); uri.setUrl(QString("mehcoin:LQDPC5rbjDB72fGFVHu4enYhxGAZuRiFh9?amount=1,000.0&label=Wikipedia Example")); QVERIFY(!GUIUtil::parseBitcoinURI(uri, &rv)); }<|fim▁end|>
<|file_name|>Workforce.js<|end_file_name|><|fim▁begin|>'use strict';<|fim▁hole|>// ============================== var Workforce = function () { if (this.constructor === Workforce) { throw new Error("You cannot instantiate an abstract class!"); } }; Workforce.prototype.assemblePC = function () { throw new Error("You cannot call an abstract method!"); }; Workforce.prototype.setMotherboard = function (motherboard) { throw new Error("You cannot call an abstract method!"); }; Workforce.prototype.setCpu = function (cpu) { throw new Error("You cannot call an abstract method!"); }; Workforce.prototype.setRam = function (ram) { throw new Error("You cannot call an abstract method!"); }; Workforce.prototype.setSsd = function (ssd) { throw new Error("You cannot call an abstract method!"); }; Workforce.prototype.setNic = function (nic) { throw new Error("You cannot call an abstract method!"); }; Workforce.prototype.setPowerSupply = function (powerSupply) { throw new Error("You cannot call an abstract method!"); }; Workforce.prototype.setCaseDesign = function (caseDesign) { throw new Error("You cannot call an abstract method!"); }; module.exports = Workforce;<|fim▁end|>
// ============================== // ABSTRACT PC BUILDER
<|file_name|>webpack.js<|end_file_name|><|fim▁begin|>const gulp = require('gulp'); const spawn = require('../lib/spawn'); const config = require('../config'); gulp.task('webpack', (callback) => { if (config.context === 'production') { process.env.NODE_ENV = 'production'; } process.env.WEBPACK_CONTEXT = config.context; const options = []; if (config.context === 'watch') { options.push('-w'); } spawn('node_modules/.bin/webpack', options, callback);<|fim▁hole|><|fim▁end|>
});
<|file_name|>conv_gpu_depthwise.d.ts<|end_file_name|><|fim▁begin|>import { Conv2DInfo } from '../../ops/conv_util'; import { GPGPUProgram } from './gpgpu_math'; export declare class DepthwiseConv2DProgram implements GPGPUProgram { variableNames: string[];<|fim▁hole|> outputShape: number[]; userCode: string; constructor(convInfo: Conv2DInfo); }<|fim▁end|>
<|file_name|>validation.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Parity Technologies (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. use tests::helpers::{serve_hosts, request}; #[test] fn should_reject_invalid_host() { // given let server = serve_hosts(Some(vec!["localhost:8080".into()])); // when let response = request(server, "\ GET / HTTP/1.1\r\n\ Host: 127.0.0.1:8080\r\n\ Connection: close\r\n\ \r\n\ {} " ); // then assert_eq!(response.status, "HTTP/1.1 403 Forbidden".to_owned()); assert!(response.body.contains("Current Host Is Disallowed"), response.body); } #[test] fn should_allow_valid_host() { // given let server = serve_hosts(Some(vec!["localhost:8080".into()])); // when let response = request(server, "\ GET /ui/ HTTP/1.1\r\n\ Host: localhost:8080\r\n\ Connection: close\r\n\ \r\n\ {}<|fim▁hole|> " ); // then assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); } #[test] fn should_serve_dapps_domains() { // given let server = serve_hosts(Some(vec!["localhost:8080".into()])); // when let response = request(server, "\ GET / HTTP/1.1\r\n\ Host: ui.parity\r\n\ Connection: close\r\n\ \r\n\ {} " ); // then assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); } #[test] // NOTE [todr] This is required for error pages to be styled properly. 
fn should_allow_parity_utils_even_on_invalid_domain() { // given let server = serve_hosts(Some(vec!["localhost:8080".into()])); // when let response = request(server, "\ GET /parity-utils/styles.css HTTP/1.1\r\n\ Host: 127.0.0.1:8080\r\n\ Connection: close\r\n\ \r\n\ {} " ); // then assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); } #[test] fn should_not_return_cors_headers_for_rpc() { // given let server = serve_hosts(Some(vec!["localhost:8080".into()])); // when let response = request(server, "\ POST /rpc HTTP/1.1\r\n\ Host: localhost:8080\r\n\ Origin: null\r\n\ Content-Type: application/json\r\n\ Connection: close\r\n\ \r\n\ {} " ); // then assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); assert!( !response.headers_raw.contains("Access-Control-Allow-Origin"), "CORS headers were not expected: {:?}", response.headers ); }<|fim▁end|>
<|file_name|>QueryVisitorFieldArgumentEnvironment.java<|end_file_name|><|fim▁begin|>package graphql.analysis; import graphql.PublicApi; import graphql.language.Argument; import graphql.language.Node; import graphql.schema.GraphQLArgument; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLSchema; import graphql.util.TraverserContext; import java.util.Map; @PublicApi public interface QueryVisitorFieldArgumentEnvironment { GraphQLSchema getSchema(); <|fim▁hole|> Argument getArgument(); Object getArgumentValue(); Map<String, Object> getVariables(); QueryVisitorFieldEnvironment getParentEnvironment(); TraverserContext<Node> getTraverserContext(); }<|fim▁end|>
GraphQLFieldDefinition getFieldDefinition(); GraphQLArgument getGraphQLArgument();
<|file_name|>app.py<|end_file_name|><|fim▁begin|>import os import random import time from flask import Flask, request, render_template, session, flash, redirect, \ url_for, jsonify from flask.ext.mail import Mail, Message from flask.ext.sqlalchemy import SQLAlchemy from celery import Celery app = Flask(__name__) app.config['SECRET_KEY'] = 'top-secret!' # Flask-Mail configuration app.config['MAIL_SERVER'] = 'smtp.googlemail.com' app.config['MAIL_PORT'] = 587 app.config['MAIL_USE_TLS'] = True app.config['MAIL_USERNAME'] = "[email protected]" app.config['MAIL_PASSWORD'] = "1Alzkdpf*^^*go" app.config['MAIL_DEFAULT_SENDER'] = '[email protected]' # Celery configuration app.config['CELERY_BROKER_URL'] = 'redis://localhost:6379/0' app.config['CELERY_RESULT_BACKEND'] = 'redis://localhost:6379/0' app.config.from_object(os.environ['APP_SETTINGS']) db = SQLAlchemy(app) from .models import MapInfo # Initialize extensions mail = Mail(app) # Initialize Celery celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL']) celery.conf.update(app.config) @celery.task def send_async_email(msg): """Background task to send an email with Flask-Mail.""" with app.app_context(): mail.send(msg) @celery.task(bind=True) def long_task(self): """Background task that runs a long function with progress reports.""" verb = ['Starting up', 'Booting', 'Repairing', 'Loading', 'Checking'] adjective = ['master', 'radiant', 'silent', 'harmonic', 'fast'] noun = ['solar array', 'particle reshaper', 'cosmic ray', 'orbiter', 'bit'] message = '' total = random.randint(10, 50) for i in range(total): if not message or random.random() < 0.25: message = '{0} {1} {2}...'.format(random.choice(verb), random.choice(adjective), random.choice(noun)) self.update_state(state='PROGRESS', meta={'current': i, 'total': total, 'status': message}) time.sleep(1) return {'current': 100, 'total': 100, 'status': 'Task completed!', 'result': 42} @app.route('/', methods=['GET', 'POST']) def index(): if request.method == 'GET': 
return render_template('index.html', email=session.get('email', '')) email = request.form['email'] session['email'] = email # send the email msg = Message('Hello from Flask', recipients=[request.form['email']]) msg.body = 'This is a test email sent from a background Celery task.' if request.form['submit'] == 'Send': # send right away send_async_email.delay(msg) flash('Sending email to {0}'.format(email)) else: # send in one minute send_async_email.apply_async(args=[msg], countdown=60) flash('An email will be sent to {0} in one minute'.format(email)) return redirect(url_for('index')) @app.route('/longtask', methods=['POST']) def longtask(): task = long_task.apply_async() return jsonify({}), 202, {'Location': url_for('taskstatus', task_id=task.id)} @app.route('/status/<task_id>') def taskstatus(task_id): task = long_task.AsyncResult(task_id) if task.state == 'PENDING': response = { 'state': task.state, 'current': 0, 'total': 1, 'status': 'Pending...' } elif task.state != 'FAILURE': response = { 'state': task.state, 'current': task.info.get('current', 0), 'total': task.info.get('total', 1), 'status': task.info.get('status', '') } if 'result' in task.info: response['result'] = task.info['result'] else: # something went wrong in the background job response = { 'state': task.state, 'current': 1, 'total': 1, 'status': str(task.info), # this is the exception raised } return jsonify(response) if __name__ == '__main__':<|fim▁hole|><|fim▁end|>
app.run(debug=True)
<|file_name|>test_extension.py<|end_file_name|><|fim▁begin|># Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import absolute_import, division, print_function, unicode_literals import os import subprocess import sys from ....tests.helper import pytest def test_wcsapi_extension(tmpdir): # Test that we can build a simple C extension with the astropy.wcs C API setup_path = os.path.dirname(__file__) astropy_path = os.path.abspath( os.path.join(setup_path, '..', '..', '..', '..')) env = os.environ.copy() paths = [str(tmpdir), astropy_path] if env.get('PYTHONPATH'): paths.append(env.get('PYTHONPATH')) env[str('PYTHONPATH')] = str(os.pathsep.join(paths)) # Build the extension # This used to use subprocess.check_call, but on Python 3.4 there was # a mysterious Heisenbug causing this to fail with a non-zero exit code # *unless* the output is redirected. This bug also did not occur in an # interactive session, so it likely had something to do with pytest's # output capture p = subprocess.Popen([sys.executable, 'setup.py', 'install', '--install-lib={0}'.format(tmpdir), astropy_path], cwd=setup_path, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # Whether the process fails or not this isn't likely to produce a great # deal of output so communicate should be fine in almost all cases stdout, stderr = p.communicate() try: stdout, stderr = stdout.decode('utf8'), stderr.decode('utf8') except UnicodeDecodeError: # Don't try to guess about encoding; just display the text stdout, stderr = stdout.decode('latin1'), stderr.decode('latin1') # If compilation fails, we can skip this test, since the # dependencies necessary to compile an extension may be missing. # If it passes, however, we want to continue and ensure that the # extension created is actually usable. 
However, if we're on # Travis-CI, or another generic continuous integration setup, we # don't want to ever skip, because having it fail in that # environment probably indicates something more serious that we # want to know about. if (not (str('CI') in os.environ or<|fim▁hole|> str('TRAVIS') in os.environ or str('CONTINUOUS_INTEGRATION') in os.environ) and p.returncode): pytest.skip("system unable to compile extensions") return assert p.returncode == 0, ( "setup.py exited with non-zero return code {0}\n" "stdout:\n\n{1}\n\nstderr:\n\n{2}\n".format( p.returncode, stdout, stderr)) code = """ import sys import wcsapi_test sys.exit(wcsapi_test.test()) """ code = code.strip().replace('\n', '; ') # Import and run the extension subprocess.check_call([sys.executable, '-c', code], env=env)<|fim▁end|>
<|file_name|>space_map_disk.rs<|end_file_name|><|fim▁begin|>use anyhow::Result; use crate::pdata::btree_builder::*; use crate::pdata::space_map::*; use crate::pdata::space_map_common::*; use crate::write_batcher::*; //------------------------------------------ pub fn write_disk_sm(w: &mut WriteBatcher, sm: &dyn SpaceMap) -> Result<SMRoot> { let (index_entries, ref_count_root) = write_common(w, sm)?; let mut index_builder: BTreeBuilder<IndexEntry> = BTreeBuilder::new(Box::new(NoopRC {}));<|fim▁hole|> } let bitmap_root = index_builder.complete(w)?; w.flush()?; Ok(SMRoot { nr_blocks: sm.get_nr_blocks()?, nr_allocated: sm.get_nr_allocated()?, bitmap_root, ref_count_root, }) } //------------------------------------------<|fim▁end|>
for (i, ie) in index_entries.iter().enumerate() { index_builder.push_value(w, i as u64, *ie)?;
<|file_name|>generic.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python # -*- coding: utf-8 -*- #----------------------------------------------------------------------------- # Name: Generic.py # Purpose: # Author: Fabien Marteau <[email protected]> # Created: 21/05/2008 #----------------------------------------------------------------------------- # Copyright (2008) Armadeus Systems # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
# #----------------------------------------------------------------------------- # Revision list : # # Date By Changes # #----------------------------------------------------------------------------- __doc__ = "" __version__ = "1.0.0" __author__ = "Fabien Marteau <[email protected]>" import re from periphondemand.bin.utils.wrapperxml import WrapperXml from periphondemand.bin.utils.error import Error DESTINATION = ["fpga","driver","both"] PUBLIC = ["true","false"] class Generic(WrapperXml): """ Manage generic instance value """ def __init__(self,parent,**keys): """ init Generic, __init__(self,parent,node) __init__(self,parent,nodestring)<|fim▁hole|> """ self.parent=parent if "node" in keys: self.__initnode(keys["node"]) elif "nodestring" in keys: self.__initnodestring(keys["nodestring"]) elif "name" in keys: self.__initname(keys["name"]) else: raise Error("Keys unknown in Generic init()",0) def __initnode(self,node): WrapperXml.__init__(self,node=node) def __initnodestring(self,nodestring): WrapperXml.__init__(self,nodestring=nodestring) def __initname(self,name): WrapperXml.__init__(self,nodename="generic") self.setName(name) def getOp(self): return self.getAttributeValue("op") def setOp(self,op): self.setAttribute("op",op) def getTarget(self): return self.getAttributeValue("target") def setTarget(self,target): self.setAttribute("target",target) def isPublic(self): if self.getAttributeValue("public")=="true": return "true" else: return "false" def setPublic(self,public): public = public.lower() if not public in PUBLIC: raise Error("Public value "+str(public)+" wrong") self.setAttribute("public",public) def getType(self): the_type = self.getAttributeValue("type") if the_type == None: raise Error("Generic "+self.getName()+\ " description malformed, type must be defined",0) else: return the_type def setType(self,type): self.setAttribute("type",type) def getMatch(self): try: return self.getAttributeValue("match").encode("utf-8") except AttributeError: return None def 
setMatch(self,match): self.setAttribute("match",match) def getValue(self): """ return the generic value """ component = self.getParent() if self.getOp() == None: return self.getAttributeValue("value") else: target = self.getTarget().split(".") if self.getOp() == "realsizeof": # return the number of connected pin return str(int(component.getInterface(target[0]).getPort(target[1]).getMaxPinNum())+1) else: raise Error("Operator unknown "+self.getOp(),1) def setValue(self,value): if self.getMatch() == None: self.setAttribute("value",value) elif re.compile(self.getMatch()).match(value): self.setAttribute("value",value) else: raise Error("Value doesn't match for attribute "+str(value),0) def getDestination(self): """ return the generic destination (fpga,driver or both) """ return self.getAttributeValue("destination") def setDestination(self,destination): destination = destination.lower() if not destination in DESTINATION: raise Error("Destination value "+str(destination)+\ " unknown") self.setAttribute("destination",destination)<|fim▁end|>
__init__(self,parent,name)
<|file_name|>weeklyRecurrenceSpec.js<|end_file_name|><|fim▁begin|>var deps = [ 'common-ui/angular', 'test/karma/unit/angular-directives/templateUtil', 'common-ui/angular-ui-bootstrap', 'angular-mocks', 'common-ui/angular-directives/recurrence/recurrence' ]; define(deps, function (angular, templateUtil) { describe('weeklyRecurrence', function () { var $scope, httpBackend, templateCache; beforeEach(module('recurrence', 'dateTimePicker')); beforeEach(inject(function ($rootScope, $httpBackend, $templateCache) { $scope = $rootScope; httpBackend = $httpBackend; templateCache = $templateCache; templateUtil.addTemplate("common-ui/angular-directives/recurrence/weekly.html", httpBackend, templateCache); templateUtil.addTemplate("common-ui/angular-directives/dateTimePicker/dateTimePicker.html", httpBackend, templateCache); })); describe('weekly', function () { var scope, $compile; var element; beforeEach(inject(function (_$rootScope_, _$compile_) { scope = _$rootScope_; $compile = _$compile_; })); afterEach(function () { element = scope = $compile = undefined; }); describe('with static panels', function () { beforeEach(function () { var tpl = "<div weekly weekly-label='Run every week on' start-label='Start' until-label='Until' no-end-label='No end date' end-by-label='End by' weekly-recurrence-info='model'></div>"; element = angular.element(tpl); $compile(element)(scope); scope.$digest(); //Set model to initially have 2 days check along with dates set scope.model.startTime = new Date(); scope.model.endTime = new Date(); scope.model.daysOfWeek = [0, 6]; scope.$apply(); }); afterEach(function () { element.remove(); }); it('rehydrated model should reflect initial changes', function () { expect(scope.model.daysOfWeek.length).toEqual(2); expect(scope.model.endTime).toBeDefined(); expect(scope.model.startTime).toBeDefined(); //Grab first checkbox and un-check it element.find('input.SUN').click(); //ensure that the first checkbox is unchecked 
expect(scope.model.daysOfWeek.length).toBe(1); //Grab last checkbox and un-check it element.find('input.SAT').click(); //ensure that the last checkbox is unchecked expect(scope.model.daysOfWeek.length).toBe(0); }); it('should create weekly panel with content', function () { expect(element.attr('weekly')).toBeDefined(); expect(element.attr('weekly-label')).toEqual("Run every week on"); expect(element.attr('start-label')).toEqual("Start"); expect(element.attr('until-label')).toEqual("Until"); expect(element.attr('no-end-label')).toEqual("No end date"); expect(element.attr('end-by-label')).toEqual("End by"); }); it('clicking checkbox set model on the scope', function () { //Grab checkbox and check it element.find('input.MON').click(); //ensure that the second checkbox is checked expect(scope.model.daysOfWeek.length).toBe(3); //Grab checkbox and check it element.find('input.TUES').click(); //ensure that the third checkbox is checked expect(scope.model.daysOfWeek.length).toBe(4); //Grab checkbox and check it element.find('input.WED').click(); //ensure that the fourth checkbox is checked expect(scope.model.daysOfWeek.length).toBe(5); //Grab checkbox and check it element.find('input.THURS').click(); //ensure that the fifth checkbox is checked expect(scope.model.daysOfWeek.length).toBe(6); //Grab checkbox and check it element.find('input.FRI').click(); //ensure that the sixth checkbox is checked expect(scope.model.daysOfWeek.length).toBe(7); //Click checkbox again to deselect it element.find('input.WED').click(); //ensure that the seventh checkbox is deselected expect(scope.model.daysOfWeek.length).toBe(6); }); it('clicking radio button should update something', function () { }); describe("start datetime directive initialization from scope", function () { var startDatetime, isolateScope; beforeEach(function () { startDatetime = angular.element(element.find('div')[1]); isolateScope = startDatetime.isolateScope(); }); it('should update model on the scope', function () { 
//Change the start hour to 10 AM isolateScope.hour = 10; isolateScope.tod = "AM"; //Change the start minute to :00 isolateScope.minute = 59; scope.$apply(); expect(scope.model.startTime.getHours()).toBe(10); expect(scope.model.startTime.getMinutes()).toBe(59); var myDate = new Date(2014,4,1,23,15,0); isolateScope.selectedDate = myDate; scope.$apply(); expect(scope.model.startTime).toBe(myDate); }); }); }); describe("Weekly validation tests", function () { var $myscope, isolateScope; beforeEach(inject(function ($rootScope, $compile) { var tpl = "<div weekly weekly-label='Run every week on' start-label='Start' until-label='Until' no-end-label='No end date' end-by-label='End by' weekly-recurrence-info='model'></div>"; $myscope = $rootScope.$new(); element = angular.element(tpl); $compile(element)($myscope); $myscope.$digest(); // get the isolate scope from the element isolateScope = element.isolateScope(); $myscope.model = {}; $myscope.$apply(); })); afterEach(function () { element.remove(); }); it("should not be valid by default", function () { var v = isolateScope.isValid(); expect(v).toBeFalsy(); }); it("should have at least one day selected considered valid", function () { expect(isolateScope.isValid()).toBeFalsy(); // this should be defaulted to the current date/time expect(isolateScope.startDate).toBeDefined(); expect(isolateScope.startDate).toBeLessThan(new Date()); element.find("input.SUN").click(); expect(isolateScope.isValid()).toBeTruthy(); element.find("input.SUN").click(); expect(isolateScope.isValid()).toBeFalsy(); element.find("input.SUN").click(); // turn it back on expect(isolateScope.isValid()).toBeTruthy(); }); it("should have an end date after the start date to be valid", function () { expect(isolateScope.isValid()).toBeFalsy(); element.find("input.SUN").click(); isolateScope.data.endDateDisabled = false; // select the end date radio isolateScope.endDate = new Date(); expect(isolateScope.isValid()).toBeTruthy(); isolateScope.endDate = "2014-04-01"; 
// before now and a string version of the date expect(isolateScope.isValid()).toBeFalsy(); }); it("should hydrate from strings for start and end time", function () { $myscope.model = { startTime: "2014-04-01", endTime: "2014-04-02" }; $myscope.$apply(); element.find("input.SUN").click(); isolateScope.data.endDateDisabled = false; // select the end date radio expect(isolateScope.isValid()).toBeTruthy(); }); }); describe("Weekly validation tests", function() { var $myscope, isolateScope; beforeEach(inject(function ($rootScope, $compile) { var tpl = "<div weekly weekly-label='Run every week on' start-label='Start' until-label='Until' no-end-label='No end date' end-by-label='End by' weekly-recurrence-info='model'></div>"; $myscope = $rootScope.$new(); element = angular.element(tpl); $compile(element)($myscope); $myscope.$digest(); // get the isolate scope from the element isolateScope = element.isolateScope(); $myscope.model = {}; $myscope.$apply(); })); afterEach(function() { element.remove(); }); it("should not be valid by default", function() { var v = isolateScope.isValid(); expect(v).toBeFalsy(); }); it("should have at least one day selected considered valid", function() { expect(isolateScope.isValid()).toBeFalsy(); // this should be defaulted to the current date/time expect(isolateScope.startDate).toBeDefined(); expect(isolateScope.startDate).toBeLessThan(new Date()); element.find("input.SUN").click(); expect(isolateScope.isValid()).toBeTruthy(); element.find("input.SUN").click(); expect(isolateScope.isValid()).toBeFalsy(); element.find("input.SUN").click(); // turn it back on expect(isolateScope.isValid()).toBeTruthy(); }); it("should have an end date after the start date to be valid", function() { expect(isolateScope.isValid()).toBeFalsy(); element.find("input.SUN").click(); isolateScope.data.endDateDisabled = false; // select the end date radio isolateScope.endDate = new Date(); expect(isolateScope.isValid()).toBeTruthy(); isolateScope.endDate = "2014-04-01"; 
// before now and a string version of the date expect(isolateScope.isValid()).toBeFalsy(); }); it("should hydrate from strings for start and end time", function() { $myscope.model = { startTime: "2014-04-01", endTime: "2014-04-02" }; $myscope.$apply(); element.find("input.SUN").click(); isolateScope.data.endDateDisabled = false; // select the end date radio expect(isolateScope.isValid()).toBeTruthy(); });<|fim▁hole|> }); }); });<|fim▁end|>
});
<|file_name|>9c92c85163a9_events.py<|end_file_name|><|fim▁begin|>"""events Revision ID: 9c92c85163a9 Revises: 666668eae682 Create Date: 2016-05-09 19:04:44.498817 """ from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. revision = '9c92c85163a9' down_revision = '666668eae682' def upgrade(): op.create_table('event', sa.Column('id', sa.BigInteger(), nullable=False), sa.Column('origin', sa.Unicode(), nullable=True), sa.Column('data', postgresql.JSONB(), nullable=True), sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), sa.PrimaryKeyConstraint('id') ) op.drop_table('processing_log') def downgrade(): op.create_table('processing_log', sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), sa.Column('id', sa.BIGINT(), nullable=False), sa.Column('operation', sa.VARCHAR(), autoincrement=False, nullable=True), sa.Column('component', sa.VARCHAR(), autoincrement=False, nullable=True), sa.Column('source_location', sa.VARCHAR(), autoincrement=False, nullable=True), sa.Column('content_hash', sa.VARCHAR(length=65), autoincrement=False, nullable=True), sa.Column('foreign_id', sa.VARCHAR(), autoincrement=False, nullable=True), sa.Column('source_id', sa.INTEGER(), autoincrement=False, nullable=True), sa.Column('document_id', sa.BIGINT(), autoincrement=False, nullable=True),<|fim▁hole|> sa.Column('error_type', sa.VARCHAR(), autoincrement=False, nullable=True), sa.Column('error_message', sa.VARCHAR(), autoincrement=False, nullable=True), sa.Column('error_details', sa.VARCHAR(), autoincrement=False, nullable=True), sa.PrimaryKeyConstraint('id', name=u'processing_log_pkey') ) op.drop_table('event')<|fim▁end|>
sa.Column('meta', postgresql.JSONB(), autoincrement=False, nullable=True),
<|file_name|>UnserializableTargetObjectTest.java<|end_file_name|><|fim▁begin|>/* * Copyright 2005 Sun Microsystems, Inc. All Rights Reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, * CA 95054 USA or visit www.sun.com if you need additional information or * have any questions. */ /* * @test * @bug 6332962 * @summary Test that a RequiredModelMBean operation can have a targetObject * that is not serializable * @author Eamonn McManus * @run clean UnserializableTargetObjectTest * @run build UnserializableTargetObjectTest * @run main UnserializableTargetObjectTest */ /* This test and DescriptorSupportSerialTest basically cover the same thing. I wrote them at different times and forgot that I had written the earlier one. However the coverage is slightly different so I'm keeping both. 
*/ import java.lang.reflect.Method; import javax.management.Attribute; import javax.management.Descriptor; import javax.management.MBeanServer; import javax.management.MBeanServerConnection; import javax.management.MBeanServerFactory; import javax.management.ObjectName; import javax.management.modelmbean.DescriptorSupport; import javax.management.modelmbean.ModelMBean; import javax.management.modelmbean.ModelMBeanAttributeInfo; import javax.management.modelmbean.ModelMBeanInfo; import javax.management.modelmbean.ModelMBeanInfoSupport; import javax.management.modelmbean.ModelMBeanOperationInfo; import javax.management.modelmbean.RequiredModelMBean; import javax.management.remote.JMXConnector; import javax.management.remote.JMXConnectorFactory; import javax.management.remote.JMXConnectorServer; import javax.management.remote.JMXConnectorServerFactory; import javax.management.remote.JMXServiceURL; public class UnserializableTargetObjectTest { public static class Resource { // not serializable! 
int count; int operationCount; public void operation() { operationCount++; } public int getCount() { return count; } public void setCount(int count) { this.count = count; } } public static void main(String[] args) throws Exception { MBeanServer mbs = MBeanServerFactory.newMBeanServer(); ObjectName name = new ObjectName("a:b=c"); Resource resource1 = new Resource(); Resource resource2 = new Resource();<|fim▁hole|> Descriptor operationDescriptor = new DescriptorSupport(new String[] { "descriptorType", "name", "targetObject" }, new Object[] { "operation", "operation", resource1 }); Descriptor getCountDescriptor = new DescriptorSupport(new String[] { "descriptorType", "name", "targetObject" }, new Object[] { "operation", "getCount", resource2 }); Descriptor setCountDescriptor = new DescriptorSupport(new String[] { "descriptorType", "name", "targetObject" }, new Object[] { "operation", "setCount", resource2 }); Descriptor countDescriptor = new DescriptorSupport(new String[] { "descriptorType", "name", "getMethod", "setMethod" }, new Object[] { "attribute", "Count", "getCount", "setCount" }); ModelMBeanOperationInfo operationInfo = new ModelMBeanOperationInfo("operation description", operationMethod, operationDescriptor); ModelMBeanOperationInfo getCountInfo = new ModelMBeanOperationInfo("getCount description", getCountMethod, getCountDescriptor); ModelMBeanOperationInfo setCountInfo = new ModelMBeanOperationInfo("setCount description", setCountMethod, setCountDescriptor); ModelMBeanAttributeInfo countInfo = new ModelMBeanAttributeInfo("Count", "Count description", getCountMethod, setCountMethod, countDescriptor); ModelMBeanInfo mmbi = new ModelMBeanInfoSupport(Resource.class.getName(), "ModelMBean to test targetObject", new ModelMBeanAttributeInfo[] {countInfo}, null, // no constructors new ModelMBeanOperationInfo[] { operationInfo, getCountInfo, setCountInfo }, null); // no notifications ModelMBean mmb = new RequiredModelMBean(mmbi); mmb.setManagedResource(resource3, 
"ObjectReference"); mbs.registerMBean(mmb, name); mbs.invoke(name, "operation", null, null); mbs.setAttribute(name, new Attribute("Count", 53)); if (resource1.operationCount != 1) throw new Exception("operationCount: " + resource1.operationCount); if (resource2.count != 53) throw new Exception("count: " + resource2.count); int got = (Integer) mbs.getAttribute(name, "Count"); if (got != 53) throw new Exception("got count: " + got); JMXServiceURL url = new JMXServiceURL("rmi", null, 0); JMXConnectorServer cs = JMXConnectorServerFactory.newJMXConnectorServer(url, null, mbs); cs.start(); JMXServiceURL addr = cs.getAddress(); JMXConnector cc = JMXConnectorFactory.connect(addr); MBeanServerConnection mbsc = cc.getMBeanServerConnection(); ModelMBeanInfo rmmbi = (ModelMBeanInfo) mbsc.getMBeanInfo(name); // Above gets NotSerializableException if resource included in // serialized form cc.close(); cs.stop(); System.out.println("TEST PASSED"); } }<|fim▁end|>
Resource resource3 = new Resource(); Method operationMethod = Resource.class.getMethod("operation"); Method getCountMethod = Resource.class.getMethod("getCount"); Method setCountMethod = Resource.class.getMethod("setCount", int.class);
<|file_name|>speed.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ## Part of the pyprimes.py package. ## ## Copyright © 2014 Steven D'Aprano. ## See the file __init__.py for the licence terms for this software. """\ ===================================== Timing the speed of primes algorithms ===================================== """ from __future__ import division import sys from itertools import islice # Conditionally hack the PYTHONPATH. if __name__ == '__main__': import os path = os.path.dirname(__file__) parent, here = os.path.split(path) sys.path.append(parent) from pyprimes.compat23 import next import pyprimes.awful as awful import pyprimes.probabilistic as probabilistic import pyprimes.sieves as sieves <|fim▁hole|> def __init__(self, timer=None): if timer is None: from timeit import default_timer as timer self.timer = timer self.reset() def reset(self): """Reset all the collected timer results.""" try: del self._start except AttributeError: pass self._elapsed = 0.0 def start(self): """Start the timer.""" self._start = self.timer() def stop(self): """Stop the timer.""" t = self.timer() self._elapsed = t - self._start del self._start @property def elapsed(self): return self._elapsed def trial(generator, count, repeat=1): timer = Stopwatch() best = YEAR100 for i in range(repeat): it = generator() timer.reset() timer.start() # Go to the count-th prime as fast as possible. p = next(islice(it, count-1, count)) timer.stop() best = min(best, timer.elapsed) return best def run(generators, number, repeat=1): print ("Calculating speeds for first %d primes..." % number) template = "\r ...%d of %d %s" heading = """\ Generator Elapsed Speed (sec) (primes/sec) ==============================================================""" records = [] timer = Stopwatch() # For measuring the total elapsed time. timer.start() N = len(generators) for i, generator in enumerate(generators): name = generator.__module__ + '.' 
+ generator.__name__ sys.stdout.write((template % (i+1, N, name)).ljust(69)) sys.stdout.flush() t = trial(generator, number, repeat) records.append((number/t, t, name)) timer.stop() sys.stdout.write("\r%-69s\n" % "Done!") print ('Total elapsed time: %.1f seconds' % timer.elapsed) print ('') records.sort() print (heading) for speed, elapsed, name in records: print ("%-36s %4.2f %8.1f" % (name, elapsed, speed)) print ('==============================================================\n') VERY_SLOW = [awful.primes0, awful.primes1, awful.primes2, awful.turner] SLOW = [awful.primes3, awful.primes4, probabilistic.primes] FAST = [sieves.cookbook, sieves.croft, sieves.sieve, sieves.wheel] MOST = SLOW + FAST ALL = VERY_SLOW + MOST run(VERY_SLOW + SLOW, 1000) run([awful.primes3, awful.trial_division], 5000) #run([awful.primes3, awful.trial_division], 50000) #run([awful.primes3, awful.trial_division], 100000) #run([awful.primes3, awful.trial_division], 200000) exit() run(ALL, 500, 3) run(MOST, 10000) run(FAST, 1000000) """ Python 2.6 or better import multiprocessing import time # bar def bar(): for i in range(100): print "Tick" time.sleep(1) if __name__ == '__main__': # Start bar as a process p = multiprocessing.Process(target=bar) p.start() # Wait for 10 seconds or until process finishes p.join(10) # If thread is still active if p.is_alive(): print "running... let's kill it..." # Terminate p.terminate() p.join() """ """ Unix only, Python 2.5 or better. In [1]: import signal # Register an handler for the timeout In [2]: def handler(signum, frame): ...: print "Forever is over!" ...: raise Exception("end of time") ...: # This function *may* run for an indetermined time... 
In [3]: def loop_forever(): ...: import time ...: while 1: ...: print "sec" ...: time.sleep(1) ...: ...: # Register the signal function handler In [4]: signal.signal(signal.SIGALRM, handler) Out[4]: 0 # Define a timeout for your function In [5]: signal.alarm(10) Out[5]: 0 In [6]: try: ...: loop_forever() ...: except Exception, exc: ...: print exc ....: sec sec sec sec sec sec sec sec Forever is over! end of time # Cancel the timer if the function returned before timeout # (ok, mine won't but yours maybe will :) In [7]: signal.alarm(0) Out[7]: 0 """<|fim▁end|>
YEAR100 = 100*365*24*60*60 # One hundred years, in seconds. class Stopwatch(object):
<|file_name|>NeedsPermission.java<|end_file_name|><|fim▁begin|>package org.jokar.permissiondispatcher.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /**<|fim▁hole|> * Created by JokAr on 16/8/22. */ @Target(ElementType.METHOD) @Retention(RetentionPolicy.CLASS) public @interface NeedsPermission { String[] value(); }<|fim▁end|>
* Register some methods which permissions are needed.
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
"""Tests for the input_boolean component."""
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use communication::Message; pub use self::counter::Counter; <|fim▁hole|>/// The pullable design may need to be upgraded: right now there is no obvious connection between /// subsequent calls to pull; although multiple calls may produce the same time, they don't need to /// and defensive implementations must constantly check this. This complicates data exchange, which /// may conservatively over-flush, if the defensive implementation isn't well done (e.g. now). /// An alternate design is for a Pullable<T, D> to return a (&T, Session<D>), where Session<D> is a /// new type implementing Iterator<Item=Message<D>>, or Iterator<Item=D>, or PullableSession<D>, or /// something like that. Ideally, the Session<D> notice how many records are consumed, and only /// treats those res pub trait Pullable<T, D> { fn pull(&mut self) -> Option<(&T, &mut Message<D>)>; } impl<T, D, P: ?Sized + Pullable<T, D>> Pullable<T, D> for Box<P> { fn pull(&mut self) -> Option<(&T, &mut Message<D>)> { (**self).pull() } }<|fim▁end|>
pub mod counter;
<|file_name|>MainNavigation.js<|end_file_name|><|fim▁begin|>'use strict'; const fs = require('fs'); const remote = require('electron').remote; const mainProcess = remote.require('./main'); module.exports = { template: ` <v-list dense class="pt-0"> <v-list-tile to="recent-projects" :router="true"> <v-list-tile-action> <v-icon>list</v-icon> </v-list-tile-action> <v-list-tile-content> <v-list-tile-title id="recent-projects">Recent projects</v-list-tile-title> </v-list-tile-content> </v-list-tile> <v-list-tile @click="existingProject"> <v-list-tile-action> <v-icon>folder_open</v-icon> </v-list-tile-action> <v-list-tile-content> <v-list-tile-title id="open-project">Add existing project</v-list-tile-title> </v-list-tile-content> </v-list-tile> <v-list-tile to="create-project" :router="true"> <v-list-tile-action> <v-icon>create_new_folder</v-icon> </v-list-tile-action> <v-list-tile-content> <v-list-tile-title id="create-project">Create new project</v-list-tile-title> </v-list-tile-content> </v-list-tile> <v-list-tile @click="globalProject" v-if="globalComposerFileExists"> <v-list-tile-action> <v-icon>public</v-icon> </v-list-tile-action> <v-list-tile-content> <v-list-tile-title id="global-composer">Global composer</v-list-tile-title> </v-list-tile-content><|fim▁hole|> </v-list-tile> <v-list-tile to="settings" :router="true"> <v-list-tile-action> <v-icon>settings</v-icon> </v-list-tile-action> <v-list-tile-content> <v-list-tile-title id="open-settings">Settings</v-list-tile-title> </v-list-tile-content> </v-list-tile> </v-list> `, computed: { globalComposerFileExists() { return fs.existsSync(remote.app.getPath('home') + '/.composer'); } }, methods: { existingProject: () => { mainProcess.openDirectory(); }, globalProject: () => { mainProcess.openProject(remote.app.getPath('home') + '/.composer'); }, } }<|fim▁end|>
<|file_name|>ncf_keras_main.py<|end_file_name|><|fim▁begin|># Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """NCF framework to train and evaluate the NeuMF model. The NeuMF model assembles both MF and MLP models under the NCF framework. Check `neumf_model.py` for more details about the models. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import json import os # pylint: disable=g-bad-import-order from absl import app from absl import flags from absl import logging import tensorflow.compat.v2 as tf # pylint: enable=g-bad-import-order from official.recommendation import constants as rconst from official.recommendation import movielens from official.recommendation import ncf_common from official.recommendation import ncf_input_pipeline from official.recommendation import neumf_model from official.utils.flags import core as flags_core from official.utils.misc import distribution_utils from official.utils.misc import keras_utils from official.utils.misc import model_helpers FLAGS = flags.FLAGS def metric_fn(logits, dup_mask, match_mlperf): dup_mask = tf.cast(dup_mask, tf.float32) logits = tf.slice(logits, [0, 1], [-1, -1]) in_top_k, _, metric_weights, _ = neumf_model.compute_top_k_and_ndcg( logits, dup_mask, match_mlperf) metric_weights = tf.cast(metric_weights, tf.float32) 
return in_top_k, metric_weights class MetricLayer(tf.keras.layers.Layer): """Custom layer of metrics for NCF model.""" def __init__(self, match_mlperf): super(MetricLayer, self).__init__() self.match_mlperf = match_mlperf def get_config(self): return {"match_mlperf": self.match_mlperf} @classmethod def from_config(cls, config, custom_objects=None): return cls(**config) def call(self, inputs, training=False): logits, dup_mask = inputs if training: hr_sum = 0.0 hr_count = 0.0 else: metric, metric_weights = metric_fn(logits, dup_mask, self.match_mlperf) hr_sum = tf.reduce_sum(metric * metric_weights) hr_count = tf.reduce_sum(metric_weights) self.add_metric(hr_sum, name="hr_sum", aggregation="mean") self.add_metric(hr_count, name="hr_count", aggregation="mean") return logits class LossLayer(tf.keras.layers.Layer): """Pass-through loss layer for NCF model.""" def __init__(self, loss_normalization_factor): # The loss may overflow in float16, so we use float32 instead. super(LossLayer, self).__init__(dtype="float32") self.loss_normalization_factor = loss_normalization_factor self.loss = tf.keras.losses.SparseCategoricalCrossentropy( from_logits=True, reduction="sum") def get_config(self): return {"loss_normalization_factor": self.loss_normalization_factor} @classmethod def from_config(cls, config, custom_objects=None): return cls(**config) def call(self, inputs): logits, labels, valid_pt_mask_input = inputs loss = self.loss( y_true=labels, y_pred=logits, sample_weight=valid_pt_mask_input) loss = loss * (1.0 / self.loss_normalization_factor) self.add_loss(loss) return logits class IncrementEpochCallback(tf.keras.callbacks.Callback): """A callback to increase the requested epoch for the data producer. The reason why we need this is because we can only buffer a limited amount of data. So we keep a moving window to represent the buffer. This is to move the one of the window's boundaries for each epoch. 
""" def __init__(self, producer): self._producer = producer def on_epoch_begin(self, epoch, logs=None): self._producer.increment_request_epoch() class CustomEarlyStopping(tf.keras.callbacks.Callback): """Stop training has reached a desired hit rate.""" def __init__(self, monitor, desired_value): super(CustomEarlyStopping, self).__init__() self.monitor = monitor self.desired = desired_value self.stopped_epoch = 0 def on_epoch_end(self, epoch, logs=None): current = self.get_monitor_value(logs) if current and current >= self.desired: self.stopped_epoch = epoch self.model.stop_training = True def on_train_end(self, logs=None): if self.stopped_epoch > 0: print("Epoch %05d: early stopping" % (self.stopped_epoch + 1)) def get_monitor_value(self, logs): logs = logs or {} monitor_value = logs.get(self.monitor) if monitor_value is None: logging.warning("Early stopping conditioned on metric `%s` " "which is not available. Available metrics are: %s", self.monitor, ",".join(list(logs.keys()))) return monitor_value def _get_keras_model(params): """Constructs and returns the model.""" batch_size = params["batch_size"] user_input = tf.keras.layers.Input( shape=(1,), name=movielens.USER_COLUMN, dtype=tf.int32) item_input = tf.keras.layers.Input( shape=(1,), name=movielens.ITEM_COLUMN, dtype=tf.int32) valid_pt_mask_input = tf.keras.layers.Input( shape=(1,), name=rconst.VALID_POINT_MASK, dtype=tf.bool) dup_mask_input = tf.keras.layers.Input( shape=(1,), name=rconst.DUPLICATE_MASK, dtype=tf.int32) label_input = tf.keras.layers.Input( shape=(1,), name=rconst.TRAIN_LABEL_KEY, dtype=tf.bool) base_model = neumf_model.construct_model(user_input, item_input, params) logits = base_model.output zeros = tf.keras.layers.Lambda( lambda x: x * 0)(logits) softmax_logits = tf.keras.layers.concatenate( [zeros, logits], axis=-1) # Custom training loop calculates loss and metric as a part of # training/evaluation step function. 
if not params["keras_use_ctl"]: softmax_logits = MetricLayer( params["match_mlperf"])([softmax_logits, dup_mask_input]) # TODO(b/134744680): Use model.add_loss() instead once the API is well # supported. softmax_logits = LossLayer(batch_size)( [softmax_logits, label_input, valid_pt_mask_input]) keras_model = tf.keras.Model( inputs={ movielens.USER_COLUMN: user_input, movielens.ITEM_COLUMN: item_input, rconst.VALID_POINT_MASK: valid_pt_mask_input, rconst.DUPLICATE_MASK: dup_mask_input, rconst.TRAIN_LABEL_KEY: label_input}, outputs=softmax_logits) keras_model.summary() return keras_model def run_ncf(_): """Run NCF training and eval with Keras.""" keras_utils.set_session_config(enable_xla=FLAGS.enable_xla) if FLAGS.seed is not None: print("Setting tf seed") tf.random.set_seed(FLAGS.seed) model_helpers.apply_clean(FLAGS) if FLAGS.dtype == "fp16" and FLAGS.fp16_implementation == "keras": policy = tf.keras.mixed_precision.experimental.Policy( "mixed_float16", loss_scale=flags_core.get_loss_scale(FLAGS, default_for_fp16="dynamic")) tf.keras.mixed_precision.experimental.set_policy(policy) strategy = distribution_utils.get_distribution_strategy( distribution_strategy=FLAGS.distribution_strategy, num_gpus=FLAGS.num_gpus, tpu_address=FLAGS.tpu) params = ncf_common.parse_flags(FLAGS) params["distribute_strategy"] = strategy if params["use_tpu"] and not params["keras_use_ctl"]: logging.error("Custom training loop must be used when using TPUStrategy.") return batch_size = params["batch_size"] time_callback = keras_utils.TimeHistory(batch_size, FLAGS.log_steps) callbacks = [time_callback] producer, input_meta_data = None, None generate_input_online = params["train_dataset_path"] is None if generate_input_online: # Start data producing thread. 
num_users, num_items, _, _, producer = ncf_common.get_inputs(params) producer.start() per_epoch_callback = IncrementEpochCallback(producer) callbacks.append(per_epoch_callback) else: assert params["eval_dataset_path"] and params["input_meta_data_path"] with tf.io.gfile.GFile(params["input_meta_data_path"], "rb") as reader: input_meta_data = json.loads(reader.read().decode("utf-8")) num_users = input_meta_data["num_users"] num_items = input_meta_data["num_items"] params["num_users"], params["num_items"] = num_users, num_items if FLAGS.early_stopping: early_stopping_callback = CustomEarlyStopping( "val_HR_METRIC", desired_value=FLAGS.hr_threshold) callbacks.append(early_stopping_callback) (train_input_dataset, eval_input_dataset, num_train_steps, num_eval_steps) = \ (ncf_input_pipeline.create_ncf_input_data( params, producer, input_meta_data, strategy)) steps_per_epoch = None if generate_input_online else num_train_steps with distribution_utils.get_strategy_scope(strategy): keras_model = _get_keras_model(params) optimizer = tf.keras.optimizers.Adam( learning_rate=params["learning_rate"], beta_1=params["beta1"], beta_2=params["beta2"], epsilon=params["epsilon"]) if FLAGS.fp16_implementation == "graph_rewrite": optimizer = \ tf.compat.v1.train.experimental.enable_mixed_precision_graph_rewrite( optimizer, loss_scale=flags_core.get_loss_scale(FLAGS, default_for_fp16="dynamic")) elif FLAGS.dtype == "fp16" and params["keras_use_ctl"]: # When keras_use_ctl is False, instead Model.fit() automatically applies # loss scaling so we don't need to create a LossScaleOptimizer. 
optimizer = tf.keras.mixed_precision.experimental.LossScaleOptimizer( optimizer, tf.keras.mixed_precision.experimental.global_policy().loss_scale) if params["keras_use_ctl"]: train_loss, eval_results = run_ncf_custom_training( params, strategy, keras_model, optimizer, callbacks, train_input_dataset, eval_input_dataset, num_train_steps, num_eval_steps, generate_input_online=generate_input_online) else: keras_model.compile(optimizer=optimizer, run_eagerly=FLAGS.run_eagerly) if not FLAGS.ml_perf: # Create Tensorboard summary and checkpoint callbacks. summary_dir = os.path.join(FLAGS.model_dir, "summaries") summary_callback = tf.keras.callbacks.TensorBoard(summary_dir) checkpoint_path = os.path.join(FLAGS.model_dir, "checkpoint") checkpoint_callback = tf.keras.callbacks.ModelCheckpoint( checkpoint_path, save_weights_only=True) callbacks += [summary_callback, checkpoint_callback] history = keras_model.fit( train_input_dataset, epochs=FLAGS.train_epochs, steps_per_epoch=steps_per_epoch, callbacks=callbacks, validation_data=eval_input_dataset, validation_steps=num_eval_steps, verbose=2) logging.info("Training done. Start evaluating") eval_loss_and_metrics = keras_model.evaluate( eval_input_dataset, steps=num_eval_steps, verbose=2) logging.info("Keras evaluation is done.") # Keras evaluate() API returns scalar loss and metric values from # evaluation as a list. Here, the returned list would contain # [evaluation loss, hr sum, hr count]. eval_hit_rate = eval_loss_and_metrics[1] / eval_loss_and_metrics[2] # Format evaluation result into [eval loss, eval hit accuracy]. 
eval_results = [eval_loss_and_metrics[0], eval_hit_rate] if history and history.history: train_history = history.history train_loss = train_history["loss"][-1] stats = build_stats(train_loss, eval_results, time_callback) return stats def run_ncf_custom_training(params, strategy, keras_model, optimizer, callbacks, train_input_dataset, eval_input_dataset, num_train_steps, num_eval_steps, generate_input_online=True): """Runs custom training loop. Args: params: Dictionary containing training parameters. strategy: Distribution strategy to be used for distributed training. keras_model: Model used for training. optimizer: Optimizer used for training. callbacks: Callbacks to be invoked between batches/epochs. train_input_dataset: tf.data.Dataset used for training. eval_input_dataset: tf.data.Dataset used for evaluation. num_train_steps: Total number of steps to run for training. num_eval_steps: Total number of steps to run for evaluation. generate_input_online: Whether input data was generated by data producer. When data is generated by data producer, then train dataset must be re-initialized after every epoch. Returns: A tuple of train loss and a list of training and evaluation results. """ loss_object = tf.keras.losses.SparseCategoricalCrossentropy( reduction="sum", from_logits=True) train_input_iterator = iter( strategy.experimental_distribute_dataset(train_input_dataset)) def train_step(train_iterator): """Called once per step to train the model.""" def step_fn(features): """Computes loss and applied gradient per replica.""" with tf.GradientTape() as tape: softmax_logits = keras_model(features) # The loss can overflow in float16, so we cast to float32. 
softmax_logits = tf.cast(softmax_logits, "float32") labels = features[rconst.TRAIN_LABEL_KEY] loss = loss_object( labels, softmax_logits, sample_weight=features[rconst.VALID_POINT_MASK]) loss *= (1.0 / params["batch_size"]) if FLAGS.dtype == "fp16": loss = optimizer.get_scaled_loss(loss)<|fim▁hole|> # Converting gradients to dense form helps in perf on GPU for NCF grads = neumf_model.sparse_to_dense_grads( list(zip(grads, keras_model.trainable_variables))) optimizer.apply_gradients(grads) return loss per_replica_losses = strategy.run( step_fn, args=(next(train_iterator),)) mean_loss = strategy.reduce( tf.distribute.ReduceOp.SUM, per_replica_losses, axis=None) return mean_loss def eval_step(eval_iterator): """Called once per eval step to compute eval metrics.""" def step_fn(features): """Computes eval metrics per replica.""" softmax_logits = keras_model(features) in_top_k, metric_weights = metric_fn(softmax_logits, features[rconst.DUPLICATE_MASK], params["match_mlperf"]) hr_sum = tf.reduce_sum(in_top_k * metric_weights) hr_count = tf.reduce_sum(metric_weights) return hr_sum, hr_count per_replica_hr_sum, per_replica_hr_count = ( strategy.run( step_fn, args=(next(eval_iterator),))) hr_sum = strategy.reduce( tf.distribute.ReduceOp.SUM, per_replica_hr_sum, axis=None) hr_count = strategy.reduce( tf.distribute.ReduceOp.SUM, per_replica_hr_count, axis=None) return hr_sum, hr_count if not FLAGS.run_eagerly: train_step = tf.function(train_step) eval_step = tf.function(eval_step) for callback in callbacks: callback.on_train_begin() # Not writing tensorboard summaries if running in MLPerf. 
if FLAGS.ml_perf: eval_summary_writer, train_summary_writer = None, None else: summary_dir = os.path.join(FLAGS.model_dir, "summaries") eval_summary_writer = tf.summary.create_file_writer( os.path.join(summary_dir, "eval")) train_summary_writer = tf.summary.create_file_writer( os.path.join(summary_dir, "train")) train_loss = 0 for epoch in range(FLAGS.train_epochs): for cb in callbacks: cb.on_epoch_begin(epoch) # As NCF dataset is sampled with randomness, not repeating # data elements in each epoch has significant impact on # convergence. As so, offline-generated TF record files # contains all epoch worth of data. Thus we do not need # to initialize dataset when reading from tf record files. if generate_input_online: train_input_iterator = iter( strategy.experimental_distribute_dataset(train_input_dataset)) train_loss = 0 for step in range(num_train_steps): current_step = step + epoch * num_train_steps for c in callbacks: c.on_batch_begin(current_step) train_loss += train_step(train_input_iterator) # Write train loss once in every 1000 steps. 
if train_summary_writer and step % 1000 == 0: with train_summary_writer.as_default(): tf.summary.scalar("training_loss", train_loss/(step + 1), step=current_step) for c in callbacks: c.on_batch_end(current_step) train_loss /= num_train_steps logging.info("Done training epoch %s, epoch loss=%s.", epoch + 1, train_loss) eval_input_iterator = iter( strategy.experimental_distribute_dataset(eval_input_dataset)) hr_sum = 0 hr_count = 0 for _ in range(num_eval_steps): step_hr_sum, step_hr_count = eval_step(eval_input_iterator) hr_sum += step_hr_sum hr_count += step_hr_count logging.info("Done eval epoch %s, hit_rate=%s.", epoch + 1, hr_sum / hr_count) if eval_summary_writer: with eval_summary_writer.as_default(): tf.summary.scalar("hit_rate", hr_sum / hr_count, step=current_step) if (FLAGS.early_stopping and float(hr_sum / hr_count) > params["hr_threshold"]): break for c in callbacks: c.on_train_end() # Saving the model at the end of training. if not FLAGS.ml_perf: checkpoint = tf.train.Checkpoint(model=keras_model, optimizer=optimizer) checkpoint_path = os.path.join(FLAGS.model_dir, "ctl_checkpoint") checkpoint.save(checkpoint_path) logging.info("Saving model as TF checkpoint: %s", checkpoint_path) return train_loss, [None, hr_sum / hr_count] def build_stats(loss, eval_result, time_callback): """Normalizes and returns dictionary of stats. Args: loss: The final loss at training time. eval_result: Output of the eval step. Assumes first value is eval_loss and second value is accuracy_top_1. time_callback: Time tracking callback likely used during keras.fit. Returns: Dictionary of normalized results. 
""" stats = {} if loss: stats["loss"] = loss if eval_result: stats["eval_loss"] = eval_result[0] stats["eval_hit_rate"] = eval_result[1] if time_callback: timestamp_log = time_callback.timestamp_log stats["step_timestamp_log"] = timestamp_log stats["train_finish_time"] = time_callback.train_finish_time if len(timestamp_log) > 1: stats["avg_exp_per_second"] = ( time_callback.batch_size * time_callback.log_steps * (len(time_callback.timestamp_log)-1) / (timestamp_log[-1].timestamp - timestamp_log[0].timestamp)) return stats def main(_): logging.info("Result is %s", run_ncf(FLAGS)) if __name__ == "__main__": ncf_common.define_ncf_flags() app.run(main)<|fim▁end|>
grads = tape.gradient(loss, keras_model.trainable_variables) if FLAGS.dtype == "fp16": grads = optimizer.get_unscaled_gradients(grads)
<|file_name|>StringPrintWriter.java<|end_file_name|><|fim▁begin|>/* * Copyright 2002-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package lila.runtime; import java.io.PrintWriter; import java.io.StringWriter; /** * <p>A PrintWriter that maintains a String as its backing store.</p> * * <p>Usage: * <pre> * StringPrintWriter out = new StringPrintWriter(); * printTo(out); * System.out.println( out.getString() ); * </pre> * </p> * * @author Alex Chaffee * @author Scott Stanchfield * @author Gary D. Gregory * @since 2.0 */ class StringPrintWriter extends PrintWriter { /** * Constructs a new instance. */ public StringPrintWriter() { super(new StringWriter()); } /** * Constructs a new instance using the specified initial string-buffer * size. * * @param initialSize an int specifying the initial size of the buffer. */ public StringPrintWriter(int initialSize) { super(new StringWriter(initialSize)); } /** * <p>Since toString() returns information *about* this object, we * want a separate method to extract just the contents of the * internal buffer as a String.</p> * * @return the contents of the internal string buffer */ public String getString() { flush();<|fim▁hole|> } }<|fim▁end|>
return ((StringWriter) this.out).toString();
<|file_name|>Class.py<|end_file_name|><|fim▁begin|># GemRB - Infinity Engine Emulator # Copyright (C) 2003 The GemRB Project # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # #character generation, class (GUICG2) import GemRB from GUIDefines import * import CommonTables ClassWindow = 0 TextAreaControl = 0 DoneButton = 0 BackButton = 0 ClassCount = 0 HasSubClass = 0<|fim▁hole|>def AdjustTextArea(): global HasSubClass, ClassID Class = GemRB.GetVar("Class")-1 TextAreaControl.SetText(CommonTables.Classes.GetValue(Class,1) ) ClassName = CommonTables.Classes.GetRowName(Class) ClassID = CommonTables.Classes.GetValue(ClassName, "ID") #determining if this class has any subclasses HasSubClass = 0 for i in range(1, ClassCount): ClassName = CommonTables.Classes.GetRowName(i-1) #determining if this is a kit or class Allowed = CommonTables.Classes.GetValue(ClassName, "CLASS") if Allowed != ClassID: continue HasSubClass = 1 break if HasSubClass == 0: DoneButton.SetState(IE_GUI_BUTTON_ENABLED) else: DoneButton.SetState(IE_GUI_BUTTON_DISABLED) return def OnLoad(): global ClassWindow, TextAreaControl, DoneButton, BackButton global ClassCount GemRB.LoadWindowPack("GUICG", 800, 600) #this replaces help02.2da for class restrictions ClassCount = CommonTables.Classes.GetRowCount()+1 ClassWindow = GemRB.LoadWindow(2) rid = 
CommonTables.Races.FindValue(3, GemRB.GetVar('BaseRace')) RaceName = CommonTables.Races.GetRowName(rid) #radiobutton groups must be set up before doing anything else to them j = 0 for i in range(1,ClassCount): ClassName = CommonTables.Classes.GetRowName(i-1) Allowed = CommonTables.Classes.GetValue(ClassName, "CLASS") if Allowed > 0: continue Button = ClassWindow.GetControl(j+2) j = j+1 Button.SetFlags(IE_GUI_BUTTON_RADIOBUTTON, OP_SET) Button.SetState(IE_GUI_BUTTON_DISABLED) j = 0 for i in range(1,ClassCount): ClassName = CommonTables.Classes.GetRowName(i-1) #determining if this is a kit or class Allowed = CommonTables.Classes.GetValue(ClassName, "CLASS") if Allowed > 0: continue Allowed = CommonTables.Classes.GetValue(ClassName, RaceName) Button = ClassWindow.GetControl(j+2) j = j+1 t = CommonTables.Classes.GetValue(ClassName, "NAME_REF") Button.SetText(t ) if Allowed==0: continue Button.SetState(IE_GUI_BUTTON_ENABLED) Button.SetEvent(IE_GUI_BUTTON_ON_PRESS, ClassPress) Button.SetVarAssoc("Class", i) BackButton = ClassWindow.GetControl(17) BackButton.SetText(15416) BackButton.SetFlags(IE_GUI_BUTTON_CANCEL,OP_OR) DoneButton = ClassWindow.GetControl(0) DoneButton.SetText(36789) DoneButton.SetFlags(IE_GUI_BUTTON_DEFAULT,OP_OR) ScrollBarControl = ClassWindow.GetControl(15) TextAreaControl = ClassWindow.GetControl(16) Class = GemRB.GetVar("Class")-1 if Class<0: TextAreaControl.SetText(17242) DoneButton.SetState(IE_GUI_BUTTON_DISABLED) else: AdjustTextArea() DoneButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, NextPress) BackButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, BackPress) ClassWindow.SetVisible(WINDOW_VISIBLE) return def ClassPress(): global HasSubClass AdjustTextArea() if HasSubClass == 0: return DoneButton.SetState(IE_GUI_BUTTON_DISABLED) j = 0 for i in range(1,ClassCount): ClassName = CommonTables.Classes.GetRowName(i-1) Allowed = CommonTables.Classes.GetValue(ClassName, "CLASS") if Allowed > 0: continue Button = ClassWindow.GetControl(j+2) j = j+1 
Button.SetFlags(IE_GUI_BUTTON_RADIOBUTTON, OP_SET) Button.SetState(IE_GUI_BUTTON_DISABLED) Button.SetText("") j=0 for i in range(1, ClassCount): ClassName = CommonTables.Classes.GetRowName(i-1) #determining if this is a kit or class Allowed = CommonTables.Classes.GetValue(ClassName, "CLASS") if Allowed != ClassID: continue Button = ClassWindow.GetControl(j+2) j = j+1 t = CommonTables.Classes.GetValue(ClassName, "NAME_REF") Button.SetText(t ) Button.SetState(IE_GUI_BUTTON_ENABLED) Button.SetEvent(IE_GUI_BUTTON_ON_PRESS, ClassPress2) Button.SetVarAssoc("Class", i) BackButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, BackPress2) return def ClassPress2(): Class = GemRB.GetVar("Class")-1 TextAreaControl.SetText(CommonTables.Classes.GetValue(Class,1) ) DoneButton.SetState(IE_GUI_BUTTON_ENABLED) return def BackPress2(): DoneButton.SetState(IE_GUI_BUTTON_DISABLED) if ClassWindow: ClassWindow.Unload() OnLoad() return def BackPress(): if ClassWindow: ClassWindow.Unload() GemRB.SetNextScript("CharGen3") GemRB.SetVar("Class",0) #scrapping the class value MyChar = GemRB.GetVar("Slot") GemRB.SetPlayerStat (IE_CLASS, 0) return def NextPress(): #classcolumn is base class Class = GemRB.GetVar("Class") ClassColumn = CommonTables.Classes.GetValue(Class - 1, 3) if ClassColumn <= 0: #it was already a base class ClassColumn = Class GemRB.SetVar("BaseClass", ClassColumn) if ClassWindow: ClassWindow.Unload() GemRB.SetNextScript("CharGen4") #alignment return<|fim▁end|>
ClassID = 0
<|file_name|>config.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- ############################################################################### # Copyright 2013 Kitware Inc. # # Licensed under the Apache License, Version 2.0 ( the "License" ); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ############################################################################### import cherrypy import os import six from girder.constants import PACKAGE_DIR def _mergeConfig(filename): """ Load `filename` into the cherrypy config. Also, handle global options by putting them in the root. """ cherrypy._cpconfig.merge(cherrypy.config, filename) # When in Sphinx, cherrypy may be mocked and returning None global_config = cherrypy.config.pop('global', {}) or {} for option, value in six.viewitems(global_config): cherrypy.config[option] = value def _loadConfigsByPrecedent(): """ Load configuration in reverse order of precedent. 
""" configPaths = [os.path.join(PACKAGE_DIR, 'conf', 'girder.dist.cfg')] if 'GIRDER_TEST_DB' not in os.environ: # we don't want to load the local config file if we are running tests configPaths.append(os.path.join(PACKAGE_DIR, 'conf', 'girder.local.cfg')) configPaths.append(os.path.join('/etc', 'girder.cfg')) configPaths.append(os.path.join(os.path.expanduser('~'), '.girder', 'girder.cfg')) if 'GIRDER_CONFIG' in os.environ: configPaths.append(os.environ['GIRDER_CONFIG']) for curConfigPath in configPaths: if os.path.exists(curConfigPath): _mergeConfig(curConfigPath) def loadConfig(): _loadConfigsByPrecedent() if 'GIRDER_PORT' in os.environ: port = int(os.environ['GIRDER_PORT']) cherrypy.config['server.socket_port'] = port<|fim▁hole|> cherrypy.config['database'] = {} cherrypy.config['database']['uri'] = os.getenv('GIRDER_MONGO_URI') if 'GIRDER_TEST_DB' in os.environ: cherrypy.config['database']['uri'] =\ os.environ['GIRDER_TEST_DB'].replace('.', '_') def getConfig(): if 'database' not in cherrypy.config: loadConfig() # When in Sphinx, cherrypy may be mocked and returning None return cherrypy.config or {}<|fim▁end|>
if 'GIRDER_MONGO_URI' in os.environ: if 'database' not in cherrypy.config:
<|file_name|>HomeController.java<|end_file_name|><|fim▁begin|>/* * Copyright 2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.carlomicieli.nerdmovies.controllers; import com.github.carlomicieli.nerdmovies.services.MovieService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; /**<|fim▁hole|>public class HomeController { private MovieService movieService; @Autowired public HomeController(MovieService movieService) { this.movieService = movieService; } @RequestMapping(value = {"/", "/home"}, method = RequestMethod.GET) public String index(Model model) { model.addAttribute("movies", movieService.getRecentMovies(10)); return "home/index"; } @RequestMapping(value = "/about", method = RequestMethod.GET) public String about() { return "home/about"; } @RequestMapping(value = "/default", method = RequestMethod.GET) public String defaultPage() { return "home/index"; } }<|fim▁end|>
* @author Carlo Micieli */ @Controller @RequestMapping("/")
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|># Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from copy import deepcopy from recipe_engine import recipe_test_api class iOSTestApi(recipe_test_api.RecipeTestApi): @recipe_test_api.mod_test_data @staticmethod def build_config(config): return deepcopy(config) def make_test_build_config(self, config): return self.build_config(config) @recipe_test_api.mod_test_data @staticmethod def parent_build_config(config): return deepcopy(config) def make_test_build_config_for_parent(self, config): return self.parent_build_config(config) def host_info(self): return self.m.json.output({ 'Mac OS X Version': '1.2.3', 'Xcode Version': '6.7.8', 'Xcode Build Version': '5D342509a', 'Xcode SDKs': [ 'fake sdk 1.0', 'fake sdk 1.1', 'fake sdk 2.0', ], }) def test_results(self): return self.m.json.output({ 'links': { 'fake URL text': 'fake URL', }, 'logs': { 'fake log': [ 'fake log line 1', 'fake log line 2', ], }<|fim▁hole|><|fim▁end|>
})
<|file_name|>grammar.peg.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2018 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. package m3ql //go:generate peg -inline -switch src/query/parser/m3ql/grammar.peg import ( "fmt" "io" "math" "os" "sort" "strconv" ) const endSymbol rune = 1114112 /* The rule types inferred from the grammar are below. 
*/ type pegRule uint8 const ( ruleUnknown pegRule = iota ruleGrammar ruleMacroDef rulePipeline ruleExpression ruleFunctionCall ruleArgument ruleKeywordSpecifier ruleNesting ruleSpacing ruleSpace ruleEOL ruleComment ruleCommentStart ruleIdentifier ruleIdentifierStart ruleIdentifierChars ruleOperator ruleOperatorSymbols ruleBoolean ruleTrue ruleFalse ruleNumber ruleIntegralNumber ruleFloatingNumber ruleMinus ruleStringLiteral ruleQuoteChar rulePattern rulePatternChars ruleGlobSymbols ruleSemicolon ruleEquals rulePipe ruleLParenthesis ruleRParenthesis ruleColon ruleEOF ruleAction0 ruleAction1 ruleAction2 ruleAction3 ruleAction4 ruleAction5 ruleAction6 ruleAction7 ruleAction8 ruleAction9 rulePegText ) var rul3s = [...]string{ "Unknown", "Grammar", "MacroDef", "Pipeline", "Expression", "FunctionCall", "Argument", "KeywordSpecifier", "Nesting", "Spacing", "Space", "EOL", "Comment", "CommentStart", "Identifier", "IdentifierStart", "IdentifierChars", "Operator", "OperatorSymbols", "Boolean", "True", "False", "Number", "IntegralNumber", "FloatingNumber", "Minus", "StringLiteral", "QuoteChar", "Pattern", "PatternChars", "GlobSymbols", "Semicolon", "Equals", "Pipe", "LParenthesis", "RParenthesis", "Colon", "EOF", "Action0", "Action1", "Action2", "Action3", "Action4", "Action5", "Action6", "Action7", "Action8", "Action9", "PegText", } type token32 struct { pegRule begin, end uint32 } func (t *token32) String() string { return fmt.Sprintf("\x1B[34m%v\x1B[m %v %v", rul3s[t.pegRule], t.begin, t.end) } type node32 struct { token32 up, next *node32 } func (node *node32) print(w io.Writer, pretty bool, buffer string) { var print func(node *node32, depth int) print = func(node *node32, depth int) { for node != nil { for c := 0; c < depth; c++ { fmt.Printf(" ") } rule := rul3s[node.pegRule] quote := strconv.Quote(string(([]rune(buffer)[node.begin:node.end]))) if !pretty { fmt.Fprintf(w, "%v %v\n", rule, quote) } else { fmt.Fprintf(w, "\x1B[34m%v\x1B[m %v\n", rule, quote) } if node.up 
!= nil { print(node.up, depth+1) } node = node.next } } print(node, 0) } func (node *node32) Print(w io.Writer, buffer string) { node.print(w, false, buffer) } func (node *node32) PrettyPrint(w io.Writer, buffer string) { node.print(w, true, buffer) } type tokens32 struct { tree []token32 } func (t *tokens32) Trim(length uint32) { t.tree = t.tree[:length] } func (t *tokens32) Print() { for _, token := range t.tree { fmt.Println(token.String()) } } func (t *tokens32) AST() *node32 { type element struct { node *node32 down *element } tokens := t.Tokens() var stack *element for _, token := range tokens { if token.begin == token.end { continue } node := &node32{token32: token} for stack != nil && stack.node.begin >= token.begin && stack.node.end <= token.end { stack.node.next = node.up node.up = stack.node stack = stack.down } stack = &element{node: node, down: stack} } if stack != nil { return stack.node } return nil<|fim▁hole|>} func (t *tokens32) PrintSyntaxTree(buffer string) { t.AST().Print(os.Stdout, buffer) } func (t *tokens32) WriteSyntaxTree(w io.Writer, buffer string) { t.AST().Print(w, buffer) } func (t *tokens32) PrettyPrintSyntaxTree(buffer string) { t.AST().PrettyPrint(os.Stdout, buffer) } func (t *tokens32) Add(rule pegRule, begin, end, index uint32) { if tree := t.tree; int(index) >= len(tree) { expanded := make([]token32, 2*len(tree)) copy(expanded, tree) t.tree = expanded } t.tree[index] = token32{ pegRule: rule, begin: begin, end: end, } } func (t *tokens32) Tokens() []token32 { return t.tree } type m3ql struct { scriptBuilder Buffer string buffer []rune rules [49]func() bool parse func(rule ...int) error reset func() Pretty bool tokens32 } func (p *m3ql) Parse(rule ...int) error { return p.parse(rule...) 
} func (p *m3ql) Reset() { p.reset() } type textPosition struct { line, symbol int } type textPositionMap map[int]textPosition func translatePositions(buffer []rune, positions []int) textPositionMap { length, translations, j, line, symbol := len(positions), make(textPositionMap, len(positions)), 0, 1, 0 sort.Ints(positions) search: for i, c := range buffer { if c == '\n' { line, symbol = line+1, 0 } else { symbol++ } if i == positions[j] { translations[positions[j]] = textPosition{line, symbol} for j++; j < length; j++ { if i != positions[j] { continue search } } break search } } return translations } type parseError struct { p *m3ql max token32 } func (e *parseError) Error() string { tokens, error := []token32{e.max}, "\n" positions, p := make([]int, 2*len(tokens)), 0 for _, token := range tokens { positions[p], p = int(token.begin), p+1 positions[p], p = int(token.end), p+1 } translations := translatePositions(e.p.buffer, positions) format := "parse error near %v (line %v symbol %v - line %v symbol %v):\n%v\n" if e.p.Pretty { format = "parse error near \x1B[34m%v\x1B[m (line %v symbol %v - line %v symbol %v):\n%v\n" } for _, token := range tokens { begin, end := int(token.begin), int(token.end) error += fmt.Sprintf(format, rul3s[token.pegRule], translations[begin].line, translations[begin].symbol, translations[end].line, translations[end].symbol, strconv.Quote(string(e.p.buffer[begin:end]))) } return error } func (p *m3ql) PrintSyntaxTree() { if p.Pretty { p.tokens32.PrettyPrintSyntaxTree(p.Buffer) } else { p.tokens32.PrintSyntaxTree(p.Buffer) } } func (p *m3ql) WriteSyntaxTree(w io.Writer) { p.tokens32.WriteSyntaxTree(w, p.Buffer) } func (p *m3ql) Execute() { buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0 for _, token := range p.Tokens() { switch token.pegRule { case rulePegText: begin, end = int(token.begin), int(token.end) text = string(_buffer[begin:end]) case ruleAction0: p.newMacro(text) case ruleAction1: p.newPipeline() case ruleAction2: 
p.endPipeline() case ruleAction3: p.newExpression(text) case ruleAction4: p.endExpression() case ruleAction5: p.newBooleanArgument(text) case ruleAction6: p.newNumericArgument(text) case ruleAction7: p.newPatternArgument(text) case ruleAction8: p.newStringLiteralArgument(text) case ruleAction9: p.newKeywordArgument(text) } } _, _, _, _, _ = buffer, _buffer, text, begin, end } func (p *m3ql) Init() { var ( max token32 position, tokenIndex uint32 buffer []rune ) p.reset = func() { max = token32{} position, tokenIndex = 0, 0 p.buffer = []rune(p.Buffer) if len(p.buffer) == 0 || p.buffer[len(p.buffer)-1] != endSymbol { p.buffer = append(p.buffer, endSymbol) } buffer = p.buffer } p.reset() _rules := p.rules tree := tokens32{tree: make([]token32, math.MaxInt16)} p.parse = func(rule ...int) error { r := 1 if len(rule) > 0 { r = rule[0] } matches := p.rules[r]() p.tokens32 = tree if matches { p.Trim(tokenIndex) return nil } return &parseError{p, max} } add := func(rule pegRule, begin uint32) { tree.Add(rule, begin, position, tokenIndex) tokenIndex++ if begin != position && position > max.end { max = token32{rule, begin, position} } } matchDot := func() bool { if buffer[position] != endSymbol { position++ return true } return false } /*matchChar := func(c byte) bool { if buffer[position] == c { position++ return true } return false }*/ /*matchRange := func(lower byte, upper byte) bool { if c := buffer[position]; c >= lower && c <= upper { position++ return true } return false }*/ _rules = [...]func() bool{ nil, /* 0 Grammar <- <(Spacing (MacroDef Semicolon)* Pipeline EOF)> */ func() bool { position0, tokenIndex0 := position, tokenIndex { position1 := position if !_rules[ruleSpacing]() { goto l0 } l2: { position3, tokenIndex3 := position, tokenIndex { position4 := position if !_rules[ruleIdentifier]() { goto l3 } { add(ruleAction0, position) } { position6 := position if buffer[position] != rune('=') { goto l3 } position++ if !_rules[ruleSpacing]() { goto l3 } add(ruleEquals, 
position6) } if !_rules[rulePipeline]() { goto l3 } add(ruleMacroDef, position4) } { position7 := position if buffer[position] != rune(';') { goto l3 } position++ if !_rules[ruleSpacing]() { goto l3 } add(ruleSemicolon, position7) } goto l2 l3: position, tokenIndex = position3, tokenIndex3 } if !_rules[rulePipeline]() { goto l0 } { position8 := position { position9, tokenIndex9 := position, tokenIndex if !matchDot() { goto l9 } goto l0 l9: position, tokenIndex = position9, tokenIndex9 } add(ruleEOF, position8) } add(ruleGrammar, position1) } return true l0: position, tokenIndex = position0, tokenIndex0 return false }, /* 1 MacroDef <- <(Identifier Action0 Equals Pipeline)> */ nil, /* 2 Pipeline <- <(Action1 Expression (Pipe Expression)* Action2)> */ func() bool { position11, tokenIndex11 := position, tokenIndex { position12 := position { add(ruleAction1, position) } if !_rules[ruleExpression]() { goto l11 } l14: { position15, tokenIndex15 := position, tokenIndex { position16 := position if buffer[position] != rune('|') { goto l15 } position++ if !_rules[ruleSpacing]() { goto l15 } add(rulePipe, position16) } if !_rules[ruleExpression]() { goto l15 } goto l14 l15: position, tokenIndex = position15, tokenIndex15 } { add(ruleAction2, position) } add(rulePipeline, position12) } return true l11: position, tokenIndex = position11, tokenIndex11 return false }, /* 3 Expression <- <(FunctionCall / Nesting)> */ func() bool { position18, tokenIndex18 := position, tokenIndex { position19 := position { position20, tokenIndex20 := position, tokenIndex { position22 := position { position23, tokenIndex23 := position, tokenIndex if !_rules[ruleIdentifier]() { goto l24 } goto l23 l24: position, tokenIndex = position23, tokenIndex23 { position25 := position { position26 := position { position27 := position { position28, tokenIndex28 := position, tokenIndex if buffer[position] != rune('<') { goto l29 } position++ if buffer[position] != rune('=') { goto l29 } position++ goto l28 l29: 
position, tokenIndex = position28, tokenIndex28 if buffer[position] != rune('>') { goto l30 } position++ if buffer[position] != rune('=') { goto l30 } position++ goto l28 l30: position, tokenIndex = position28, tokenIndex28 { switch buffer[position] { case '>': if buffer[position] != rune('>') { goto l21 } position++ break case '!': if buffer[position] != rune('!') { goto l21 } position++ if buffer[position] != rune('=') { goto l21 } position++ break case '=': if buffer[position] != rune('=') { goto l21 } position++ if buffer[position] != rune('=') { goto l21 } position++ break default: if buffer[position] != rune('<') { goto l21 } position++ break } } } l28: add(ruleOperatorSymbols, position27) } add(rulePegText, position26) } if !_rules[ruleSpacing]() { goto l21 } add(ruleOperator, position25) } } l23: { add(ruleAction3, position) } l33: { position34, tokenIndex34 := position, tokenIndex { position35 := position { position36, tokenIndex36 := position, tokenIndex { position38 := position if !_rules[ruleIdentifier]() { goto l36 } { add(ruleAction9, position) } { position40 := position if buffer[position] != rune(':') { goto l36 } position++ if !_rules[ruleSpacing]() { goto l36 } add(ruleColon, position40) } add(ruleKeywordSpecifier, position38) } goto l37 l36: position, tokenIndex = position36, tokenIndex36 } l37: { position41, tokenIndex41 := position, tokenIndex { position43 := position { position44 := position { position45, tokenIndex45 := position, tokenIndex { position47 := position { position48, tokenIndex48 := position, tokenIndex if buffer[position] != rune('t') { goto l49 } position++ goto l48 l49: position, tokenIndex = position48, tokenIndex48 if buffer[position] != rune('T') { goto l46 } position++ } l48: { position50, tokenIndex50 := position, tokenIndex if buffer[position] != rune('r') { goto l51 } position++ goto l50 l51: position, tokenIndex = position50, tokenIndex50 if buffer[position] != rune('R') { goto l46 } position++ } l50: { position52, 
tokenIndex52 := position, tokenIndex if buffer[position] != rune('u') { goto l53 } position++ goto l52 l53: position, tokenIndex = position52, tokenIndex52 if buffer[position] != rune('U') { goto l46 } position++ } l52: { position54, tokenIndex54 := position, tokenIndex if buffer[position] != rune('e') { goto l55 } position++ goto l54 l55: position, tokenIndex = position54, tokenIndex54 if buffer[position] != rune('E') { goto l46 } position++ } l54: add(ruleTrue, position47) } goto l45 l46: position, tokenIndex = position45, tokenIndex45 { position56 := position { position57, tokenIndex57 := position, tokenIndex if buffer[position] != rune('f') { goto l58 } position++ goto l57 l58: position, tokenIndex = position57, tokenIndex57 if buffer[position] != rune('F') { goto l42 } position++ } l57: { position59, tokenIndex59 := position, tokenIndex if buffer[position] != rune('a') { goto l60 } position++ goto l59 l60: position, tokenIndex = position59, tokenIndex59 if buffer[position] != rune('A') { goto l42 } position++ } l59: { position61, tokenIndex61 := position, tokenIndex if buffer[position] != rune('l') { goto l62 } position++ goto l61 l62: position, tokenIndex = position61, tokenIndex61 if buffer[position] != rune('L') { goto l42 } position++ } l61: { position63, tokenIndex63 := position, tokenIndex if buffer[position] != rune('s') { goto l64 } position++ goto l63 l64: position, tokenIndex = position63, tokenIndex63 if buffer[position] != rune('S') { goto l42 } position++ } l63: { position65, tokenIndex65 := position, tokenIndex if buffer[position] != rune('e') { goto l66 } position++ goto l65 l66: position, tokenIndex = position65, tokenIndex65 if buffer[position] != rune('E') { goto l42 } position++ } l65: add(ruleFalse, position56) } } l45: add(rulePegText, position44) } { position67, tokenIndex67 := position, tokenIndex if !_rules[rulePatternChars]() { goto l67 } goto l42 l67: position, tokenIndex = position67, tokenIndex67 } if !_rules[ruleSpacing]() { goto 
l42 } add(ruleBoolean, position43) } { add(ruleAction5, position) } goto l41 l42: position, tokenIndex = position41, tokenIndex41 { position70 := position { position71 := position { position72, tokenIndex72 := position, tokenIndex { position74 := position if buffer[position] != rune('-') { goto l72 } position++ add(ruleMinus, position74) } goto l73 l72: position, tokenIndex = position72, tokenIndex72 } l73: { position75, tokenIndex75 := position, tokenIndex { position77 := position { position78, tokenIndex78 := position, tokenIndex if !_rules[ruleIntegralNumber]() { goto l78 } goto l79 l78: position, tokenIndex = position78, tokenIndex78 } l79: if buffer[position] != rune('.') { goto l76 } position++ if !_rules[ruleIntegralNumber]() { goto l76 } add(ruleFloatingNumber, position77) } goto l75 l76: position, tokenIndex = position75, tokenIndex75 if !_rules[ruleIntegralNumber]() { goto l69 } } l75: add(rulePegText, position71) } { position80, tokenIndex80 := position, tokenIndex if !_rules[rulePatternChars]() { goto l80 } goto l69 l80: position, tokenIndex = position80, tokenIndex80 } if !_rules[ruleSpacing]() { goto l69 } add(ruleNumber, position70) } { add(ruleAction6, position) } goto l41 l69: position, tokenIndex = position41, tokenIndex41 { switch buffer[position] { case '(': if !_rules[ruleNesting]() { goto l34 } break case '"': { position83 := position if !_rules[ruleQuoteChar]() { goto l34 } { position84 := position l85: { position86, tokenIndex86 := position, tokenIndex { position87, tokenIndex87 := position, tokenIndex if buffer[position] != rune('"') { goto l87 } position++ goto l86 l87: position, tokenIndex = position87, tokenIndex87 } if !matchDot() { goto l86 } goto l85 l86: position, tokenIndex = position86, tokenIndex86 } add(rulePegText, position84) } if !_rules[ruleQuoteChar]() { goto l34 } if !_rules[ruleSpacing]() { goto l34 } add(ruleStringLiteral, position83) } { add(ruleAction8, position) } break default: { position89 := position { position90 := 
position if !_rules[rulePatternChars]() { goto l34 } l91: { position92, tokenIndex92 := position, tokenIndex if !_rules[rulePatternChars]() { goto l92 } goto l91 l92: position, tokenIndex = position92, tokenIndex92 } add(rulePegText, position90) } if !_rules[ruleSpacing]() { goto l34 } add(rulePattern, position89) } { add(ruleAction7, position) } break } } } l41: add(ruleArgument, position35) } goto l33 l34: position, tokenIndex = position34, tokenIndex34 } { add(ruleAction4, position) } add(ruleFunctionCall, position22) } goto l20 l21: position, tokenIndex = position20, tokenIndex20 if !_rules[ruleNesting]() { goto l18 } } l20: add(ruleExpression, position19) } return true l18: position, tokenIndex = position18, tokenIndex18 return false }, /* 4 FunctionCall <- <((Identifier / Operator) Action3 Argument* Action4)> */ nil, /* 5 Argument <- <(KeywordSpecifier? ((Boolean Action5) / (Number Action6) / ((&('(') Nesting) | (&('"') (StringLiteral Action8)) | (&('$' | '*' | ',' | '-' | '.' | '/' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '?' 
| 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z' | '[' | '\\' | ']' | '^' | '_' | 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z' | '{' | '}') (Pattern Action7)))))> */ nil, /* 6 KeywordSpecifier <- <(Identifier Action9 Colon)> */ nil, /* 7 Nesting <- <(LParenthesis Pipeline RParenthesis)> */ func() bool { position98, tokenIndex98 := position, tokenIndex { position99 := position { position100 := position if buffer[position] != rune('(') { goto l98 } position++ if !_rules[ruleSpacing]() { goto l98 } add(ruleLParenthesis, position100) } if !_rules[rulePipeline]() { goto l98 } { position101 := position if buffer[position] != rune(')') { goto l98 } position++ if !_rules[ruleSpacing]() { goto l98 } add(ruleRParenthesis, position101) } add(ruleNesting, position99) } return true l98: position, tokenIndex = position98, tokenIndex98 return false }, /* 8 Spacing <- <((&('#') Comment) | (&('\n' | '\r') EOL) | (&('\t' | ' ') Space))*> */ func() bool { { position103 := position l104: { position105, tokenIndex105 := position, tokenIndex { switch buffer[position] { case '#': { position107 := position { position108 := position if buffer[position] != rune('#') { goto l105 } position++ add(ruleCommentStart, position108) } l109: { position110, tokenIndex110 := position, tokenIndex { position111, tokenIndex111 := position, tokenIndex if !_rules[ruleEOL]() { goto l111 } goto l110 l111: position, tokenIndex = position111, tokenIndex111 } if !matchDot() { goto l110 } goto l109 l110: position, tokenIndex = position110, tokenIndex110 } add(ruleComment, position107) } break case '\n', '\r': if !_rules[ruleEOL]() { goto l105 } break default: { position112 := position { position113, tokenIndex113 := position, tokenIndex if buffer[position] != rune(' ') { goto l114 } position++ goto 
l113 l114: position, tokenIndex = position113, tokenIndex113 if buffer[position] != rune('\t') { goto l105 } position++ } l113: add(ruleSpace, position112) } break } } goto l104 l105: position, tokenIndex = position105, tokenIndex105 } add(ruleSpacing, position103) } return true }, /* 9 Space <- <(' ' / '\t')> */ nil, /* 10 EOL <- <(('\r' '\n') / '\n' / '\r')> */ func() bool { position116, tokenIndex116 := position, tokenIndex { position117 := position { position118, tokenIndex118 := position, tokenIndex if buffer[position] != rune('\r') { goto l119 } position++ if buffer[position] != rune('\n') { goto l119 } position++ goto l118 l119: position, tokenIndex = position118, tokenIndex118 if buffer[position] != rune('\n') { goto l120 } position++ goto l118 l120: position, tokenIndex = position118, tokenIndex118 if buffer[position] != rune('\r') { goto l116 } position++ } l118: add(ruleEOL, position117) } return true l116: position, tokenIndex = position116, tokenIndex116 return false }, /* 11 Comment <- <(CommentStart (!EOL .)*)> */ nil, /* 12 CommentStart <- <'#'> */ nil, /* 13 Identifier <- <(<(IdentifierStart IdentifierChars*)> Spacing)> */ func() bool { position123, tokenIndex123 := position, tokenIndex { position124 := position { position125 := position if !_rules[ruleIdentifierStart]() { goto l123 } l126: { position127, tokenIndex127 := position, tokenIndex if !_rules[ruleIdentifierChars]() { goto l127 } goto l126 l127: position, tokenIndex = position127, tokenIndex127 } add(rulePegText, position125) } if !_rules[ruleSpacing]() { goto l123 } add(ruleIdentifier, position124) } return true l123: position, tokenIndex = position123, tokenIndex123 return false }, /* 14 IdentifierStart <- <((&('_') '_') | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z') [A-Z]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 
'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') [a-z]))> */ func() bool { position128, tokenIndex128 := position, tokenIndex { position129 := position { switch buffer[position] { case '_': if buffer[position] != rune('_') { goto l128 } position++ break case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': if c := buffer[position]; c < rune('A') || c > rune('Z') { goto l128 } position++ break default: if c := buffer[position]; c < rune('a') || c > rune('z') { goto l128 } position++ break } } add(ruleIdentifierStart, position129) } return true l128: position, tokenIndex = position128, tokenIndex128 return false }, /* 15 IdentifierChars <- <((&('\\') '\\') | (&('/') '/') | (&('-') '-') | (&('.') '.') | (&('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9') [0-9]) | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z' | '_' | 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') IdentifierStart))> */ func() bool { position131, tokenIndex131 := position, tokenIndex { position132 := position { switch buffer[position] { case '\\': if buffer[position] != rune('\\') { goto l131 } position++ break case '/': if buffer[position] != rune('/') { goto l131 } position++ break case '-': if buffer[position] != rune('-') { goto l131 } position++ break case '.': if buffer[position] != rune('.') { goto l131 } position++ break case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': if c := buffer[position]; c < rune('0') || c > rune('9') { goto l131 } position++ break default: if !_rules[ruleIdentifierStart]() { goto l131 } break } } add(ruleIdentifierChars, position132) } return true l131: position, tokenIndex = position131, tokenIndex131 return false }, /* 16 Operator <- 
<(<OperatorSymbols> Spacing)> */ nil, /* 17 OperatorSymbols <- <(('<' '=') / ('>' '=') / ((&('>') '>') | (&('!') ('!' '=')) | (&('=') ('=' '=')) | (&('<') '<')))> */ nil, /* 18 Boolean <- <(<(True / False)> !PatternChars Spacing)> */ nil, /* 19 True <- <(('t' / 'T') ('r' / 'R') ('u' / 'U') ('e' / 'E'))> */ nil, /* 20 False <- <(('f' / 'F') ('a' / 'A') ('l' / 'L') ('s' / 'S') ('e' / 'E'))> */ nil, /* 21 Number <- <(<(Minus? (FloatingNumber / IntegralNumber))> !PatternChars Spacing)> */ nil, /* 22 IntegralNumber <- <[0-9]+> */ func() bool { position140, tokenIndex140 := position, tokenIndex { position141 := position if c := buffer[position]; c < rune('0') || c > rune('9') { goto l140 } position++ l142: { position143, tokenIndex143 := position, tokenIndex if c := buffer[position]; c < rune('0') || c > rune('9') { goto l143 } position++ goto l142 l143: position, tokenIndex = position143, tokenIndex143 } add(ruleIntegralNumber, position141) } return true l140: position, tokenIndex = position140, tokenIndex140 return false }, /* 23 FloatingNumber <- <(IntegralNumber? '.' 
IntegralNumber)> */ nil, /* 24 Minus <- <'-'> */ nil, /* 25 StringLiteral <- <(QuoteChar <(!'"' .)*> QuoteChar Spacing)> */ nil, /* 26 QuoteChar <- <'"'> */ func() bool { position147, tokenIndex147 := position, tokenIndex { position148 := position if buffer[position] != rune('"') { goto l147 } position++ add(ruleQuoteChar, position148) } return true l147: position, tokenIndex = position147, tokenIndex147 return false }, /* 27 Pattern <- <(<PatternChars+> Spacing)> */ nil, /* 28 PatternChars <- <(IdentifierChars / GlobSymbols)> */ func() bool { position150, tokenIndex150 := position, tokenIndex { position151 := position { position152, tokenIndex152 := position, tokenIndex if !_rules[ruleIdentifierChars]() { goto l153 } goto l152 l153: position, tokenIndex = position152, tokenIndex152 { position154 := position { switch buffer[position] { case '$': if buffer[position] != rune('$') { goto l150 } position++ break case '^': if buffer[position] != rune('^') { goto l150 } position++ break case ',': if buffer[position] != rune(',') { goto l150 } position++ break case '?': if buffer[position] != rune('?') { goto l150 } position++ break case '*': if buffer[position] != rune('*') { goto l150 } position++ break case ']': if buffer[position] != rune(']') { goto l150 } position++ break case '[': if buffer[position] != rune('[') { goto l150 } position++ break case '}': if buffer[position] != rune('}') { goto l150 } position++ break default: if buffer[position] != rune('{') { goto l150 } position++ break } } add(ruleGlobSymbols, position154) } } l152: add(rulePatternChars, position151) } return true l150: position, tokenIndex = position150, tokenIndex150 return false }, /* 29 GlobSymbols <- <((&('$') '$') | (&('^') '^') | (&(',') ',') | (&('?') '?') | (&('*') '*') | (&(']') ']') | (&('[') '[') | (&('}') '}') | (&('{') '{'))> */ nil, /* 30 Semicolon <- <(';' Spacing)> */ nil, /* 31 Equals <- <('=' Spacing)> */ nil, /* 32 Pipe <- <('|' Spacing)> */ nil, /* 33 LParenthesis <- <('(' 
Spacing)> */ nil, /* 34 RParenthesis <- <(')' Spacing)> */ nil, /* 35 Colon <- <(':' Spacing)> */ nil, /* 36 EOF <- <!.> */ nil, /* 38 Action0 <- <{ p.newMacro(text) }> */ nil, /* 39 Action1 <- <{ p.newPipeline() }> */ nil, /* 40 Action2 <- <{ p.endPipeline() }> */ nil, /* 41 Action3 <- <{ p.newExpression(text) }> */ nil, /* 42 Action4 <- <{ p.endExpression() }> */ nil, /* 43 Action5 <- <{ p.newBooleanArgument(text) }> */ nil, /* 44 Action6 <- <{ p.newNumericArgument(text) }> */ nil, /* 45 Action7 <- <{ p.newPatternArgument(text) }> */ nil, /* 46 Action8 <- <{ p.newStringLiteralArgument(text) }> */ nil, /* 47 Action9 <- <{ p.newKeywordArgument(text) }> */ nil, nil, } p.rules = _rules }<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod ast; pub mod attr; mod ctxt; pub use self::ctxt::Ctxt; mod case; mod check; mod symbol; #[derive(Copy, Clone)] pub enum Derive { Serialize, Deserialize,<|fim▁hole|><|fim▁end|>
}
<|file_name|>IPPExportOptions.java<|end_file_name|><|fim▁begin|>/** Copyright 2010 Christian Kästner This file is part of CIDE. CIDE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 3 of the License. CIDE is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with CIDE. If not, see <http://www.gnu.org/licenses/>. See http://www.fosd.de/cide/ for further information. */ package de.ovgu.cide.export.virtual.internal; import java.util.Set; import org.eclipse.jdt.core.dom.CompilationUnit; <|fim▁hole|>import de.ovgu.cide.export.CopiedNaiveASTFlattener; import de.ovgu.cide.export.useroptions.IUserOptionProvider; import de.ovgu.cide.features.IFeature; import de.ovgu.cide.features.source.ColoredSourceFile; /** * how to print annotations? note: we assume ifdef semantics, i.e. annotations * may be nested, but always close in the reverse order * * * @author ckaestne * */ public interface IPPExportOptions extends IUserOptionProvider { /** * should the start and end instructions be printed in a new line? (i.e. * should a line break be enforced before?) 
* * the instruction is responsible for the linebreak at the end itself * * @return */ boolean inNewLine(); /** * get the code statement(s) to begin an annotation block * * @param f * set of features annotated for the current element * @return */ String getStartInstruction(Set<IFeature> f); /** * get the code statement(s) to end an annotation block * * @param f * set of features annotated for the current element * @return */ String getEndInstruction(Set<IFeature> f); CopiedNaiveASTFlattener getPrettyPrinter(ColoredSourceFile sourceFile); /** * allows the developer to change the AST before printing it. can be used * for some refactorings. returns the modified AST * * @param root * @param sourceFile * @return */ CompilationUnit refactorAST(CompilationUnit root, ColoredSourceFile sourceFile); }<|fim▁end|>
<|file_name|>test_sftp_big.py<|end_file_name|><|fim▁begin|># Copyright (C) 2003-2009 Robey Pointer <[email protected]> # # This file is part of paramiko. # # Paramiko is free software; you can redistribute it and/or modify it under the # terms of the GNU Lesser General Public License as published by the Free # Software Foundation; either version 2.1 of the License, or (at your option) # any later version. # # Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with Paramiko; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. """ some unit tests to make sure sftp works well with large files. a real actual sftp server is contacted, and a new folder is created there to do test file operations in (so no existing files will be harmed). """ import os import random import struct import sys import time import unittest from paramiko.common import o660 from tests.test_sftp import get_sftp FOLDER = os.environ.get('TEST_FOLDER', 'temp-testing000') class BigSFTPTest (unittest.TestCase): def setUp(self): global FOLDER sftp = get_sftp() for i in range(1000): FOLDER = FOLDER[:-3] + '%03d' % i try: sftp.mkdir(FOLDER) break except (IOError, OSError): pass def tearDown(self): sftp = get_sftp() sftp.rmdir(FOLDER) def test_1_lots_of_files(self): """ create a bunch of files over the same session. """ sftp = get_sftp() numfiles = 100 try: for i in range(numfiles): with sftp.open('%s/file%d.txt' % (FOLDER, i), 'w', 1) as f: f.write('this is file #%d.\n' % i) sftp.chmod('%s/file%d.txt' % (FOLDER, i), o660) # now make sure every file is there, by creating a list of filenmes # and reading them in random order. 
numlist = list(range(numfiles)) while len(numlist) > 0: r = numlist[random.randint(0, len(numlist) - 1)] with sftp.open('%s/file%d.txt' % (FOLDER, r)) as f: self.assertEqual(f.readline(), 'this is file #%d.\n' % r) numlist.remove(r) finally: for i in range(numfiles): try: sftp.remove('%s/file%d.txt' % (FOLDER, i)) except: pass def test_2_big_file(self): """ write a 1MB file with no buffering. """ sftp = get_sftp() kblob = (1024 * b'x') start = time.time() try: with sftp.open('%s/hongry.txt' % FOLDER, 'w') as f: for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write('.') sys.stderr.write(' ') self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) end = time.time() sys.stderr.write('%ds ' % round(end - start)) start = time.time() with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f: for n in range(1024): data = f.read(1024) self.assertEqual(data, kblob) end = time.time() sys.stderr.write('%ds ' % round(end - start)) finally: sftp.remove('%s/hongry.txt' % FOLDER) def test_3_big_file_pipelined(self): """ write a 1MB file, with no linefeeds, using pipelining. 
""" sftp = get_sftp() kblob = bytes().join([struct.pack('>H', n) for n in range(512)]) start = time.time() try: with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write('.') sys.stderr.write(' ') self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) end = time.time() sys.stderr.write('%ds ' % round(end - start)) start = time.time() with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f: file_size = f.stat().st_size f.prefetch(file_size) # read on odd boundaries to make sure the bytes aren't getting scrambled n = 0 k2blob = kblob + kblob chunk = 629 size = 1024 * 1024 while n < size: if n + chunk > size: chunk = size - n data = f.read(chunk) offset = n % 1024 self.assertEqual(data, k2blob[offset:offset + chunk]) n += chunk end = time.time() sys.stderr.write('%ds ' % round(end - start)) finally: sftp.remove('%s/hongry.txt' % FOLDER) def test_4_prefetch_seek(self): sftp = get_sftp() kblob = bytes().join([struct.pack('>H', n) for n in range(512)]) try: with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write('.') sys.stderr.write(' ') self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) start = time.time() k2blob = kblob + kblob chunk = 793 for i in range(10): with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f: file_size = f.stat().st_size f.prefetch(file_size) base_offset = (512 * 1024) + 17 * random.randint(1000, 2000) offsets = [base_offset + j * chunk for j in range(100)] # randomly seek around and read them out for j in range(100): offset = offsets[random.randint(0, len(offsets) - 1)] offsets.remove(offset) f.seek(offset) data = f.read(chunk) n_offset = offset % 1024 self.assertEqual(data, k2blob[n_offset:n_offset + chunk]) offset += chunk end = time.time() sys.stderr.write('%ds ' % round(end - start)) finally: sftp.remove('%s/hongry.txt' % FOLDER) 
def test_5_readv_seek(self): sftp = get_sftp() kblob = bytes().join([struct.pack('>H', n) for n in range(512)]) try: with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write('.') sys.stderr.write(' ') self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) start = time.time() k2blob = kblob + kblob chunk = 793 for i in range(10): with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f: base_offset = (512 * 1024) + 17 * random.randint(1000, 2000) # make a bunch of offsets and put them in random order offsets = [base_offset + j * chunk for j in range(100)] readv_list = [] for j in range(100): o = offsets[random.randint(0, len(offsets) - 1)] offsets.remove(o) readv_list.append((o, chunk)) ret = f.readv(readv_list) for i in range(len(readv_list)): offset = readv_list[i][0] n_offset = offset % 1024 self.assertEqual(next(ret), k2blob[n_offset:n_offset + chunk]) end = time.time() sys.stderr.write('%ds ' % round(end - start)) finally: sftp.remove('%s/hongry.txt' % FOLDER) def test_6_lots_of_prefetching(self): """ prefetch a 1MB file a bunch of times, discarding the file object without using it, to verify that paramiko doesn't get confused. 
""" sftp = get_sftp() kblob = (1024 * b'x') try: with sftp.open('%s/hongry.txt' % FOLDER, 'w') as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write('.') sys.stderr.write(' ') self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) for i in range(10): with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f: file_size = f.stat().st_size f.prefetch(file_size) with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f: file_size = f.stat().st_size f.prefetch(file_size) for n in range(1024): data = f.read(1024) self.assertEqual(data, kblob) if n % 128 == 0: sys.stderr.write('.') sys.stderr.write(' ') finally: sftp.remove('%s/hongry.txt' % FOLDER) def test_7_prefetch_readv(self): """ verify that prefetch and readv don't conflict with each other. """ sftp = get_sftp() kblob = bytes().join([struct.pack('>H', n) for n in range(512)]) try: with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write('.') sys.stderr.write(' ') self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f: file_size = f.stat().st_size f.prefetch(file_size) data = f.read(1024) self.assertEqual(data, kblob) chunk_size = 793 base_offset = 512 * 1024 k2blob = kblob + kblob chunks = [(base_offset + (chunk_size * i), chunk_size) for i in range(20)] for data in f.readv(chunks): offset = base_offset % 1024 self.assertEqual(chunk_size, len(data)) self.assertEqual(k2blob[offset:offset + chunk_size], data) base_offset += chunk_size sys.stderr.write(' ') finally: sftp.remove('%s/hongry.txt' % FOLDER) def test_8_large_readv(self): """ verify that a very large readv is broken up correctly and still returned as a single blob. 
""" sftp = get_sftp() kblob = bytes().join([struct.pack('>H', n) for n in range(512)]) try: with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write('.') sys.stderr.write(' ') self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f: data = list(f.readv([(23 * 1024, 128 * 1024)])) self.assertEqual(1, len(data)) data = data[0] self.assertEqual(128 * 1024, len(data)) sys.stderr.write(' ') finally: sftp.remove('%s/hongry.txt' % FOLDER) def test_9_big_file_big_buffer(self): """ write a 1MB file, with no linefeeds, and a big buffer. """ sftp = get_sftp() mblob = (1024 * 1024 * 'x') try: with sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024) as f: f.write(mblob) self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) finally: sftp.remove('%s/hongry.txt' % FOLDER) def test_A_big_file_renegotiate(self): """ write a 1MB file, forcing key renegotiation in the middle. """ sftp = get_sftp() t = sftp.sock.get_transport() t.packetizer.REKEY_BYTES = 512 * 1024 k32blob = (32 * 1024 * 'x') try: with sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024) as f: for i in range(32): f.write(k32blob) <|fim▁hole|> # try to read it too. with sftp.open('%s/hongry.txt' % FOLDER, 'r', 128 * 1024) as f: file_size = f.stat().st_size f.prefetch(file_size) total = 0 while total < 1024 * 1024: total += len(f.read(32 * 1024)) finally: sftp.remove('%s/hongry.txt' % FOLDER) t.packetizer.REKEY_BYTES = pow(2, 30) if __name__ == '__main__': from tests.test_sftp import SFTPTest SFTPTest.init_loopback() from unittest import main main()<|fim▁end|>
self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) self.assertNotEqual(t.H, t.session_id)
<|file_name|>testbinding.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // check-tidy: no specs after this line use core::nonzero::NonZero; use dom::bindings::callback::ExceptionHandling; use dom::bindings::codegen::Bindings::EventListenerBinding::EventListener; use dom::bindings::codegen::Bindings::FunctionBinding::Function; use dom::bindings::codegen::Bindings::TestBindingBinding::{self, SimpleCallback}; use dom::bindings::codegen::Bindings::TestBindingBinding::{TestBindingMethods, TestDictionary}; use dom::bindings::codegen::Bindings::TestBindingBinding::{TestDictionaryDefaults, TestEnum}; use dom::bindings::codegen::UnionTypes; use dom::bindings::codegen::UnionTypes::{BlobOrBoolean, BlobOrBlobSequence, LongOrLongSequenceSequence}; use dom::bindings::codegen::UnionTypes::{BlobOrString, BlobOrUnsignedLong, EventOrString}; use dom::bindings::codegen::UnionTypes::{ByteStringOrLong, ByteStringSequenceOrLongOrString}; use dom::bindings::codegen::UnionTypes::{ByteStringSequenceOrLong, DocumentOrTestTypedef}; use dom::bindings::codegen::UnionTypes::{EventOrUSVString, HTMLElementOrLong, LongSequenceOrTestTypedef}; use dom::bindings::codegen::UnionTypes::{HTMLElementOrUnsignedLongOrStringOrBoolean, LongSequenceOrBoolean}; use dom::bindings::codegen::UnionTypes::{StringOrLongSequence, StringOrStringSequence, StringSequenceOrUnsignedLong}; use dom::bindings::codegen::UnionTypes::{StringOrUnsignedLong, StringOrBoolean, UnsignedLongOrBoolean}; use dom::bindings::error::{Error, Fallible}; use dom::bindings::js::Root; use dom::bindings::mozmap::MozMap; use dom::bindings::num::Finite; use dom::bindings::refcounted::TrustedPromise; use dom::bindings::reflector::{Reflectable, Reflector, reflect_dom_object}; use dom::bindings::str::{ByteString, DOMString, USVString}; use 
dom::bindings::weakref::MutableWeakRef; use dom::blob::{Blob, BlobImpl}; use dom::globalscope::GlobalScope; use dom::promise::Promise; use dom::promisenativehandler::{PromiseNativeHandler, Callback}; use dom::url::URL; use js::jsapi::{HandleObject, HandleValue, JSContext, JSObject, JSAutoCompartment}; use js::jsapi::{JS_NewPlainObject, JS_NewUint8ClampedArray}; use js::jsval::{JSVal, NullValue}; use script_traits::MsDuration; use std::borrow::ToOwned; use std::ptr; use std::rc::Rc; use timers::OneshotTimerCallback; use util::prefs::PREFS; #[dom_struct] pub struct TestBinding { reflector_: Reflector, url: MutableWeakRef<URL>, } impl TestBinding { fn new_inherited() -> TestBinding { TestBinding { reflector_: Reflector::new(), url: MutableWeakRef::new(None), } } pub fn new(global: &GlobalScope) -> Root<TestBinding> { reflect_dom_object(box TestBinding::new_inherited(), global, TestBindingBinding::Wrap) } pub fn Constructor(global: &GlobalScope) -> Fallible<Root<TestBinding>> { Ok(TestBinding::new(global)) } #[allow(unused_variables)] pub fn Constructor_(global: &GlobalScope, nums: Vec<f64>) -> Fallible<Root<TestBinding>> { Ok(TestBinding::new(global)) } #[allow(unused_variables)] pub fn Constructor__(global: &GlobalScope, num: f64) -> Fallible<Root<TestBinding>> { Ok(TestBinding::new(global)) } } impl TestBindingMethods for TestBinding { fn BooleanAttribute(&self) -> bool { false } fn SetBooleanAttribute(&self, _: bool) {} fn ByteAttribute(&self) -> i8 { 0 } fn SetByteAttribute(&self, _: i8) {} fn OctetAttribute(&self) -> u8 { 0 } fn SetOctetAttribute(&self, _: u8) {} fn ShortAttribute(&self) -> i16 { 0 } fn SetShortAttribute(&self, _: i16) {} fn UnsignedShortAttribute(&self) -> u16 { 0 } fn SetUnsignedShortAttribute(&self, _: u16) {} fn LongAttribute(&self) -> i32 { 0 } fn SetLongAttribute(&self, _: i32) {} fn UnsignedLongAttribute(&self) -> u32 { 0 } fn SetUnsignedLongAttribute(&self, _: u32) {} fn LongLongAttribute(&self) -> i64 { 0 } fn SetLongLongAttribute(&self, 
_: i64) {} fn UnsignedLongLongAttribute(&self) -> u64 { 0 } fn SetUnsignedLongLongAttribute(&self, _: u64) {} fn UnrestrictedFloatAttribute(&self) -> f32 { 0. } fn SetUnrestrictedFloatAttribute(&self, _: f32) {} fn FloatAttribute(&self) -> Finite<f32> { Finite::wrap(0.) } fn SetFloatAttribute(&self, _: Finite<f32>) {} fn UnrestrictedDoubleAttribute(&self) -> f64 { 0. } fn SetUnrestrictedDoubleAttribute(&self, _: f64) {} fn DoubleAttribute(&self) -> Finite<f64> { Finite::wrap(0.) } fn SetDoubleAttribute(&self, _: Finite<f64>) {} fn StringAttribute(&self) -> DOMString { DOMString::new() } fn SetStringAttribute(&self, _: DOMString) {} fn UsvstringAttribute(&self) -> USVString { USVString("".to_owned()) } fn SetUsvstringAttribute(&self, _: USVString) {} fn ByteStringAttribute(&self) -> ByteString { ByteString::new(vec!()) } fn SetByteStringAttribute(&self, _: ByteString) {} fn EnumAttribute(&self) -> TestEnum { TestEnum::_empty } fn SetEnumAttribute(&self, _: TestEnum) {} fn InterfaceAttribute(&self) -> Root<Blob> { Blob::new(&self.global(), BlobImpl::new_from_bytes(vec![]), "".to_owned()) } fn SetInterfaceAttribute(&self, _: &Blob) {} fn UnionAttribute(&self) -> HTMLElementOrLong { HTMLElementOrLong::Long(0) } fn SetUnionAttribute(&self, _: HTMLElementOrLong) {} fn Union2Attribute(&self) -> EventOrString { EventOrString::String(DOMString::new()) } fn SetUnion2Attribute(&self, _: EventOrString) {} fn Union3Attribute(&self) -> EventOrUSVString { EventOrUSVString::USVString(USVString("".to_owned())) } fn SetUnion3Attribute(&self, _: EventOrUSVString) {} fn Union4Attribute(&self) -> StringOrUnsignedLong { StringOrUnsignedLong::UnsignedLong(0u32) } fn SetUnion4Attribute(&self, _: StringOrUnsignedLong) {} fn Union5Attribute(&self) -> StringOrBoolean { StringOrBoolean::Boolean(true) } fn SetUnion5Attribute(&self, _: StringOrBoolean) {} fn Union6Attribute(&self) -> UnsignedLongOrBoolean { UnsignedLongOrBoolean::Boolean(true) } fn SetUnion6Attribute(&self, _: 
UnsignedLongOrBoolean) {} fn Union7Attribute(&self) -> BlobOrBoolean { BlobOrBoolean::Boolean(true) } fn SetUnion7Attribute(&self, _: BlobOrBoolean) {} fn Union8Attribute(&self) -> BlobOrUnsignedLong { BlobOrUnsignedLong::UnsignedLong(0u32) } fn SetUnion8Attribute(&self, _: BlobOrUnsignedLong) {} fn Union9Attribute(&self) -> ByteStringOrLong { ByteStringOrLong::ByteString(ByteString::new(vec!())) } fn SetUnion9Attribute(&self, _: ByteStringOrLong) {} #[allow(unsafe_code)] unsafe fn ArrayAttribute(&self, cx: *mut JSContext) -> NonZero<*mut JSObject> { rooted!(in(cx) let array = JS_NewUint8ClampedArray(cx, 16)); assert!(!array.is_null()); NonZero::new(array.get()) } #[allow(unsafe_code)] unsafe fn AnyAttribute(&self, _: *mut JSContext) -> JSVal { NullValue() } #[allow(unsafe_code)] unsafe fn SetAnyAttribute(&self, _: *mut JSContext, _: HandleValue) {} #[allow(unsafe_code)] unsafe fn ObjectAttribute(&self, cx: *mut JSContext) -> NonZero<*mut JSObject> { rooted!(in(cx) let obj = JS_NewPlainObject(cx)); assert!(!obj.is_null());<|fim▁hole|> NonZero::new(obj.get()) } #[allow(unsafe_code)] unsafe fn SetObjectAttribute(&self, _: *mut JSContext, _: *mut JSObject) {} fn GetBooleanAttributeNullable(&self) -> Option<bool> { Some(false) } fn SetBooleanAttributeNullable(&self, _: Option<bool>) {} fn GetByteAttributeNullable(&self) -> Option<i8> { Some(0) } fn SetByteAttributeNullable(&self, _: Option<i8>) {} fn GetOctetAttributeNullable(&self) -> Option<u8> { Some(0) } fn SetOctetAttributeNullable(&self, _: Option<u8>) {} fn GetShortAttributeNullable(&self) -> Option<i16> { Some(0) } fn SetShortAttributeNullable(&self, _: Option<i16>) {} fn GetUnsignedShortAttributeNullable(&self) -> Option<u16> { Some(0) } fn SetUnsignedShortAttributeNullable(&self, _: Option<u16>) {} fn GetLongAttributeNullable(&self) -> Option<i32> { Some(0) } fn SetLongAttributeNullable(&self, _: Option<i32>) {} fn GetUnsignedLongAttributeNullable(&self) -> Option<u32> { Some(0) } fn 
SetUnsignedLongAttributeNullable(&self, _: Option<u32>) {} fn GetLongLongAttributeNullable(&self) -> Option<i64> { Some(0) } fn SetLongLongAttributeNullable(&self, _: Option<i64>) {} fn GetUnsignedLongLongAttributeNullable(&self) -> Option<u64> { Some(0) } fn SetUnsignedLongLongAttributeNullable(&self, _: Option<u64>) {} fn GetUnrestrictedFloatAttributeNullable(&self) -> Option<f32> { Some(0.) } fn SetUnrestrictedFloatAttributeNullable(&self, _: Option<f32>) {} fn GetFloatAttributeNullable(&self) -> Option<Finite<f32>> { Some(Finite::wrap(0.)) } fn SetFloatAttributeNullable(&self, _: Option<Finite<f32>>) {} fn GetUnrestrictedDoubleAttributeNullable(&self) -> Option<f64> { Some(0.) } fn SetUnrestrictedDoubleAttributeNullable(&self, _: Option<f64>) {} fn GetDoubleAttributeNullable(&self) -> Option<Finite<f64>> { Some(Finite::wrap(0.)) } fn SetDoubleAttributeNullable(&self, _: Option<Finite<f64>>) {} fn GetByteStringAttributeNullable(&self) -> Option<ByteString> { Some(ByteString::new(vec!())) } fn SetByteStringAttributeNullable(&self, _: Option<ByteString>) {} fn GetStringAttributeNullable(&self) -> Option<DOMString> { Some(DOMString::new()) } fn SetStringAttributeNullable(&self, _: Option<DOMString>) {} fn GetUsvstringAttributeNullable(&self) -> Option<USVString> { Some(USVString("".to_owned())) } fn SetUsvstringAttributeNullable(&self, _: Option<USVString>) {} fn SetBinaryRenamedAttribute(&self, _: DOMString) {} fn ForwardedAttribute(&self) -> Root<TestBinding> { Root::from_ref(self) } fn BinaryRenamedAttribute(&self) -> DOMString { DOMString::new() } fn SetBinaryRenamedAttribute2(&self, _: DOMString) {} fn BinaryRenamedAttribute2(&self) -> DOMString { DOMString::new() } fn Attr_to_automatically_rename(&self) -> DOMString { DOMString::new() } fn SetAttr_to_automatically_rename(&self, _: DOMString) {} fn GetEnumAttributeNullable(&self) -> Option<TestEnum> { Some(TestEnum::_empty) } fn GetInterfaceAttributeNullable(&self) -> Option<Root<Blob>> { 
Some(Blob::new(&self.global(), BlobImpl::new_from_bytes(vec![]), "".to_owned())) } fn SetInterfaceAttributeNullable(&self, _: Option<&Blob>) {} fn GetInterfaceAttributeWeak(&self) -> Option<Root<URL>> { self.url.root() } fn SetInterfaceAttributeWeak(&self, url: Option<&URL>) { self.url.set(url); } #[allow(unsafe_code)] unsafe fn GetObjectAttributeNullable(&self, _: *mut JSContext) -> Option<NonZero<*mut JSObject>> { None } #[allow(unsafe_code)] unsafe fn SetObjectAttributeNullable(&self, _: *mut JSContext, _: *mut JSObject) {} fn GetUnionAttributeNullable(&self) -> Option<HTMLElementOrLong> { Some(HTMLElementOrLong::Long(0)) } fn SetUnionAttributeNullable(&self, _: Option<HTMLElementOrLong>) {} fn GetUnion2AttributeNullable(&self) -> Option<EventOrString> { Some(EventOrString::String(DOMString::new())) } fn SetUnion2AttributeNullable(&self, _: Option<EventOrString>) {} fn GetUnion3AttributeNullable(&self) -> Option<BlobOrBoolean> { Some(BlobOrBoolean::Boolean(true)) } fn SetUnion3AttributeNullable(&self, _: Option<BlobOrBoolean>) {} fn GetUnion4AttributeNullable(&self) -> Option<UnsignedLongOrBoolean> { Some(UnsignedLongOrBoolean::Boolean(true)) } fn SetUnion4AttributeNullable(&self, _: Option<UnsignedLongOrBoolean>) {} fn GetUnion5AttributeNullable(&self) -> Option<StringOrBoolean> { Some(StringOrBoolean::Boolean(true)) } fn SetUnion5AttributeNullable(&self, _: Option<StringOrBoolean>) {} fn GetUnion6AttributeNullable(&self) -> Option<ByteStringOrLong> { Some(ByteStringOrLong::ByteString(ByteString::new(vec!()))) } fn SetUnion6AttributeNullable(&self, _: Option<ByteStringOrLong>) {} fn BinaryRenamedMethod(&self) -> () {} fn ReceiveVoid(&self) -> () {} fn ReceiveBoolean(&self) -> bool { false } fn ReceiveByte(&self) -> i8 { 0 } fn ReceiveOctet(&self) -> u8 { 0 } fn ReceiveShort(&self) -> i16 { 0 } fn ReceiveUnsignedShort(&self) -> u16 { 0 } fn ReceiveLong(&self) -> i32 { 0 } fn ReceiveUnsignedLong(&self) -> u32 { 0 } fn ReceiveLongLong(&self) -> i64 { 0 } fn 
ReceiveUnsignedLongLong(&self) -> u64 { 0 } fn ReceiveUnrestrictedFloat(&self) -> f32 { 0. } fn ReceiveFloat(&self) -> Finite<f32> { Finite::wrap(0.) } fn ReceiveUnrestrictedDouble(&self) -> f64 { 0. } fn ReceiveDouble(&self) -> Finite<f64> { Finite::wrap(0.) } fn ReceiveString(&self) -> DOMString { DOMString::new() } fn ReceiveUsvstring(&self) -> USVString { USVString("".to_owned()) } fn ReceiveByteString(&self) -> ByteString { ByteString::new(vec!()) } fn ReceiveEnum(&self) -> TestEnum { TestEnum::_empty } fn ReceiveInterface(&self) -> Root<Blob> { Blob::new(&self.global(), BlobImpl::new_from_bytes(vec![]), "".to_owned()) } #[allow(unsafe_code)] unsafe fn ReceiveAny(&self, _: *mut JSContext) -> JSVal { NullValue() } #[allow(unsafe_code)] unsafe fn ReceiveObject(&self, cx: *mut JSContext) -> NonZero<*mut JSObject> { self.ObjectAttribute(cx) } fn ReceiveUnion(&self) -> HTMLElementOrLong { HTMLElementOrLong::Long(0) } fn ReceiveUnion2(&self) -> EventOrString { EventOrString::String(DOMString::new()) } fn ReceiveUnion3(&self) -> StringOrLongSequence { StringOrLongSequence::LongSequence(vec![]) } fn ReceiveUnion4(&self) -> StringOrStringSequence { StringOrStringSequence::StringSequence(vec![]) } fn ReceiveUnion5(&self) -> BlobOrBlobSequence { BlobOrBlobSequence::BlobSequence(vec![]) } fn ReceiveUnion6(&self) -> StringOrUnsignedLong { StringOrUnsignedLong::String(DOMString::new()) } fn ReceiveUnion7(&self) -> StringOrBoolean { StringOrBoolean::Boolean(true) } fn ReceiveUnion8(&self) -> UnsignedLongOrBoolean { UnsignedLongOrBoolean::UnsignedLong(0u32) } fn ReceiveUnion9(&self) -> HTMLElementOrUnsignedLongOrStringOrBoolean { HTMLElementOrUnsignedLongOrStringOrBoolean::Boolean(true) } fn ReceiveUnion10(&self) -> ByteStringOrLong { ByteStringOrLong::ByteString(ByteString::new(vec!())) } fn ReceiveUnion11(&self) -> ByteStringSequenceOrLongOrString { ByteStringSequenceOrLongOrString::ByteStringSequence(vec!(ByteString::new(vec!()))) } fn ReceiveSequence(&self) -> Vec<i32> { 
vec![1] } fn ReceiveInterfaceSequence(&self) -> Vec<Root<Blob>> { vec![Blob::new(&self.global(), BlobImpl::new_from_bytes(vec![]), "".to_owned())] } fn ReceiveNullableBoolean(&self) -> Option<bool> { Some(false) } fn ReceiveNullableByte(&self) -> Option<i8> { Some(0) } fn ReceiveNullableOctet(&self) -> Option<u8> { Some(0) } fn ReceiveNullableShort(&self) -> Option<i16> { Some(0) } fn ReceiveNullableUnsignedShort(&self) -> Option<u16> { Some(0) } fn ReceiveNullableLong(&self) -> Option<i32> { Some(0) } fn ReceiveNullableUnsignedLong(&self) -> Option<u32> { Some(0) } fn ReceiveNullableLongLong(&self) -> Option<i64> { Some(0) } fn ReceiveNullableUnsignedLongLong(&self) -> Option<u64> { Some(0) } fn ReceiveNullableUnrestrictedFloat(&self) -> Option<f32> { Some(0.) } fn ReceiveNullableFloat(&self) -> Option<Finite<f32>> { Some(Finite::wrap(0.)) } fn ReceiveNullableUnrestrictedDouble(&self) -> Option<f64> { Some(0.) } fn ReceiveNullableDouble(&self) -> Option<Finite<f64>> { Some(Finite::wrap(0.)) } fn ReceiveNullableString(&self) -> Option<DOMString> { Some(DOMString::new()) } fn ReceiveNullableUsvstring(&self) -> Option<USVString> { Some(USVString("".to_owned())) } fn ReceiveNullableByteString(&self) -> Option<ByteString> { Some(ByteString::new(vec!())) } fn ReceiveNullableEnum(&self) -> Option<TestEnum> { Some(TestEnum::_empty) } fn ReceiveNullableInterface(&self) -> Option<Root<Blob>> { Some(Blob::new(&self.global(), BlobImpl::new_from_bytes(vec![]), "".to_owned())) } #[allow(unsafe_code)] unsafe fn ReceiveNullableObject(&self, cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> { self.GetObjectAttributeNullable(cx) } fn ReceiveNullableUnion(&self) -> Option<HTMLElementOrLong> { Some(HTMLElementOrLong::Long(0)) } fn ReceiveNullableUnion2(&self) -> Option<EventOrString> { Some(EventOrString::String(DOMString::new())) } fn ReceiveNullableUnion3(&self) -> Option<StringOrLongSequence> { Some(StringOrLongSequence::String(DOMString::new())) } fn 
ReceiveNullableUnion4(&self) -> Option<LongSequenceOrBoolean> { Some(LongSequenceOrBoolean::Boolean(true)) } fn ReceiveNullableUnion5(&self) -> Option<UnsignedLongOrBoolean> { Some(UnsignedLongOrBoolean::UnsignedLong(0u32)) } fn ReceiveNullableUnion6(&self) -> Option<ByteStringOrLong> { Some(ByteStringOrLong::ByteString(ByteString::new(vec!()))) } fn ReceiveNullableSequence(&self) -> Option<Vec<i32>> { Some(vec![1]) } fn ReceiveTestDictionaryWithSuccessOnKeyword(&self) -> TestDictionary { TestDictionary { anyValue: NullValue(), booleanValue: None, byteValue: None, dict: TestDictionaryDefaults { UnrestrictedDoubleValue: 0.0, anyValue: NullValue(), booleanValue: false, bytestringValue: ByteString::new(vec![]), byteValue: 0, doubleValue: Finite::new(1.0).unwrap(), enumValue: TestEnum::Foo, floatValue: Finite::new(1.0).unwrap(), longLongValue: 54, longValue: 12, nullableBooleanValue: None, nullableBytestringValue: None, nullableByteValue: None, nullableDoubleValue: None, nullableFloatValue: None, nullableLongLongValue: None, nullableLongValue: None, nullableObjectValue: ptr::null_mut(), nullableOctetValue: None, nullableShortValue: None, nullableStringValue: None, nullableUnrestrictedDoubleValue: None, nullableUnrestrictedFloatValue: None, nullableUnsignedLongLongValue: None, nullableUnsignedLongValue: None, nullableUnsignedShortValue: None, nullableUsvstringValue: None, octetValue: 0, shortValue: 0, stringValue: DOMString::new(), unrestrictedFloatValue: 0.0, unsignedLongLongValue: 0, unsignedLongValue: 0, unsignedShortValue: 0, usvstringValue: USVString("".to_owned()), }, doubleValue: None, enumValue: None, floatValue: None, interfaceValue: None, longLongValue: None, longValue: None, objectValue: None, octetValue: None, requiredValue: true, seqDict: None, shortValue: None, stringValue: None, type_: Some(DOMString::from("success")), unrestrictedDoubleValue: None, unrestrictedFloatValue: None, unsignedLongLongValue: None, unsignedLongValue: None, unsignedShortValue: 
None, usvstringValue: None, nonRequiredNullable: None, nonRequiredNullable2: Some(None), // null } } fn DictMatchesPassedValues(&self, arg: &TestDictionary) -> bool { arg.type_.as_ref().map(|s| s == "success").unwrap_or(false) && arg.nonRequiredNullable.is_none() && arg.nonRequiredNullable2 == Some(None) } fn PassBoolean(&self, _: bool) {} fn PassByte(&self, _: i8) {} fn PassOctet(&self, _: u8) {} fn PassShort(&self, _: i16) {} fn PassUnsignedShort(&self, _: u16) {} fn PassLong(&self, _: i32) {} fn PassUnsignedLong(&self, _: u32) {} fn PassLongLong(&self, _: i64) {} fn PassUnsignedLongLong(&self, _: u64) {} fn PassUnrestrictedFloat(&self, _: f32) {} fn PassFloat(&self, _: Finite<f32>) {} fn PassUnrestrictedDouble(&self, _: f64) {} fn PassDouble(&self, _: Finite<f64>) {} fn PassString(&self, _: DOMString) {} fn PassUsvstring(&self, _: USVString) {} fn PassByteString(&self, _: ByteString) {} fn PassEnum(&self, _: TestEnum) {} fn PassInterface(&self, _: &Blob) {} fn PassUnion(&self, _: HTMLElementOrLong) {} fn PassUnion2(&self, _: EventOrString) {} fn PassUnion3(&self, _: BlobOrString) {} fn PassUnion4(&self, _: StringOrStringSequence) {} fn PassUnion5(&self, _: StringOrBoolean) {} fn PassUnion6(&self, _: UnsignedLongOrBoolean) {} fn PassUnion7(&self, _: StringSequenceOrUnsignedLong) {} fn PassUnion8(&self, _: ByteStringSequenceOrLong) {} fn PassUnionWithTypedef(&self, _: DocumentOrTestTypedef) {} fn PassUnionWithTypedef2(&self, _: LongSequenceOrTestTypedef) {} #[allow(unsafe_code)] unsafe fn PassAny(&self, _: *mut JSContext, _: HandleValue) {} #[allow(unsafe_code)] unsafe fn PassObject(&self, _: *mut JSContext, _: *mut JSObject) {} fn PassCallbackFunction(&self, _: Rc<Function>) {} fn PassCallbackInterface(&self, _: Rc<EventListener>) {} fn PassSequence(&self, _: Vec<i32>) {} fn PassStringSequence(&self, _: Vec<DOMString>) {} fn PassInterfaceSequence(&self, _: Vec<Root<Blob>>) {} fn PassNullableBoolean(&self, _: Option<bool>) {} fn PassNullableByte(&self, _: 
Option<i8>) {} fn PassNullableOctet(&self, _: Option<u8>) {} fn PassNullableShort(&self, _: Option<i16>) {} fn PassNullableUnsignedShort(&self, _: Option<u16>) {} fn PassNullableLong(&self, _: Option<i32>) {} fn PassNullableUnsignedLong(&self, _: Option<u32>) {} fn PassNullableLongLong(&self, _: Option<i64>) {} fn PassNullableUnsignedLongLong(&self, _: Option<u64>) {} fn PassNullableUnrestrictedFloat(&self, _: Option<f32>) {} fn PassNullableFloat(&self, _: Option<Finite<f32>>) {} fn PassNullableUnrestrictedDouble(&self, _: Option<f64>) {} fn PassNullableDouble(&self, _: Option<Finite<f64>>) {} fn PassNullableString(&self, _: Option<DOMString>) {} fn PassNullableUsvstring(&self, _: Option<USVString>) {} fn PassNullableByteString(&self, _: Option<ByteString>) {} // fn PassNullableEnum(self, _: Option<TestEnum>) {} fn PassNullableInterface(&self, _: Option<&Blob>) {} #[allow(unsafe_code)] unsafe fn PassNullableObject(&self, _: *mut JSContext, _: *mut JSObject) {} fn PassNullableUnion(&self, _: Option<HTMLElementOrLong>) {} fn PassNullableUnion2(&self, _: Option<EventOrString>) {} fn PassNullableUnion3(&self, _: Option<StringOrLongSequence>) {} fn PassNullableUnion4(&self, _: Option<LongSequenceOrBoolean>) {} fn PassNullableUnion5(&self, _: Option<UnsignedLongOrBoolean>) {} fn PassNullableUnion6(&self, _: Option<ByteStringOrLong>) {} fn PassNullableCallbackFunction(&self, _: Option<Rc<Function>>) {} fn PassNullableCallbackInterface(&self, _: Option<Rc<EventListener>>) {} fn PassNullableSequence(&self, _: Option<Vec<i32>>) {} fn PassOptionalBoolean(&self, _: Option<bool>) {} fn PassOptionalByte(&self, _: Option<i8>) {} fn PassOptionalOctet(&self, _: Option<u8>) {} fn PassOptionalShort(&self, _: Option<i16>) {} fn PassOptionalUnsignedShort(&self, _: Option<u16>) {} fn PassOptionalLong(&self, _: Option<i32>) {} fn PassOptionalUnsignedLong(&self, _: Option<u32>) {} fn PassOptionalLongLong(&self, _: Option<i64>) {} fn PassOptionalUnsignedLongLong(&self, _: Option<u64>) {} 
fn PassOptionalUnrestrictedFloat(&self, _: Option<f32>) {} fn PassOptionalFloat(&self, _: Option<Finite<f32>>) {} fn PassOptionalUnrestrictedDouble(&self, _: Option<f64>) {} fn PassOptionalDouble(&self, _: Option<Finite<f64>>) {} fn PassOptionalString(&self, _: Option<DOMString>) {} fn PassOptionalUsvstring(&self, _: Option<USVString>) {} fn PassOptionalByteString(&self, _: Option<ByteString>) {} fn PassOptionalEnum(&self, _: Option<TestEnum>) {} fn PassOptionalInterface(&self, _: Option<&Blob>) {} fn PassOptionalUnion(&self, _: Option<HTMLElementOrLong>) {} fn PassOptionalUnion2(&self, _: Option<EventOrString>) {} fn PassOptionalUnion3(&self, _: Option<StringOrLongSequence>) {} fn PassOptionalUnion4(&self, _: Option<LongSequenceOrBoolean>) {} fn PassOptionalUnion5(&self, _: Option<UnsignedLongOrBoolean>) {} fn PassOptionalUnion6(&self, _: Option<ByteStringOrLong>) {} #[allow(unsafe_code)] unsafe fn PassOptionalAny(&self, _: *mut JSContext, _: HandleValue) {} #[allow(unsafe_code)] unsafe fn PassOptionalObject(&self, _: *mut JSContext, _: Option<*mut JSObject>) {} fn PassOptionalCallbackFunction(&self, _: Option<Rc<Function>>) {} fn PassOptionalCallbackInterface(&self, _: Option<Rc<EventListener>>) {} fn PassOptionalSequence(&self, _: Option<Vec<i32>>) {} fn PassOptionalNullableBoolean(&self, _: Option<Option<bool>>) {} fn PassOptionalNullableByte(&self, _: Option<Option<i8>>) {} fn PassOptionalNullableOctet(&self, _: Option<Option<u8>>) {} fn PassOptionalNullableShort(&self, _: Option<Option<i16>>) {} fn PassOptionalNullableUnsignedShort(&self, _: Option<Option<u16>>) {} fn PassOptionalNullableLong(&self, _: Option<Option<i32>>) {} fn PassOptionalNullableUnsignedLong(&self, _: Option<Option<u32>>) {} fn PassOptionalNullableLongLong(&self, _: Option<Option<i64>>) {} fn PassOptionalNullableUnsignedLongLong(&self, _: Option<Option<u64>>) {} fn PassOptionalNullableUnrestrictedFloat(&self, _: Option<Option<f32>>) {} fn PassOptionalNullableFloat(&self, _: 
Option<Option<Finite<f32>>>) {} fn PassOptionalNullableUnrestrictedDouble(&self, _: Option<Option<f64>>) {} fn PassOptionalNullableDouble(&self, _: Option<Option<Finite<f64>>>) {} fn PassOptionalNullableString(&self, _: Option<Option<DOMString>>) {} fn PassOptionalNullableUsvstring(&self, _: Option<Option<USVString>>) {} fn PassOptionalNullableByteString(&self, _: Option<Option<ByteString>>) {} // fn PassOptionalNullableEnum(self, _: Option<Option<TestEnum>>) {} fn PassOptionalNullableInterface(&self, _: Option<Option<&Blob>>) {} #[allow(unsafe_code)] unsafe fn PassOptionalNullableObject(&self, _: *mut JSContext, _: Option<*mut JSObject>) {} fn PassOptionalNullableUnion(&self, _: Option<Option<HTMLElementOrLong>>) {} fn PassOptionalNullableUnion2(&self, _: Option<Option<EventOrString>>) {} fn PassOptionalNullableUnion3(&self, _: Option<Option<StringOrLongSequence>>) {} fn PassOptionalNullableUnion4(&self, _: Option<Option<LongSequenceOrBoolean>>) {} fn PassOptionalNullableUnion5(&self, _: Option<Option<UnsignedLongOrBoolean>>) {} fn PassOptionalNullableUnion6(&self, _: Option<Option<ByteStringOrLong>>) {} fn PassOptionalNullableCallbackFunction(&self, _: Option<Option<Rc<Function>>>) {} fn PassOptionalNullableCallbackInterface(&self, _: Option<Option<Rc<EventListener>>>) {} fn PassOptionalNullableSequence(&self, _: Option<Option<Vec<i32>>>) {} fn PassOptionalBooleanWithDefault(&self, _: bool) {} fn PassOptionalByteWithDefault(&self, _: i8) {} fn PassOptionalOctetWithDefault(&self, _: u8) {} fn PassOptionalShortWithDefault(&self, _: i16) {} fn PassOptionalUnsignedShortWithDefault(&self, _: u16) {} fn PassOptionalLongWithDefault(&self, _: i32) {} fn PassOptionalUnsignedLongWithDefault(&self, _: u32) {} fn PassOptionalLongLongWithDefault(&self, _: i64) {} fn PassOptionalUnsignedLongLongWithDefault(&self, _: u64) {} fn PassOptionalStringWithDefault(&self, _: DOMString) {} fn PassOptionalUsvstringWithDefault(&self, _: USVString) {} fn 
PassOptionalBytestringWithDefault(&self, _: ByteString) {} fn PassOptionalEnumWithDefault(&self, _: TestEnum) {} fn PassOptionalNullableBooleanWithDefault(&self, _: Option<bool>) {} fn PassOptionalNullableByteWithDefault(&self, _: Option<i8>) {} fn PassOptionalNullableOctetWithDefault(&self, _: Option<u8>) {} fn PassOptionalNullableShortWithDefault(&self, _: Option<i16>) {} fn PassOptionalNullableUnsignedShortWithDefault(&self, _: Option<u16>) {} fn PassOptionalNullableLongWithDefault(&self, _: Option<i32>) {} fn PassOptionalNullableUnsignedLongWithDefault(&self, _: Option<u32>) {} fn PassOptionalNullableLongLongWithDefault(&self, _: Option<i64>) {} fn PassOptionalNullableUnsignedLongLongWithDefault(&self, _: Option<u64>) {} // fn PassOptionalNullableUnrestrictedFloatWithDefault(self, _: Option<f32>) {} // fn PassOptionalNullableFloatWithDefault(self, _: Option<Finite<f32>>) {} // fn PassOptionalNullableUnrestrictedDoubleWithDefault(self, _: Option<f64>) {} // fn PassOptionalNullableDoubleWithDefault(self, _: Option<Finite<f64>>) {} fn PassOptionalNullableStringWithDefault(&self, _: Option<DOMString>) {} fn PassOptionalNullableUsvstringWithDefault(&self, _: Option<USVString>) {} fn PassOptionalNullableByteStringWithDefault(&self, _: Option<ByteString>) {} // fn PassOptionalNullableEnumWithDefault(self, _: Option<TestEnum>) {} fn PassOptionalNullableInterfaceWithDefault(&self, _: Option<&Blob>) {} #[allow(unsafe_code)] unsafe fn PassOptionalNullableObjectWithDefault(&self, _: *mut JSContext, _: *mut JSObject) {} fn PassOptionalNullableUnionWithDefault(&self, _: Option<HTMLElementOrLong>) {} fn PassOptionalNullableUnion2WithDefault(&self, _: Option<EventOrString>) {} // fn PassOptionalNullableCallbackFunctionWithDefault(self, _: Option<Function>) {} fn PassOptionalNullableCallbackInterfaceWithDefault(&self, _: Option<Rc<EventListener>>) {} #[allow(unsafe_code)] unsafe fn PassOptionalAnyWithDefault(&self, _: *mut JSContext, _: HandleValue) {} fn 
PassOptionalNullableBooleanWithNonNullDefault(&self, _: Option<bool>) {} fn PassOptionalNullableByteWithNonNullDefault(&self, _: Option<i8>) {} fn PassOptionalNullableOctetWithNonNullDefault(&self, _: Option<u8>) {} fn PassOptionalNullableShortWithNonNullDefault(&self, _: Option<i16>) {} fn PassOptionalNullableUnsignedShortWithNonNullDefault(&self, _: Option<u16>) {} fn PassOptionalNullableLongWithNonNullDefault(&self, _: Option<i32>) {} fn PassOptionalNullableUnsignedLongWithNonNullDefault(&self, _: Option<u32>) {} fn PassOptionalNullableLongLongWithNonNullDefault(&self, _: Option<i64>) {} fn PassOptionalNullableUnsignedLongLongWithNonNullDefault(&self, _: Option<u64>) {} // fn PassOptionalNullableUnrestrictedFloatWithNonNullDefault(self, _: Option<f32>) {} // fn PassOptionalNullableFloatWithNonNullDefault(self, _: Option<Finite<f32>>) {} // fn PassOptionalNullableUnrestrictedDoubleWithNonNullDefault(self, _: Option<f64>) {} // fn PassOptionalNullableDoubleWithNonNullDefault(self, _: Option<Finite<f64>>) {} fn PassOptionalNullableStringWithNonNullDefault(&self, _: Option<DOMString>) {} fn PassOptionalNullableUsvstringWithNonNullDefault(&self, _: Option<USVString>) {} // fn PassOptionalNullableEnumWithNonNullDefault(self, _: Option<TestEnum>) {} fn PassVariadicBoolean(&self, _: Vec<bool>) {} fn PassVariadicBooleanAndDefault(&self, _: bool, _: Vec<bool>) {} fn PassVariadicByte(&self, _: Vec<i8>) {} fn PassVariadicOctet(&self, _: Vec<u8>) {} fn PassVariadicShort(&self, _: Vec<i16>) {} fn PassVariadicUnsignedShort(&self, _: Vec<u16>) {} fn PassVariadicLong(&self, _: Vec<i32>) {} fn PassVariadicUnsignedLong(&self, _: Vec<u32>) {} fn PassVariadicLongLong(&self, _: Vec<i64>) {} fn PassVariadicUnsignedLongLong(&self, _: Vec<u64>) {} fn PassVariadicUnrestrictedFloat(&self, _: Vec<f32>) {} fn PassVariadicFloat(&self, _: Vec<Finite<f32>>) {} fn PassVariadicUnrestrictedDouble(&self, _: Vec<f64>) {} fn PassVariadicDouble(&self, _: Vec<Finite<f64>>) {} fn 
PassVariadicString(&self, _: Vec<DOMString>) {} fn PassVariadicUsvstring(&self, _: Vec<USVString>) {} fn PassVariadicByteString(&self, _: Vec<ByteString>) {} fn PassVariadicEnum(&self, _: Vec<TestEnum>) {} fn PassVariadicInterface(&self, _: &[&Blob]) {} fn PassVariadicUnion(&self, _: Vec<HTMLElementOrLong>) {} fn PassVariadicUnion2(&self, _: Vec<EventOrString>) {} fn PassVariadicUnion3(&self, _: Vec<BlobOrString>) {} fn PassVariadicUnion4(&self, _: Vec<BlobOrBoolean>) {} fn PassVariadicUnion5(&self, _: Vec<StringOrUnsignedLong>) {} fn PassVariadicUnion6(&self, _: Vec<UnsignedLongOrBoolean>) {} fn PassVariadicUnion7(&self, _: Vec<ByteStringOrLong>) {} #[allow(unsafe_code)] unsafe fn PassVariadicAny(&self, _: *mut JSContext, _: Vec<HandleValue>) {} #[allow(unsafe_code)] unsafe fn PassVariadicObject(&self, _: *mut JSContext, _: Vec<*mut JSObject>) {} fn BooleanMozPreference(&self, pref_name: DOMString) -> bool { PREFS.get(pref_name.as_ref()).as_boolean().unwrap_or(false) } fn StringMozPreference(&self, pref_name: DOMString) -> DOMString { PREFS.get(pref_name.as_ref()).as_string().map(|s| DOMString::from(s)).unwrap_or_else(|| DOMString::new()) } fn PrefControlledAttributeDisabled(&self) -> bool { false } fn PrefControlledAttributeEnabled(&self) -> bool { false } fn PrefControlledMethodDisabled(&self) {} fn PrefControlledMethodEnabled(&self) {} fn FuncControlledAttributeDisabled(&self) -> bool { false } fn FuncControlledAttributeEnabled(&self) -> bool { false } fn FuncControlledMethodDisabled(&self) {} fn FuncControlledMethodEnabled(&self) {} fn PassMozMap(&self, _: MozMap<i32>) {} fn PassNullableMozMap(&self, _: Option<MozMap<i32> >) {} fn PassMozMapOfNullableInts(&self, _: MozMap<Option<i32>>) {} fn PassOptionalMozMapOfNullableInts(&self, _: Option<MozMap<Option<i32>>>) {} fn PassOptionalNullableMozMapOfNullableInts(&self, _: Option<Option<MozMap<Option<i32>> >>) {} fn PassCastableObjectMozMap(&self, _: MozMap<Root<TestBinding>>) {} fn 
PassNullableCastableObjectMozMap(&self, _: MozMap<Option<Root<TestBinding>>>) {} fn PassCastableObjectNullableMozMap(&self, _: Option<MozMap<Root<TestBinding>>>) {} fn PassNullableCastableObjectNullableMozMap(&self, _: Option<MozMap<Option<Root<TestBinding>>>>) {} fn PassOptionalMozMap(&self, _: Option<MozMap<i32>>) {} fn PassOptionalNullableMozMap(&self, _: Option<Option<MozMap<i32>>>) {} fn PassOptionalNullableMozMapWithDefaultValue(&self, _: Option<MozMap<i32>>) {} fn PassOptionalObjectMozMap(&self, _: Option<MozMap<Root<TestBinding>>>) {} fn PassStringMozMap(&self, _: MozMap<DOMString>) {} fn PassByteStringMozMap(&self, _: MozMap<ByteString>) {} fn PassMozMapOfMozMaps(&self, _: MozMap<MozMap<i32>>) {} fn PassMozMapUnion(&self, _: UnionTypes::LongOrByteStringMozMap) {} fn PassMozMapUnion2(&self, _: UnionTypes::TestBindingOrByteStringMozMap) {} fn PassMozMapUnion3(&self, _: UnionTypes::TestBindingOrByteStringSequenceSequenceOrByteStringMozMap) {} fn ReceiveMozMap(&self) -> MozMap<i32> { MozMap::new() } fn ReceiveNullableMozMap(&self) -> Option<MozMap<i32>> { Some(MozMap::new()) } fn ReceiveMozMapOfNullableInts(&self) -> MozMap<Option<i32>> { MozMap::new() } fn ReceiveNullableMozMapOfNullableInts(&self) -> Option<MozMap<Option<i32>>> { Some(MozMap::new()) } fn ReceiveMozMapOfMozMaps(&self) -> MozMap<MozMap<i32>> { MozMap::new() } fn ReceiveAnyMozMap(&self) -> MozMap<JSVal> { MozMap::new() } #[allow(unrooted_must_root)] #[allow(unsafe_code)] unsafe fn ReturnResolvedPromise(&self, cx: *mut JSContext, v: HandleValue) -> Fallible<Rc<Promise>> { Promise::Resolve(&self.global(), cx, v) } #[allow(unrooted_must_root)] #[allow(unsafe_code)] unsafe fn ReturnRejectedPromise(&self, cx: *mut JSContext, v: HandleValue) -> Fallible<Rc<Promise>> { Promise::Reject(&self.global(), cx, v) } #[allow(unsafe_code)] unsafe fn PromiseResolveNative(&self, cx: *mut JSContext, p: &Promise, v: HandleValue) { p.resolve(cx, v); } #[allow(unsafe_code)] unsafe fn PromiseRejectNative(&self, cx: 
*mut JSContext, p: &Promise, v: HandleValue) { p.reject(cx, v); } fn PromiseRejectWithTypeError(&self, p: &Promise, s: USVString) { p.reject_error(self.global().get_cx(), Error::Type(s.0)); } #[allow(unrooted_must_root)] fn ResolvePromiseDelayed(&self, p: &Promise, value: DOMString, delay: u64) { let promise = p.duplicate(); let cb = TestBindingCallback { promise: TrustedPromise::new(promise), value: value, }; let _ = self.global() .schedule_callback( OneshotTimerCallback::TestBindingCallback(cb), MsDuration::new(delay)); } #[allow(unrooted_must_root)] fn PromiseNativeHandler(&self, resolve: Option<Rc<SimpleCallback>>, reject: Option<Rc<SimpleCallback>>) -> Rc<Promise> { let global = self.global(); let handler = PromiseNativeHandler::new(&global, resolve.map(SimpleHandler::new), reject.map(SimpleHandler::new)); let p = Promise::new(&global); p.append_native_handler(&handler); return p; #[derive(JSTraceable, HeapSizeOf)] struct SimpleHandler { #[ignore_heap_size_of = "Rc has unclear ownership semantics"] handler: Rc<SimpleCallback>, } impl SimpleHandler { fn new(callback: Rc<SimpleCallback>) -> Box<Callback> { box SimpleHandler { handler: callback } } } impl Callback for SimpleHandler { #[allow(unsafe_code)] fn callback(&self, cx: *mut JSContext, v: HandleValue) { let global = unsafe { GlobalScope::from_context(cx) }; let _ = self.handler.Call_(&*global, v, ExceptionHandling::Report); } } } #[allow(unrooted_must_root)] fn PromiseAttribute(&self) -> Rc<Promise> { Promise::new(&self.global()) } fn AcceptPromise(&self, _promise: &Promise) { } fn AcceptNullablePromise(&self, _promise: Option<&Promise>) { } fn PassSequenceSequence(&self, _seq: Vec<Vec<i32>>) {} fn ReturnSequenceSequence(&self) -> Vec<Vec<i32>> { vec![] } fn PassUnionSequenceSequence(&self, seq: LongOrLongSequenceSequence) { match seq { LongOrLongSequenceSequence::Long(_) => (), LongOrLongSequenceSequence::LongSequenceSequence(seq) => { let _seq: Vec<Vec<i32>> = seq; } } } #[allow(unsafe_code)] fn 
CrashHard(&self) { static READ_ONLY_VALUE: i32 = 0; unsafe { let p: *mut u32 = &READ_ONLY_VALUE as *const _ as *mut _; ptr::write_volatile(p, 0xbaadc0de); } } fn AdvanceClock(&self, ms: i32, tick: bool) { self.global().as_window().advance_animation_clock(ms, tick); } fn Panic(&self) { panic!("explicit panic from script") } } impl TestBinding { pub fn BooleanAttributeStatic(_: &GlobalScope) -> bool { false } pub fn SetBooleanAttributeStatic(_: &GlobalScope, _: bool) {} pub fn ReceiveVoidStatic(_: &GlobalScope) {} pub fn PrefControlledStaticAttributeDisabled(_: &GlobalScope) -> bool { false } pub fn PrefControlledStaticAttributeEnabled(_: &GlobalScope) -> bool { false } pub fn PrefControlledStaticMethodDisabled(_: &GlobalScope) {} pub fn PrefControlledStaticMethodEnabled(_: &GlobalScope) {} pub fn FuncControlledStaticAttributeDisabled(_: &GlobalScope) -> bool { false } pub fn FuncControlledStaticAttributeEnabled(_: &GlobalScope) -> bool { false } pub fn FuncControlledStaticMethodDisabled(_: &GlobalScope) {} pub fn FuncControlledStaticMethodEnabled(_: &GlobalScope) {} } #[allow(unsafe_code)] impl TestBinding { pub unsafe fn condition_satisfied(_: *mut JSContext, _: HandleObject) -> bool { true } pub unsafe fn condition_unsatisfied(_: *mut JSContext, _: HandleObject) -> bool { false } } #[derive(JSTraceable, HeapSizeOf)] pub struct TestBindingCallback { #[ignore_heap_size_of = "unclear ownership semantics"] promise: TrustedPromise, value: DOMString, } impl TestBindingCallback { #[allow(unrooted_must_root)] pub fn invoke(self) { let p = self.promise.root(); let cx = p.global().get_cx(); let _ac = JSAutoCompartment::new(cx, p.reflector().get_jsobject().get()); p.resolve_native(cx, &self.value); } }<|fim▁end|>
<|file_name|>process.rs<|end_file_name|><|fim▁begin|>//! Extensions to `std::process` for Windows. #![stable(feature = "process_extensions", since = "1.2.0")] use crate::ffi::OsStr; use crate::os::windows::io::{ AsHandle, AsRawHandle, BorrowedHandle, FromRawHandle, IntoRawHandle, OwnedHandle, RawHandle, }; use crate::process; use crate::sealed::Sealed; use crate::sys; use crate::sys_common::{AsInner, AsInnerMut, FromInner, IntoInner}; #[stable(feature = "process_extensions", since = "1.2.0")] impl FromRawHandle for process::Stdio { unsafe fn from_raw_handle(handle: RawHandle) -> process::Stdio { let handle = sys::handle::Handle::from_raw_handle(handle as *mut _); let io = sys::process::Stdio::Handle(handle); process::Stdio::from_inner(io) } } #[unstable(feature = "io_safety", issue = "87074")] impl From<OwnedHandle> for process::Stdio { fn from(handle: OwnedHandle) -> process::Stdio { let handle = sys::handle::Handle::from_inner(handle); let io = sys::process::Stdio::Handle(handle); process::Stdio::from_inner(io) } } #[stable(feature = "process_extensions", since = "1.2.0")] impl AsRawHandle for process::Child { #[inline] fn as_raw_handle(&self) -> RawHandle { self.as_inner().handle().as_raw_handle() as *mut _ } } #[unstable(feature = "io_safety", issue = "87074")] impl AsHandle for process::Child { #[inline] fn as_handle(&self) -> BorrowedHandle<'_> { self.as_inner().handle().as_handle() } } #[stable(feature = "into_raw_os", since = "1.4.0")] impl IntoRawHandle for process::Child { fn into_raw_handle(self) -> RawHandle { self.into_inner().into_handle().into_raw_handle() as *mut _ } } #[unstable(feature = "io_safety", issue = "87074")] impl From<process::Child> for OwnedHandle { fn from(child: process::Child) -> OwnedHandle { child.into_inner().into_handle().into_inner() } } #[stable(feature = "process_extensions", since = "1.2.0")] impl AsRawHandle for process::ChildStdin { #[inline] fn as_raw_handle(&self) -> RawHandle { self.as_inner().handle().as_raw_handle() 
as *mut _ } } #[stable(feature = "process_extensions", since = "1.2.0")] impl AsRawHandle for process::ChildStdout { #[inline] fn as_raw_handle(&self) -> RawHandle { self.as_inner().handle().as_raw_handle() as *mut _ } }<|fim▁hole|> fn as_raw_handle(&self) -> RawHandle { self.as_inner().handle().as_raw_handle() as *mut _ } } #[stable(feature = "into_raw_os", since = "1.4.0")] impl IntoRawHandle for process::ChildStdin { fn into_raw_handle(self) -> RawHandle { self.into_inner().into_handle().into_raw_handle() as *mut _ } } #[stable(feature = "into_raw_os", since = "1.4.0")] impl IntoRawHandle for process::ChildStdout { fn into_raw_handle(self) -> RawHandle { self.into_inner().into_handle().into_raw_handle() as *mut _ } } #[stable(feature = "into_raw_os", since = "1.4.0")] impl IntoRawHandle for process::ChildStderr { fn into_raw_handle(self) -> RawHandle { self.into_inner().into_handle().into_raw_handle() as *mut _ } } /// Windows-specific extensions to [`process::ExitStatus`]. /// /// This trait is sealed: it cannot be implemented outside the standard library. /// This is so that future additional methods are not breaking changes. #[stable(feature = "exit_status_from", since = "1.12.0")] pub trait ExitStatusExt: Sealed { /// Creates a new `ExitStatus` from the raw underlying `u32` return value of /// a process. #[stable(feature = "exit_status_from", since = "1.12.0")] fn from_raw(raw: u32) -> Self; } #[stable(feature = "exit_status_from", since = "1.12.0")] impl ExitStatusExt for process::ExitStatus { fn from_raw(raw: u32) -> Self { process::ExitStatus::from_inner(From::from(raw)) } } /// Windows-specific extensions to the [`process::Command`] builder. /// /// This trait is sealed: it cannot be implemented outside the standard library. /// This is so that future additional methods are not breaking changes. 
#[stable(feature = "windows_process_extensions", since = "1.16.0")] pub trait CommandExt: Sealed { /// Sets the [process creation flags][1] to be passed to `CreateProcess`. /// /// These will always be ORed with `CREATE_UNICODE_ENVIRONMENT`. /// /// [1]: https://docs.microsoft.com/en-us/windows/win32/procthread/process-creation-flags #[stable(feature = "windows_process_extensions", since = "1.16.0")] fn creation_flags(&mut self, flags: u32) -> &mut process::Command; /// Forces all arguments to be wrapped in quote (`"`) characters. /// /// This is useful for passing arguments to [MSYS2/Cygwin][1] based /// executables: these programs will expand unquoted arguments containing /// wildcard characters (`?` and `*`) by searching for any file paths /// matching the wildcard pattern. /// /// Adding quotes has no effect when passing arguments to programs /// that use [msvcrt][2]. This includes programs built with both /// MinGW and MSVC. /// /// [1]: <https://github.com/msys2/MSYS2-packages/issues/2176> /// [2]: <https://msdn.microsoft.com/en-us/library/17w5ykft.aspx> #[unstable(feature = "windows_process_extensions_force_quotes", issue = "82227")] fn force_quotes(&mut self, enabled: bool) -> &mut process::Command; /// Append literal text to the command line without any quoting or escaping. /// /// This is useful for passing arguments to `cmd.exe /c`, which doesn't follow /// `CommandLineToArgvW` escaping rules. 
#[unstable(feature = "windows_process_extensions_raw_arg", issue = "29494")] fn raw_arg<S: AsRef<OsStr>>(&mut self, text_to_append_as_is: S) -> &mut process::Command; } #[stable(feature = "windows_process_extensions", since = "1.16.0")] impl CommandExt for process::Command { fn creation_flags(&mut self, flags: u32) -> &mut process::Command { self.as_inner_mut().creation_flags(flags); self } fn force_quotes(&mut self, enabled: bool) -> &mut process::Command { self.as_inner_mut().force_quotes(enabled); self } fn raw_arg<S: AsRef<OsStr>>(&mut self, raw_text: S) -> &mut process::Command { self.as_inner_mut().raw_arg(raw_text.as_ref()); self } }<|fim▁end|>
#[stable(feature = "process_extensions", since = "1.2.0")] impl AsRawHandle for process::ChildStderr { #[inline]
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Simple [DEFLATE][def]-based compression. This is a wrapper around the //! [`miniz`][mz] library, which is a one-file pure-C implementation of zlib. //! //! [def]: https://en.wikipedia.org/wiki/DEFLATE //! [mz]: https://code.google.com/p/miniz/ #![crate_name = "flate"] #![experimental] #![staged_api] #![crate_type = "rlib"] #![crate_type = "dylib"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] #[cfg(test)] #[macro_use] extern crate log; extern crate libc; use libc::{c_void, size_t, c_int}; use std::ops::Deref; use std::ptr::Unique; use std::slice; pub struct Bytes { ptr: Unique<u8>, len: uint, } impl Deref for Bytes { type Target = [u8]; fn deref(&self) -> &[u8] { unsafe { slice::from_raw_mut_buf(&self.ptr.0, self.len) } } } impl Drop for Bytes { fn drop(&mut self) { unsafe { libc::free(self.ptr.0 as *mut _); } } } #[link(name = "miniz", kind = "static")] extern { /// Raw miniz compression function. fn tdefl_compress_mem_to_heap(psrc_buf: *const c_void, src_buf_len: size_t, pout_len: *mut size_t, flags: c_int) -> *mut c_void; /// Raw miniz decompression function. 
fn tinfl_decompress_mem_to_heap(psrc_buf: *const c_void, src_buf_len: size_t, pout_len: *mut size_t, flags: c_int) -> *mut c_void; } static LZ_NORM : c_int = 0x80; // LZ with 128 probes, "normal" static TINFL_FLAG_PARSE_ZLIB_HEADER : c_int = 0x1; // parse zlib header and adler32 checksum static TDEFL_WRITE_ZLIB_HEADER : c_int = 0x01000; // write zlib header and adler32 checksum fn deflate_bytes_internal(bytes: &[u8], flags: c_int) -> Option<Bytes> { unsafe { let mut outsz : size_t = 0; let res = tdefl_compress_mem_to_heap(bytes.as_ptr() as *const _, bytes.len() as size_t, &mut outsz, flags); if !res.is_null() { let res = Unique(res as *mut u8); Some(Bytes { ptr: res, len: outsz as uint }) } else { None } } } /// Compress a buffer, without writing any sort of header on the output. pub fn deflate_bytes(bytes: &[u8]) -> Option<Bytes> { deflate_bytes_internal(bytes, LZ_NORM) } /// Compress a buffer, using a header that zlib can understand. pub fn deflate_bytes_zlib(bytes: &[u8]) -> Option<Bytes> { deflate_bytes_internal(bytes, LZ_NORM | TDEFL_WRITE_ZLIB_HEADER) } fn inflate_bytes_internal(bytes: &[u8], flags: c_int) -> Option<Bytes> { unsafe { let mut outsz : size_t = 0; let res = tinfl_decompress_mem_to_heap(bytes.as_ptr() as *const _, bytes.len() as size_t, &mut outsz, flags); if !res.is_null() { let res = Unique(res as *mut u8); Some(Bytes { ptr: res, len: outsz as uint }) } else { None } } } /// Decompress a buffer, without parsing any sort of header on the input. pub fn inflate_bytes(bytes: &[u8]) -> Option<Bytes> { inflate_bytes_internal(bytes, 0) } /// Decompress a buffer that starts with a zlib header. 
pub fn inflate_bytes_zlib(bytes: &[u8]) -> Option<Bytes> { inflate_bytes_internal(bytes, TINFL_FLAG_PARSE_ZLIB_HEADER) } #[cfg(test)] mod tests { use super::{inflate_bytes, deflate_bytes}; use std::rand; use std::rand::Rng; #[test] fn test_flate_round_trip() { let mut r = rand::thread_rng(); let mut words = vec!(); for _ in range(0u, 20) { let range = r.gen_range(1u, 10); let v = r.gen_iter::<u8>().take(range).collect::<Vec<u8>>(); words.push(v); } for _ in range(0u, 20) { let mut input = vec![]; for _ in range(0u, 2000) { input.push_all(r.choose(words.as_slice()).unwrap().as_slice()); } debug!("de/inflate of {} bytes of random word-sequences", input.len()); let cmp = deflate_bytes(input.as_slice()).expect("deflation failed"); let out = inflate_bytes(cmp.as_slice()).expect("inflation failed"); debug!("{} bytes deflated to {} ({:.1}% size)", input.len(), cmp.len(), 100.0 * ((cmp.len() as f64) / (input.len() as f64))); assert_eq!(input, out.as_slice()); } }<|fim▁hole|> #[test] fn test_zlib_flate() { let bytes = vec!(1, 2, 3, 4, 5); let deflated = deflate_bytes(bytes.as_slice()).expect("deflation failed"); let inflated = inflate_bytes(deflated.as_slice()).expect("inflation failed"); assert_eq!(inflated.as_slice(), bytes); } }<|fim▁end|>
<|file_name|>test_agg.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.models import User from pandas import read_csv, notnull, DataFrame from numpy import isnan from django.test import TestCase from rhizome.models.campaign_models import Campaign, CampaignType, \ DataPointComputed, AggDataPoint from rhizome.models.location_models import Location, LocationType, \ LocationTree from rhizome.models.indicator_models import Indicator, IndicatorTag, \ IndicatorToTag, CalculatedIndicatorComponent from rhizome.models.document_models import Document, SourceSubmission from rhizome.models.datapoint_models import DataPoint from rhizome.cache_meta import LocationTreeCache from rhizome.tests.setup_helpers import TestSetupHelpers class AggRefreshTestCase(TestCase): ''' ''' def __init__(self, *args, **kwargs): super(AggRefreshTestCase, self).__init__(*args, **kwargs) def setUp(self): self.ts = TestSetupHelpers() data_df = read_csv('rhizome/tests/_data/calc_data.csv') self.create_metadata() self.user = User.objects.get(username="test") self.test_df = data_df[data_df['is_raw'] == 1] self.target_df = data_df[data_df['is_raw'] == 0] self.campaign_id = Campaign.objects.all()[0].id self.top_lvl_location = Location.objects.filter(name='Nigeria')[0] ltr = LocationTreeCache() ltr.main() def create_metadata(self): ''' Creating the Indicator, location, Campaign, meta data needed for the system to aggregate / caclulate. 
''' read_csv('rhizome/tests/_data/campaigns.csv') location_df = read_csv('rhizome/tests/_data/locations.csv') indicator_df = read_csv('rhizome/tests/_data/indicators.csv') user_id = User.objects.create_user('test', '[email protected]', 'test').id self.location_type1 = LocationType.objects.create(admin_level=0, name="country", id=1) self.location_type2 = LocationType.objects.create(admin_level=1, name="province", id=2) campaign_type1 = CampaignType.objects.create(name='test') self.locations = self.model_df_to_data(location_df, Location) self.indicators = self.model_df_to_data(indicator_df, Indicator) ind_tag = IndicatorTag.objects.create(tag_name='Polio') sub_tag = IndicatorTag.objects.create(tag_name='Polio Management', parent_tag_id=ind_tag.id) ind_to_tag_batch = [IndicatorToTag( **{'indicator_tag_id': sub_tag.id, 'indicator_id': ind.id}) for ind in self.indicators] IndicatorToTag.objects.bulk_create(ind_to_tag_batch) self.campaign_id = Campaign.objects.create( start_date='2016-01-01', end_date='2016-01-02', campaign_type_id=campaign_type1.id ).id document = Document.objects.create( doc_title='test', created_by_id=user_id, guid='test') self.ss = SourceSubmission.objects.create( document_id=document.id, submission_json='', row_number=0, data_date='2016-01-01' ).id def model_df_to_data(self, model_df, model): meta_ids = [] non_null_df = model_df.where((notnull(model_df)), None) list_of_dicts = non_null_df.transpose().to_dict() for row_ix, row_dict in list_of_dicts.iteritems(): row_id = model.objects.create(**row_dict) meta_ids.append(row_id) return meta_ids def create_raw_datapoints(self): for row_ix, row_data in self.test_df.iterrows(): dp_id = self.create_datapoint(row_data.location_id, row_data .data_date, row_data.indicator_id, row_data.value) # def create_datapoint(self, **kwargs): def create_datapoint(self, location_id, data_date, indicator_id, value): ''' Right now this is being performed as a database insert. 
I would like to Test this against the data entry resource, but this will do for now in order to test caching. ''' document_id = Document.objects.get(doc_title='test').id ss_id = SourceSubmission.objects.get(document_id=document_id).id dp = DataPoint.objects.create( location_id=location_id, data_date=data_date, indicator_id=indicator_id, campaign_id=self.campaign_id, value=value, source_submission_id=ss_id, unique_index=str(location_id) + str(data_date) + str(self.campaign_id) + str(indicator_id) ) return dp def test_location_aggregation(self): ''' Using the calc_data.csv, create a test_df and target_df. Ensure that the aggregation and calcuation are working properly, but ingesting the stored data, running the cache, and checking that the calculated data for the aggregate location (parent location, in this case Nigeria) is as expected. In addition to the datapoints in the test file, i insert a null valu to ensure that any null won't corrpupt the calculation. python manage.py test rhizome.tests.test_agg.AggRefreshTestCase. 
test_location_aggregation --settings=rhizome.settings.test ''' self.create_raw_datapoints() indicator_id, data_date, raw_location_id,\ agg_location_id, null_location_id, NaN_location_id = \ 22, '2016-01-01', 12910, 12907, 12928, 12913 location_ids = Location.objects.filter( parent_location_id=agg_location_id).values_list('id', flat=True) DataPoint.objects.filter( indicator_id=indicator_id, # data_date = data_date, location_id=null_location_id ).update(value=None) DataPoint.objects.filter( indicator_id=indicator_id, # data_date = data_date, location_id=NaN_location_id ).update(value='NaN') dps = DataPoint.objects.filter( indicator_id=indicator_id, # data_date = data_date, location_id__in=location_ids, value__isnull=False ).values_list('id', 'value') sum_dp_value = sum([y for x, y in dps if not isnan(y)]) campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() ################################################# ## ensure that raw data gets into AggDataPoint ## ################################################# raw_value = DataPoint.objects.get( # data_date = data_date, indicator_id=indicator_id, location_id=raw_location_id)\ .value ind_obj = Indicator.objects.get(id=indicator_id) raw_value_in_agg = AggDataPoint.objects.get( # data_date = data_date, indicator_id=indicator_id, location_id=raw_location_id)\ .value self.assertEqual(raw_value, raw_value_in_agg) ############################################# ## ensure that the aggregated data gets in ## ############################################# loc_tree_df = DataFrame(list(LocationTree.objects.all().values())) agg_df = DataFrame(list(AggDataPoint.objects.filter(\ indicator_id=indicator_id,\ campaign_id=self.campaign_id ).values())) agg_value = AggDataPoint.objects.get( indicator_id=indicator_id, campaign_id=self.campaign_id, location_id=agg_location_id ).value self.assertEqual(agg_value, sum_dp_value) ###################################################### ## ensure that any 
raw data will override aggregate ## ###################################################### override_value = 909090 agg_override_dp = self.create_datapoint(agg_location_id, data_date, indicator_id, override_value) campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() override_value_in_agg = AggDataPoint.objects.get( campaign_id=self.campaign_id, indicator_id=indicator_id, location_id=agg_location_id).value self.assertEqual(override_value, override_value_in_agg) ########################################### # ensure that percentages do not aggregate ########################################### pct_ind = Indicator.objects.create( name='pct missed', short_name='pct_missed', description='missed pct', data_format='pct', source_name='my brain', ) dp_1 = DataPoint.objects.create( indicator_id=pct_ind.id, location_id=location_ids[0], campaign_id=self.campaign_id, data_date=data_date, value=.2, source_submission_id=self.ss, unique_index=1 ) dp_2 = DataPoint.objects.create( indicator_id=pct_ind.id, location_id=location_ids[1], campaign_id=self.campaign_id, data_date=data_date, value=.6, source_submission_id=self.ss, unique_index=2 ) campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() try: agg_dp_qs = AggDataPoint.objects.get( location_id=agg_location_id, indicator_id=pct_ind, campaign_id=self.campaign_id, ) error_ocurred = False except AggDataPoint.DoesNotExist: error_ocurred = True self.assertTrue(error_ocurred) def test_raw_data_to_computed(self): ''' This just makes sure that any data in the datapoint table, gets into the Calculated DataPoint table. That is, i insert a value for missed children in Borno, the same exact data should be in the datapoint_with_computed table no matter what. 
''' self.create_raw_datapoints() indicator_id, data_date, raw_location_id,\ agg_location_id, campaign_id = 22, '2016-01-01', 12910, 12907, 1 location_ids = Location.objects.filter( parent_location_id=agg_location_id).values_list('id', flat=True) dp_values = DataPoint.objects.filter( indicator_id=indicator_id, data_date=data_date, location_id__in=location_ids ).values_list('value', flat=True) campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() ############################################################ ## ensure that raw data gets into datapoint_with_computed ## ############################################################ raw_value = DataPoint.objects.get(data_date=data_date, indicator_id=indicator_id, location_id=raw_location_id)\ .value raw_value_in_agg = DataPointComputed.objects.get( campaign_id=self.campaign_id, indicator_id=indicator_id, location_id=raw_location_id)\ .value self.assertEqual(raw_value, raw_value_in_agg) def test_sum_and_pct(self): ''' The system uses the "PART_TO_BE_SUMMED" edge type in order to create indicators such that the sum of: - Number Missed - Missed due to other reasons(24) - Child Absent(251) - Not in Plan (267) - Not Visited (268) - Non Compliance(264) gives us: All Missed Children (21) as well as: pct missed children due to refusal (166) Here we create new metadata so we can test this functionality for an Abstracted use case and test that 1. We can SUM indicators 2. We can use the result of #2 as the denominator for a percentage calculation. 
''' Indicator.objects.all().delete() data_date, location_id, agg_location_id = '2016-01-01', 12910, 12907 val_1, val_2, val_3 = 303, 808, 909 ## create the parent and sub indicators ## parent_indicator = Indicator.objects.create( name='Number of Avoidable Deaths', short_name='Number of Avoidable Deaths', data_format='int' ) sub_indicator_1 = Indicator.objects.create( name='Number of Deaths due to Conflict', short_name='Number of Deaths due to Conflict', data_format='int' ) sub_indicator_2 = Indicator.objects.create( name='Number of Deaths due to Malaria', short_name='Number of Deaths due to Malaria', data_format='int' ) sub_indicator_3 = Indicator.objects.create(<|fim▁hole|> pct_indicator = Indicator.objects.create( name='pct of Deaths due to Hunger', short_name='pct of Deaths due to Hunger', data_format='pct' ) ## FOR SUM OF PARTS CALUCLATIONS ## indicator_calc_1 = CalculatedIndicatorComponent.objects.create( indicator_id=parent_indicator.id, indicator_component_id=sub_indicator_1.id, calculation='PART_TO_BE_SUMMED' ) indicator_calc_2 = CalculatedIndicatorComponent.objects.create( indicator_id=parent_indicator.id, indicator_component_id=sub_indicator_2.id, calculation='PART_TO_BE_SUMMED' ) indicator_calc_3 = CalculatedIndicatorComponent.objects.create( indicator_id=parent_indicator.id, indicator_component_id=sub_indicator_3.id, calculation='PART_TO_BE_SUMMED' ) ## FOR PART OVER WHOLE CALCULATIONS ## indicator_calc_numerator = CalculatedIndicatorComponent.objects.create( indicator_id=pct_indicator.id, indicator_component_id=sub_indicator_3.id, calculation='NUMERATOR' ) indicator_calc_denominator = CalculatedIndicatorComponent.objects.create( indicator_id=pct_indicator.id, indicator_component_id=parent_indicator.id, calculation='DENOMINATOR' ) ss_id = SourceSubmission.objects.all()[0].id ## create the datapoints ## dp_1 = DataPoint.objects.create( indicator_id=sub_indicator_1.id, data_date=data_date, location_id=location_id, campaign_id=self.campaign_id, 
value=val_1, source_submission_id=ss_id, unique_index=1 ) dp_2 = DataPoint.objects.create( indicator_id=sub_indicator_2.id, data_date=data_date, location_id=location_id, campaign_id=self.campaign_id, value=val_2, source_submission_id=ss_id, unique_index=2 ) dp_3 = DataPoint.objects.create( indicator_id=sub_indicator_3.id, data_date=data_date, location_id=location_id, campaign_id=self.campaign_id, value=val_3, source_submission_id=ss_id, unique_index=3 ) campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() calc_value_sum = DataPointComputed.objects.get( indicator_id=parent_indicator.id, campaign_id=self.campaign_id, location_id=location_id ).value calc_value_pct = DataPointComputed.objects.get( indicator_id=pct_indicator.id, campaign_id=self.campaign_id, location_id=location_id ).value # test SUM calculation sum_target_value = val_1 + val_2 + val_3 self.assertEqual(calc_value_sum, sum_target_value) # test part over whole calction pct_target_value = val_3 / float(sum_target_value) self.assertEqual(calc_value_pct, pct_target_value) def test_part_of_difference(self): ''' see here: rhizome.work/manage_system/manage/indicator/187 We use this calculation to perform the following calculation: WHOLE_OF_DIFFERENCE(x) - PART_OF_DIFFERENCE(y) ----------------------------------------- WHOLE_OF_DIFFERENCE(x) ''' data_date, location_id, agg_location_id = '2016-01-01', 12910, 12907 x, y = 303.00, 808.00 ## create the parent and sub indicators ## parent_indicator = Indicator.objects.create( name='Refsual Conversion', short_name='Refsual Conversion', data_format='pct' ) sub_indicator_part = Indicator.objects.create( name='Refusals After Revisit', short_name='Refusals After Revisit', data_format='int' ) sub_indicator_denom = Indicator.objects.create( name='Refusals Before Revisit', short_name='Refusals Before Revisit', data_format='int' ) ## FOR SUM OF PARTS CALUCLATIONS ## indicator_calc_1 = 
CalculatedIndicatorComponent.objects.create( indicator_id=parent_indicator.id, indicator_component_id=sub_indicator_part.id, calculation='PART_OF_DIFFERENCE' ) indicator_calc_3 = CalculatedIndicatorComponent.objects.create( indicator_id=parent_indicator.id, indicator_component_id=sub_indicator_denom.id, calculation='WHOLE_OF_DIFFERENCE' ) ss_id = SourceSubmission.objects.all()[0].id ## create the datapoints ## dp_1 = DataPoint.objects.create( indicator_id=sub_indicator_denom.id, data_date=data_date, location_id=location_id, campaign_id=self.campaign_id, value=x, source_submission_id=ss_id, unique_index=1 ) dp_2 = DataPoint.objects.create( indicator_id=sub_indicator_part.id, data_date=data_date, location_id=location_id, campaign_id=self.campaign_id, value=y, source_submission_id=ss_id, unique_index=2 ) campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() calc_value = DataPointComputed.objects.get( indicator_id=parent_indicator.id, campaign_id=self.campaign_id, location_id=location_id ).value # test SUM calculation target_value = (x - y) / x self.assertEqual(round(calc_value, 4), round(target_value, 4)) def test_missing_part_of_sum(self): data_date, location_id, agg_location_id = '2016-01-01', 12910, 12907 val_1, val_2 = 101, 102 ## create the parent and sub indicators ## parent_indicator = Indicator.objects.create( name='Number of Missing Children', short_name='Number of Avoidable Deaths', data_format='int' ) sub_indicator_1 = Indicator.objects.create( name='Number Missing Due to Refusal', short_name='Number Missing Due to Refusal', data_format='int' ) sub_indicator_2 = Indicator.objects.create( name='Number Missing Due to Absence', short_name='Number Missing Due to Absence', data_format='int' ) sub_indicator_3 = Indicator.objects.create( name='Number Missing Due to ??', short_name='Number Missing Due to ??', data_format='int' ) indicator_calc_1 = CalculatedIndicatorComponent.objects.create( 
indicator_id=parent_indicator.id, indicator_component_id=sub_indicator_1.id, calculation='PART_TO_BE_SUMMED' ) indicator_calc_2 = CalculatedIndicatorComponent.objects.create( indicator_id=parent_indicator.id, indicator_component_id=sub_indicator_2.id, calculation='PART_TO_BE_SUMMED' ) indicator_calc_3 = CalculatedIndicatorComponent.objects.create( indicator_id=parent_indicator.id, indicator_component_id=sub_indicator_3.id, calculation='PART_TO_BE_SUMMED' ) ss_id = SourceSubmission.objects.all()[0].id ## create the datapoints. We're only adding data points for ## ## two of the three datapoints that are mapped as parts to be summed ## dp_1 = DataPoint.objects.create( indicator_id=sub_indicator_1.id, data_date=data_date, location_id=location_id, campaign_id=self.campaign_id, value=val_1, source_submission_id=ss_id, unique_index=1 ) dp_2 = DataPoint.objects.create( indicator_id=sub_indicator_2.id, data_date=data_date, location_id=location_id, campaign_id=self.campaign_id, value=val_2, source_submission_id=ss_id, unique_index=2 ) campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() calc_value_sum = DataPointComputed.objects.get( indicator_id=parent_indicator.id, campaign_id=self.campaign_id, location_id=location_id ).value sum_target_value = val_1 + val_2 self.assertEqual(calc_value_sum, sum_target_value) def test_recursive_sum(self): ''' Consider the case in which we have "number of missed children" which is the sum of "missed children due to absence", "missed children due to refusal", and "missed children due to child absence." Now consider that "missed children due to refusal" is also generated from the sum of "refusal due to religious reasons", "refusal due to too many rounds", "refusal due to - unhappy with team " (see more here: http://rhizome.work/manage_system/manage/indicator/264). There are two levels here and this test aims to cover this use case. 
''' data_date, location_id = '2016-01-01', 12910 Indicator.objects.all().delete() parent_indicator = Indicator.objects.create( name='Number of Avoidable Deaths', short_name='Number of Avoidable Deaths', data_format='int' ) sub_indicator_1 = Indicator.objects.create( name='Number of Deaths due to Conflict', short_name='Number of Deaths due to Conflict', data_format='int' ) sub_sub_indicator_1 = Indicator.objects.create( name='Number Conflict Deaths - Children', short_name='Conflict Deaths - Children', data_format='int' ) sub_sub_indicator_2 = Indicator.objects.create( name='Number of Adult Civilian Deaths', short_name='Number of Adult Civilian Deaths', data_format='int' ) sub_sub_indicator_3 = Indicator.objects.create( name='Number of Conflict Deaths - Militants', short_name='Conflict Deaths - Militants', data_format='int' ) sub_indicator_2 = Indicator.objects.create( name='Number of Deaths due to Malaria', short_name='Number of Deaths due to Malaria', data_format='int' ) sub_indicator_2_sub_1 = Indicator.objects.create( name='Number of Deaths due to Malaria -- Child had No Net', short_name='Number of Deaths due to Malaria -- no net', data_format='int' ) sub_indicator_2_sub_2 = Indicator.objects.create( name='Number of Deaths due to Malaria -- Child had No Medicine', short_name='Number of Deaths due to Malaria -- no Medicie', data_format='int' ) sub_indicator_3 = Indicator.objects.create( name='Number of Deaths due to Hunger', short_name='Number of Deaths due to Hunger', data_format='int' ) ## FOR SUM OF PARTS CALUCLATIONS ## indicator_calc_1 = CalculatedIndicatorComponent.objects.create( indicator_id=parent_indicator.id, indicator_component_id=sub_indicator_1.id, calculation='PART_TO_BE_SUMMED' ) indicator_calc_2 = CalculatedIndicatorComponent.objects.create( indicator_id=parent_indicator.id, indicator_component_id=sub_indicator_2.id, calculation='PART_TO_BE_SUMMED' ) indicator_calc_3 = CalculatedIndicatorComponent.objects.create( indicator_id=parent_indicator.id, 
indicator_component_id=sub_indicator_3.id, calculation='PART_TO_BE_SUMMED' ) ## 2nd layer of indicator calculation ## sub_indicator_calc_1 = CalculatedIndicatorComponent.objects.create( indicator_id=sub_indicator_1.id, indicator_component_id=sub_sub_indicator_1.id, calculation='PART_TO_BE_SUMMED' ) sub_indicator_calc_2 = CalculatedIndicatorComponent.objects.create( indicator_id=sub_indicator_1.id, indicator_component_id=sub_sub_indicator_2.id, calculation='PART_TO_BE_SUMMED' ) sub_indicator_calc_3 = CalculatedIndicatorComponent.objects.create( indicator_id=sub_indicator_1.id, indicator_component_id=sub_sub_indicator_3.id, calculation='PART_TO_BE_SUMMED' ) ## 2nd layer of indicator calculation ## sub_indicator_calc_1 = CalculatedIndicatorComponent.objects.create( indicator_id=sub_indicator_2.id, indicator_component_id=sub_indicator_2_sub_1.id, calculation='PART_TO_BE_SUMMED' ) sub_indicator_calc_2 = CalculatedIndicatorComponent.objects.create( indicator_id=sub_indicator_2.id, indicator_component_id=sub_indicator_2_sub_2.id, calculation='PART_TO_BE_SUMMED' ) ## create all the datapoints ## values_to_insert = { sub_indicator_2.id: 33, sub_indicator_3.id: 44, sub_sub_indicator_1.id: 44, sub_sub_indicator_2.id: 55, sub_sub_indicator_3.id: 66, sub_indicator_2_sub_1.id: 77, sub_indicator_2_sub_2.id: 88, } for k, v in values_to_insert.iteritems(): self.create_datapoint(location_id, data_date, k, v) campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() parent_indicator_target_value = sum(values_to_insert.values()) parent_indicator_1_actual_value = DataPointComputed.objects.get( location_id=location_id, indicator_id=parent_indicator, ).value self.assertEqual(parent_indicator_1_actual_value, parent_indicator_target_value) # test that a parent overrides the sum of its children when there ## are multiple levels of indicator calcuations ## sub_2_target_val = values_to_insert[sub_indicator_2.id] sub_2_actual_val = 
DataPointComputed.objects.get( location_id=location_id, indicator_id=sub_indicator_2.id, ).value self.assertEqual(sub_2_target_val, sub_2_actual_val) def test_boolean_aggregation(self): # create a boolean indicato boolean_indicator = Indicator.objects.create( name='Is Controlled by "Anti Governemnt Elements"', short_name='Is at War', data_format='bool' ) # find the locations for which we should store raw data.. For instance # if it is 'district is at war', then we dont expect data stored at # the porivnce level. Here though, we get all children of a particluar # parent. locations = Location.objects.filter( parent_location_id=self.top_lvl_location.id) # split the data into 1 value being fale, the rest being trye. # this aludes to the fact that the parent location shoul dhave a value # that is somethign like [ 1 / data.length() ] false_loc_id = locations[0].id true_loc_list = locations[1:] ## create the true and false datapoints ## false_datapoint = DataPoint.objects.create( campaign_id=self.campaign_id, location_id=false_loc_id, indicator_id=boolean_indicator.id, source_submission_id=self.ss, value=0 ) true_datapoint_batch = [DataPoint(**{ 'campaign_id': self.campaign_id, 'location_id': loc.id, 'indicator_id': boolean_indicator.id, 'source_submission_id': self.ss, 'value': 1, 'unique_index': str(self.campaign_id) + str(boolean_indicator.id) + str(loc.id) }) for loc in true_loc_list] DataPoint.objects.bulk_create(true_datapoint_batch) # run the agg refresh ( this is the code that will actually transofrm # the booleans to numerics. ) campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() # now get the expected aggrgated data and compare it with the percentage # value that we expect given how we split up the locations above. 
dwc_value = DataPointComputed.objects.get( location_id=self.top_lvl_location.id, campaign_id=self.campaign_id, indicator=boolean_indicator.id ).value expected_value = 1 - (1.0 / len(locations)) self.assertEqual(expected_value, dwc_value) def test_calculated_indicator_agg(self): Indicator.objects.all().delete() data_date, agg_location_id = '2016-01-01', 12907 child_locations = Location.objects.filter( parent_location_id=agg_location_id) location_id = child_locations[0].id location_id_2 = child_locations[1].id ## create the parent and sub indicators ## parent_indicator = Indicator.objects.create( name='Number of Avoidable Deaths', short_name='Number of Avoidable Deaths', data_format='int' ) sub_indicator_1 = Indicator.objects.create( name='Number of Deaths due to Conflict', short_name='Number of Deaths due to Conflict', data_format='int' ) pct_indicator = Indicator.objects.create( name='pct of Deaths due to Conflict', short_name='pct of Deaths due to Conflict', data_format='pct' ) ## FOR PART OVER WHOLE CALCULATIONS ## indicator_calc_numerator = CalculatedIndicatorComponent.objects.create( indicator_id=pct_indicator.id, indicator_component_id=sub_indicator_1.id, calculation='NUMERATOR' ) indicator_calc_denominator = CalculatedIndicatorComponent.objects.create( indicator_id=pct_indicator.id, indicator_component_id=parent_indicator.id, calculation='DENOMINATOR' ) val_1 = 32 val_2 = 100 val_1_loc_2 = 48 val_2_loc_2 = 200 ss_id = SourceSubmission.objects.all()[0].id ## create the datapoints ## dp_1 = DataPoint.objects.create( indicator_id=sub_indicator_1.id, data_date=data_date, location_id=location_id, campaign_id=self.campaign_id, value=val_1, source_submission_id=ss_id, unique_index=1 ) dp_2 = DataPoint.objects.create( indicator_id=parent_indicator.id, data_date=data_date, location_id=location_id, campaign_id=self.campaign_id, value=val_2, source_submission_id=ss_id, unique_index=2 ) dp_1_loc_2 = DataPoint.objects.create( indicator_id=sub_indicator_1.id, 
data_date=data_date, location_id=location_id_2, campaign_id=self.campaign_id, value=val_1_loc_2, source_submission_id=ss_id, unique_index=3 ) dp_2_loc_2 = DataPoint.objects.create( indicator_id=parent_indicator.id, data_date=data_date, location_id=location_id_2, campaign_id=self.campaign_id, value=val_2_loc_2, source_submission_id=ss_id, unique_index=4 ) campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() calc_value_pct = DataPointComputed.objects.get( indicator_id=pct_indicator.id, campaign_id=self.campaign_id, location_id=location_id ).value calc_value_pct_2 = DataPointComputed.objects.get( indicator_id=pct_indicator.id, campaign_id=self.campaign_id, location_id=location_id_2 ).value # test part over whole calculation for child locations pct_target_value = val_1 / float(val_2) self.assertEqual(calc_value_pct, pct_target_value) pct_target_value_2 = val_1_loc_2 / float(val_2_loc_2) self.assertEqual(calc_value_pct_2, pct_target_value_2) # make sure that part over whole aggregates as well total_dp = DataPointComputed.objects.get( indicator_id=parent_indicator.id, campaign_id=self.campaign_id, location_id=agg_location_id).value self.assertEqual(total_dp, val_2 + val_2_loc_2) try: pct_dp = DataPointComputed.objects.get( indicator_id=pct_indicator.id, campaign_id=self.campaign_id, location_id=agg_location_id).value except ObjectDoesNotExist: fail("aggregation did not work") self.assertEqual(round(pct_dp, 5), round( (val_1 + val_1_loc_2) / float(val_2 + val_2_loc_2), 5)) def test_multiple_calculations(self): num_seen = Indicator.objects.create( name='number children seen', short_name='number children seen', data_format='int' ) num_vacc = Indicator.objects.create( name='number children vaccinated', short_name='number children vaccinated', data_format='int' ) num_missed = Indicator.objects.create( name='number children missed', short_name='number children missed', data_format='int' ) pct_missed = Indicator.objects.create( 
name='pct childrent missed', short_name='pct children missed', data_format='pct' ) indicator_calc_numerator = CalculatedIndicatorComponent.objects.create( indicator_id=pct_missed.id, indicator_component_id=num_missed.id, calculation='NUMERATOR' ) indicator_calc_denominator = CalculatedIndicatorComponent.objects.create( indicator_id=pct_missed.id, indicator_component_id=num_seen.id, calculation='DENOMINATOR' ) indicator_calc_part_of_diff = CalculatedIndicatorComponent.objects.create( indicator_id=pct_missed.id, indicator_component_id=num_vacc.id, calculation='PART_OF_DIFFERENCE' ) indicator_calc_part_of_whole = CalculatedIndicatorComponent.objects.create( indicator_id=pct_missed.id, indicator_component_id=num_seen.id, calculation='WHOLE_OF_DIFFERENCE' ) num_missed_val = 45.0 num_seen_val = 100.0 num_vacc_val = 55.0 ss_id = SourceSubmission.objects.all()[0].id dp_num_missed = DataPoint.objects.create( indicator_id=num_missed.id, location_id=self.top_lvl_location.id, campaign_id=self.campaign_id, value=num_missed_val, source_submission_id=ss_id, unique_index=3 ) dp_num_seen = DataPoint.objects.create( indicator_id=num_seen.id, location_id=self.top_lvl_location.id, campaign_id=self.campaign_id, value=num_seen_val, source_submission_id=ss_id, unique_index=4 ) campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() # check that numerator and denominator option work cdp_pct_missed_1 = DataPointComputed.objects.filter( indicator_id=pct_missed.id)[0] self.assertEqual(cdp_pct_missed_1.value, num_missed_val / float(num_seen_val)) dp_num_vaccinated = DataPoint.objects.create( indicator_id=num_vacc.id, location_id=self.top_lvl_location.id, campaign_id=self.campaign_id, value=num_vacc_val, source_submission_id=ss_id, unique_index=5 ) campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() # check that this works when we can do whole/part of difference cdp_pct_missed_2 = 
DataPointComputed.objects.filter( indicator_id=pct_missed.id)[0] 1.0 - float(num_vacc_val) / float(num_seen_val) self.assertEqual(cdp_pct_missed_2.value, 0.45) # check that this works when we can only do whole/part of difference DataPoint.objects.filter(indicator_id=num_missed.id).delete() campaign_object = Campaign.objects.get(id = self.campaign_id) campaign_object.aggregate_and_calculate() cdp_pct_missed_3 = DataPointComputed.objects.filter( indicator_id=pct_missed.id)[0] self.assertEqual(cdp_pct_missed_3.value, 0.45)<|fim▁end|>
name='Number of Deaths due to Hunger', short_name='Number of Deaths due to Hunger', data_format='int' )
<|file_name|>jsonconfig.py<|end_file_name|><|fim▁begin|>""" Read a dictionary from a JSON file, and add its contents to a Python dictionary. """ import json import types from instmakelib import rtimport INSTMAKE_SITE_DIR = "instmakesite" # These are the supported field names # =================================== # The name of the plugin (without ".py") for logging # usage of instmake CONFIG_USAGE_LOGGER = "usage-logger" # The name of the plugin (without ".py") for normalizing # path names in the clidiff report. CONFIG_CLIDIFF_NORMPATH = "clidiff-normpath" def update(caller_config, json_filename): # This will throw errors fh = open(json_filename) file_config = json.load(fh) fh.close() assert type(file_config) == types.DictType caller_config.update(file_config) def load_site_plugin(name): """Import a plugin from the instmakesite directory. The import can throw exceptions that the caller has to catch.""" plugin_name = INSTMAKE_SITE_DIR + "." + name<|fim▁hole|><|fim▁end|>
return rtimport.rtimport(plugin_name)
<|file_name|>faForceEquation.H<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------*\ ## #### ###### | ## ## ## | Copyright: ICE Stroemungsfoschungs GmbH ## ## #### | ## ## ## | http://www.ice-sf.at ## #### ###### | ------------------------------------------------------------------------------- ========= | \\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / O peration | \\ / A nd | Copyright held by original author \\/ M anipulation | ------------------------------------------------------------------------------- License This file is based on OpenFOAM. OpenFOAM is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. OpenFOAM is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>. Class Foam::faForceEquation Description Force a fvMatrix to fixed values in specific places <|fim▁hole|>SourceFiles faForceEquation.C Contributors/Copyright: 2011, 2013-2014 Bernhard F.W. 
Gschaider <[email protected]> SWAK Revision: $Id$ \*---------------------------------------------------------------------------*/ #ifndef faForceEquation_H #define faForceEquation_H #include "FaFieldValueExpressionDriver.H" #include "faMatrix.H" #include "DynamicList.H" // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // namespace Foam { /*---------------------------------------------------------------------------*\ Class faForceEquation Declaration \*---------------------------------------------------------------------------*/ template<class T> class faForceEquation : protected FaFieldValueExpressionDriver { // Private data faForceEquation(const faForceEquation&); string valueExpression_; string maskExpression_; bool verbose_; bool getMask(DynamicList<label> &,const word &psi); public: // Constructors //- Construct from a dictionary faForceEquation ( const dictionary& , const fvMesh& ); // Destructor virtual ~faForceEquation(); //- fix equations void operator()(faMatrix<T> &); //- where are the equations fixed tmp<areaScalarField> getMask(); }; // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // } // End namespace Foam // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // #endif // ************************************************************************* //<|fim▁end|>
<|file_name|>use-mod-4.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-flags: -Z parse-only <|fim▁hole|><|fim▁end|>
use foo::self; //~^ ERROR expected identifier, found keyword `self` fn main() {}
<|file_name|>VerticalLinearizer2DTest.cpp<|end_file_name|><|fim▁begin|>#define BOOST_TEST_DYN_LINK #include <boost/test/unit_test.hpp> #include "utils/VerticalLinearizer2D.hpp" #include <cstdlib> void verticalLinearizer2D_test_array(const std::size_t width, const std::size_t height) {<|fim▁hole|> for(int y = -20; y < height; ++y) { for(int x = -10; x < width; ++x) { BOOST_REQUIRE(linearizer.getX(linearized) == x); BOOST_REQUIRE(linearizer.getY(linearized) == y); BOOST_REQUIRE(linearizer.linearize(x, y) == linearized); linearized += 1; } } } BOOST_AUTO_TEST_CASE( VerticalLinearizer2DTest ) { verticalLinearizer2D_test_array(0, 0); verticalLinearizer2D_test_array(10, 10); verticalLinearizer2D_test_array(20, 60); verticalLinearizer2D_test_array(60, 20); }<|fim▁end|>
VerticalLinearizer2D linearizer (width, height, -10, -20); std::size_t linearized = 0;
<|file_name|>selector_matching.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Selector matching. use dom::PresentationalHintsSynthetizer; use element_state::*; use error_reporting::StdoutErrorReporter; use keyframes::KeyframesAnimation; use media_queries::{Device, MediaType}; use properties::{self, PropertyDeclaration, PropertyDeclarationBlock, ComputedValues}; use restyle_hints::{RestyleHint, DependencySet}; use selector_impl::{ElementExt, TheSelectorImpl, PseudoElement}; use selectors::Element; use selectors::bloom::BloomFilter; use selectors::matching::DeclarationBlock as GenericDeclarationBlock; use selectors::matching::{Rule, SelectorMap}; use sink::Push; use smallvec::VecLike; use std::collections::HashMap; use std::hash::BuildHasherDefault; use std::sync::Arc; use string_cache::Atom; use style_traits::viewport::ViewportConstraints; use stylesheets::{CSSRule, CSSRuleIteratorExt, Origin, Stylesheet}; use viewport::{MaybeNew, ViewportRuleCascade}; pub type DeclarationBlock = GenericDeclarationBlock<Vec<PropertyDeclaration>>; /// This structure holds all the selectors and device characteristics /// for a given document. The selectors are converted into `Rule`s /// (defined in rust-selectors), and introduced in a `SelectorMap` /// depending on the pseudo-element (see `PerPseudoElementSelectorMap`), /// stylesheet origin (see `PerOriginSelectorMap`), and priority /// (see the `normal` and `important` fields in `PerOriginSelectorMap`). /// /// This structure is effectively created once per pipeline, in the /// LayoutThread corresponding to that pipeline. #[cfg_attr(feature = "servo", derive(HeapSizeOf))] pub struct Stylist { /// Device that the stylist is currently evaluating against. pub device: Device, /// Viewport constraints based on the current device. 
viewport_constraints: Option<ViewportConstraints>, /// If true, the quirks-mode stylesheet is applied. quirks_mode: bool, /// If true, the device has changed, and the stylist needs to be updated. is_device_dirty: bool, /// The current selector maps, after evaluating media /// rules against the current device. element_map: PerPseudoElementSelectorMap, /// The selector maps corresponding to a given pseudo-element /// (depending on the implementation) pseudos_map: HashMap<PseudoElement, PerPseudoElementSelectorMap, BuildHasherDefault<::fnv::FnvHasher>>, /// A map with all the animations indexed by name. animations: HashMap<Atom, KeyframesAnimation>, /// Applicable declarations for a given non-eagerly cascaded pseudo-element. /// These are eagerly computed once, and then used to resolve the new /// computed values on the fly on layout. precomputed_pseudo_element_decls: HashMap<PseudoElement, Vec<DeclarationBlock>, BuildHasherDefault<::fnv::FnvHasher>>, rules_source_order: usize, /// Selector dependencies used to compute restyle hints. state_deps: DependencySet, } impl Stylist { #[inline] pub fn new(device: Device) -> Self { let mut stylist = Stylist { viewport_constraints: None, device: device, is_device_dirty: true, quirks_mode: false, element_map: PerPseudoElementSelectorMap::new(), pseudos_map: HashMap::with_hasher(Default::default()), animations: HashMap::with_hasher(Default::default()), precomputed_pseudo_element_decls: HashMap::with_hasher(Default::default()), rules_source_order: 0, state_deps: DependencySet::new(), }; TheSelectorImpl::each_eagerly_cascaded_pseudo_element(|pseudo| { stylist.pseudos_map.insert(pseudo, PerPseudoElementSelectorMap::new()); }); // FIXME: Add iso-8859-9.css when the document’s encoding is ISO-8859-8. 
stylist } pub fn update(&mut self, doc_stylesheets: &[Arc<Stylesheet>], stylesheets_changed: bool) -> bool { if !(self.is_device_dirty || stylesheets_changed) { return false; } self.element_map = PerPseudoElementSelectorMap::new(); self.pseudos_map = HashMap::with_hasher(Default::default()); self.animations = HashMap::with_hasher(Default::default()); TheSelectorImpl::each_eagerly_cascaded_pseudo_element(|pseudo| { self.pseudos_map.insert(pseudo, PerPseudoElementSelectorMap::new()); }); self.precomputed_pseudo_element_decls = HashMap::with_hasher(Default::default()); self.rules_source_order = 0; self.state_deps.clear(); for ref stylesheet in TheSelectorImpl::get_user_or_user_agent_stylesheets().iter() { self.add_stylesheet(&stylesheet); } if self.quirks_mode { if let Some(s) = TheSelectorImpl::get_quirks_mode_stylesheet() { self.add_stylesheet(s); } } for ref stylesheet in doc_stylesheets.iter() { self.add_stylesheet(stylesheet); } self.is_device_dirty = false; true } fn add_stylesheet(&mut self, stylesheet: &Stylesheet) { if !stylesheet.is_effective_for_device(&self.device) { return; } let mut rules_source_order = self.rules_source_order; // Take apart the StyleRule into individual Rules and insert // them into the SelectorMap of that priority. macro_rules! 
append( ($style_rule: ident, $priority: ident) => { if !$style_rule.declarations.$priority.is_empty() { for selector in &$style_rule.selectors { let map = if let Some(ref pseudo) = selector.pseudo_element { self.pseudos_map .entry(pseudo.clone()) .or_insert_with(PerPseudoElementSelectorMap::new) .borrow_for_origin(&stylesheet.origin) } else { self.element_map.borrow_for_origin(&stylesheet.origin) }; map.$priority.insert(Rule { selector: selector.compound_selectors.clone(), declarations: DeclarationBlock { specificity: selector.specificity, declarations: $style_rule.declarations.$priority.clone(), source_order: rules_source_order, }, }); } } }; ); for rule in stylesheet.effective_rules(&self.device) { match *rule { CSSRule::Style(ref style_rule) => { append!(style_rule, normal); append!(style_rule, important); rules_source_order += 1; for selector in &style_rule.selectors { self.state_deps.note_selector(selector.compound_selectors.clone()); } self.rules_source_order = rules_source_order; } CSSRule::Keyframes(ref keyframes_rule) => { debug!("Found valid keyframes rule: {:?}", keyframes_rule); if let Some(animation) = KeyframesAnimation::from_keyframes(&keyframes_rule.keyframes) { debug!("Found valid keyframe animation: {:?}", animation); self.animations.insert(keyframes_rule.name.clone(), animation); } else { // If there's a valid keyframes rule, even if it doesn't // produce an animation, should shadow other animations // with the same name. self.animations.remove(&keyframes_rule.name); } } // We don't care about any other rule. _ => {} } } TheSelectorImpl::each_precomputed_pseudo_element(|pseudo| { // TODO: Consider not doing this and just getting the rules on the // fly. It should be a bit slower, but we'd take rid of the // extra field, and avoid this precomputation entirely. 
if let Some(map) = self.pseudos_map.remove(&pseudo) { let mut declarations = vec![]; map.user_agent.normal.get_universal_rules(&mut declarations); map.user_agent.important.get_universal_rules(&mut declarations); self.precomputed_pseudo_element_decls.insert(pseudo, declarations); } }) } /// Computes the style for a given "precomputed" pseudo-element, taking the /// universal rules and applying them. pub fn precomputed_values_for_pseudo(&self, pseudo: &PseudoElement, parent: Option<&Arc<ComputedValues>>) -> Option<Arc<ComputedValues>> { debug_assert!(TheSelectorImpl::pseudo_element_cascade_type(pseudo).is_precomputed()); if let Some(declarations) = self.precomputed_pseudo_element_decls.get(pseudo) { let (computed, _) = properties::cascade(self.device.au_viewport_size(), &declarations, false, parent.map(|p| &**p), None, None, Box::new(StdoutErrorReporter)); Some(Arc::new(computed)) } else { parent.map(|p| p.clone()) } } pub fn lazily_compute_pseudo_element_style<E>(&self, element: &E, pseudo: &PseudoElement, parent: &Arc<ComputedValues>) -> Option<Arc<ComputedValues>> where E: Element<Impl=TheSelectorImpl> + PresentationalHintsSynthetizer { debug_assert!(TheSelectorImpl::pseudo_element_cascade_type(pseudo).is_lazy()); if self.pseudos_map.get(pseudo).is_none() { return None; } let mut declarations = vec![]; // NB: This being cached could be worth it, maybe allow an optional // ApplicableDeclarationsCache?. 
self.push_applicable_declarations(element, None, None, Some(pseudo), &mut declarations); let (computed, _) = properties::cascade(self.device.au_viewport_size(), &declarations, false, Some(&**parent), None, None, Box::new(StdoutErrorReporter)); Some(Arc::new(computed)) } pub fn set_device(&mut self, mut device: Device, stylesheets: &[Arc<Stylesheet>]) { let cascaded_rule = stylesheets.iter() .flat_map(|s| s.effective_rules(&self.device).viewport()) .cascade(); self.viewport_constraints = ViewportConstraints::maybe_new(device.viewport_size, &cascaded_rule); if let Some(ref constraints) = self.viewport_constraints { device = Device::new(MediaType::Screen, constraints.size); } self.is_device_dirty |= stylesheets.iter().any(|stylesheet| { stylesheet.rules().media().any(|media_rule| media_rule.evaluate(&self.device) != media_rule.evaluate(&device)) }); self.device = device; } pub fn viewport_constraints(&self) -> &Option<ViewportConstraints> { &self.viewport_constraints } pub fn set_quirks_mode(&mut self, enabled: bool) { self.quirks_mode = enabled; } /// Returns the applicable CSS declarations for the given element. /// This corresponds to `ElementRuleCollector` in WebKit. /// /// The returned boolean indicates whether the style is *shareable*; /// that is, whether the matched selectors are simple enough to allow the /// matching logic to be reduced to the logic in /// `css::matching::PrivateMatchMethods::candidate_element_allows_for_style_sharing`. 
pub fn push_applicable_declarations<E, V>( &self, element: &E, parent_bf: Option<&BloomFilter>, style_attribute: Option<&PropertyDeclarationBlock>, pseudo_element: Option<&PseudoElement>, applicable_declarations: &mut V) -> bool where E: Element<Impl=TheSelectorImpl> + PresentationalHintsSynthetizer, V: Push<DeclarationBlock> + VecLike<DeclarationBlock> { assert!(!self.is_device_dirty); assert!(style_attribute.is_none() || pseudo_element.is_none(), "Style attributes do not apply to pseudo-elements"); debug_assert!(pseudo_element.is_none() || !TheSelectorImpl::pseudo_element_cascade_type(pseudo_element.as_ref().unwrap()) .is_precomputed()); let map = match pseudo_element { Some(ref pseudo) => self.pseudos_map.get(pseudo).unwrap(), None => &self.element_map, }; let mut shareable = true; // Step 1: Normal user-agent rules. map.user_agent.normal.get_all_matching_rules(element, parent_bf, applicable_declarations, &mut shareable); // Step 2: Presentational hints. let length = applicable_declarations.len(); element.synthesize_presentational_hints_for_legacy_attributes(applicable_declarations); if applicable_declarations.len() != length { // Never share style for elements with preshints shareable = false;<|fim▁hole|> map.user.normal.get_all_matching_rules(element, parent_bf, applicable_declarations, &mut shareable); map.author.normal.get_all_matching_rules(element, parent_bf, applicable_declarations, &mut shareable); // Step 4: Normal style attributes. style_attribute.map(|sa| { shareable = false; Push::push( applicable_declarations, GenericDeclarationBlock::from_declarations(sa.normal.clone())) }); // Step 5: Author-supplied `!important` rules. map.author.important.get_all_matching_rules(element, parent_bf, applicable_declarations, &mut shareable); // Step 6: `!important` style attributes. 
style_attribute.map(|sa| { shareable = false; Push::push( applicable_declarations, GenericDeclarationBlock::from_declarations(sa.important.clone())) }); // Step 7: User and UA `!important` rules. map.user.important.get_all_matching_rules(element, parent_bf, applicable_declarations, &mut shareable); map.user_agent.important.get_all_matching_rules(element, parent_bf, applicable_declarations, &mut shareable); shareable } #[inline] pub fn is_device_dirty(&self) -> bool { self.is_device_dirty } #[inline] pub fn animations(&self) -> &HashMap<Atom, KeyframesAnimation> { &self.animations } pub fn compute_restyle_hint<E>(&self, element: &E, snapshot: &E::Snapshot, // NB: We need to pass current_state as an argument because // selectors::Element doesn't provide access to ElementState // directly, and computing it from the ElementState would be // more expensive than getting it directly from the caller. current_state: ElementState) -> RestyleHint where E: ElementExt + Clone { self.state_deps.compute_hint(element, snapshot, current_state) } } /// Map that contains the CSS rules for a given origin. #[cfg_attr(feature = "servo", derive(HeapSizeOf))] struct PerOriginSelectorMap { /// Rules that contains at least one property declararion with /// normal importance. normal: SelectorMap<Vec<PropertyDeclaration>, TheSelectorImpl>, /// Rules that contains at least one property declararion with /// !important. important: SelectorMap<Vec<PropertyDeclaration>, TheSelectorImpl>, } impl PerOriginSelectorMap { #[inline] fn new() -> Self { PerOriginSelectorMap { normal: SelectorMap::new(), important: SelectorMap::new(), } } } /// Map that contains the CSS rules for a specific PseudoElement /// (or lack of PseudoElement). 
#[cfg_attr(feature = "servo", derive(HeapSizeOf))] struct PerPseudoElementSelectorMap { /// Rules from user agent stylesheets user_agent: PerOriginSelectorMap, /// Rules from author stylesheets author: PerOriginSelectorMap, /// Rules from user stylesheets user: PerOriginSelectorMap, } impl PerPseudoElementSelectorMap { #[inline] fn new() -> Self { PerPseudoElementSelectorMap { user_agent: PerOriginSelectorMap::new(), author: PerOriginSelectorMap::new(), user: PerOriginSelectorMap::new(), } } #[inline] fn borrow_for_origin(&mut self, origin: &Origin) -> &mut PerOriginSelectorMap { match *origin { Origin::UserAgent => &mut self.user_agent, Origin::Author => &mut self.author, Origin::User => &mut self.user, } } }<|fim▁end|>
} // Step 3: User and author normal rules.
<|file_name|>callee.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Handles translation of callees as well as other call-related //! things. Callees are a superset of normal rust values and sometimes //! have different representations. In particular, top-level fn items //! and methods are represented as just a fn ptr and not a full //! closure. pub use self::AutorefArg::*; pub use self::CalleeData::*; pub use self::CallArgs::*; use arena::TypedArena; use back::link; use session; use llvm::{ValueRef}; use llvm::get_param; use llvm; use metadata::csearch; use middle::def; use middle::subst; use middle::subst::{Subst, Substs}; use trans::adt; use trans::base; use trans::base::*; use trans::build::*; use trans::callee; use trans::cleanup; use trans::cleanup::CleanupMethods; use trans::closure; use trans::common::{self, Block, Result, NodeIdAndSpan, ExprId, CrateContext, ExprOrMethodCall, FunctionContext, MethodCallKey}; use trans::consts; use trans::datum::*; use trans::debuginfo::{DebugLoc, ToDebugLoc}; use trans::expr; use trans::glue; use trans::inline; use trans::foreign; use trans::intrinsic; use trans::meth; use trans::monomorphize; use trans::type_::Type; use trans::type_of; use middle::ty::{self, Ty}; use middle::ty::MethodCall; use util::ppaux::Repr; use util::ppaux::ty_to_string; use syntax::abi as synabi; use syntax::ast; use syntax::ast_map; use syntax::ptr::P; #[derive(Copy)] pub struct MethodData { pub llfn: ValueRef, pub llself: ValueRef, } pub enum CalleeData<'tcx> { // Constructor for enum 
variant/tuple-like-struct // i.e. Some, Ok NamedTupleConstructor(subst::Substs<'tcx>, ty::Disr), // Represents a (possibly monomorphized) top-level fn item or method // item. Note that this is just the fn-ptr and is not a Rust closure // value (which is a pair). Fn(/* llfn */ ValueRef), Intrinsic(ast::NodeId, subst::Substs<'tcx>), TraitItem(MethodData) } pub struct Callee<'blk, 'tcx: 'blk> { pub bcx: Block<'blk, 'tcx>, pub data: CalleeData<'tcx>, } fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr) -> Callee<'blk, 'tcx> { let _icx = push_ctxt("trans_callee"); debug!("callee::trans(expr={})", expr.repr(bcx.tcx())); // pick out special kinds of expressions that can be called: match expr.node { ast::ExprPath(..) => { return trans_def(bcx, bcx.def(expr.id), expr); } _ => {} } // any other expressions are closures: return datum_callee(bcx, expr); fn datum_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr) -> Callee<'blk, 'tcx> { let DatumBlock { bcx, datum, .. } = expr::trans(bcx, expr); match datum.ty.sty { ty::ty_bare_fn(..) 
=> { let llval = datum.to_llscalarish(bcx); return Callee { bcx: bcx, data: Fn(llval), }; } _ => { bcx.tcx().sess.span_bug( expr.span, &format!("type of callee is neither bare-fn nor closure: \ {}", bcx.ty_to_string(datum.ty))); } } } fn fn_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, llfn: ValueRef) -> Callee<'blk, 'tcx> { return Callee { bcx: bcx, data: Fn(llfn), }; } fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, def: def::Def, ref_expr: &ast::Expr) -> Callee<'blk, 'tcx> { debug!("trans_def(def={}, ref_expr={})", def.repr(bcx.tcx()), ref_expr.repr(bcx.tcx())); let expr_ty = common::node_id_type(bcx, ref_expr.id); match def { def::DefFn(did, _) if { let maybe_def_id = inline::get_local_instance(bcx.ccx(), did); let maybe_ast_node = maybe_def_id.and_then(|def_id| bcx.tcx().map .find(def_id.node)); match maybe_ast_node { Some(ast_map::NodeStructCtor(_)) => true, _ => false } } => { let substs = common::node_id_substs(bcx.ccx(), ExprId(ref_expr.id), bcx.fcx.param_substs); Callee { bcx: bcx, data: NamedTupleConstructor(substs, 0) } } def::DefFn(did, _) if match expr_ty.sty { ty::ty_bare_fn(_, ref f) => f.abi == synabi::RustIntrinsic, _ => false } => { let substs = common::node_id_substs(bcx.ccx(), ExprId(ref_expr.id), bcx.fcx.param_substs); let def_id = inline::maybe_instantiate_inline(bcx.ccx(), did); Callee { bcx: bcx, data: Intrinsic(def_id.node, substs) } } def::DefFn(did, _) | def::DefMethod(did, def::FromImpl(_)) => { fn_callee(bcx, trans_fn_ref(bcx.ccx(), did, ExprId(ref_expr.id), bcx.fcx.param_substs).val) } def::DefMethod(meth_did, def::FromTrait(trait_did)) => { fn_callee(bcx, meth::trans_static_method_callee(bcx.ccx(), meth_did, trait_did, ref_expr.id, bcx.fcx.param_substs).val) } def::DefVariant(tid, vid, _) => { let vinfo = ty::enum_variant_with_id(bcx.tcx(), tid, vid); let substs = common::node_id_substs(bcx.ccx(), ExprId(ref_expr.id), bcx.fcx.param_substs); // Nullary variants are not callable assert!(vinfo.args.len() > 0); Callee { bcx: bcx, data: 
NamedTupleConstructor(substs, vinfo.disr_val) } } def::DefStruct(_) => { let substs = common::node_id_substs(bcx.ccx(), ExprId(ref_expr.id), bcx.fcx.param_substs); Callee { bcx: bcx, data: NamedTupleConstructor(substs, 0) } } def::DefStatic(..) | def::DefConst(..) | def::DefLocal(..) | def::DefUpvar(..) => { datum_callee(bcx, ref_expr) } def::DefMod(..) | def::DefForeignMod(..) | def::DefTrait(..) | def::DefTy(..) | def::DefPrimTy(..) | def::DefAssociatedTy(..) | def::DefUse(..) | def::DefRegion(..) | def::DefLabel(..) | def::DefTyParam(..) | def::DefSelfTy(..) => { bcx.tcx().sess.span_bug( ref_expr.span, &format!("cannot translate def {:?} \ to a callable thing!", def)); } } } } /// Translates a reference (with id `ref_id`) to the fn/method with id `def_id` into a function /// pointer. This may require monomorphization or inlining. pub fn trans_fn_ref<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: ast::DefId, node: ExprOrMethodCall, param_substs: &'tcx subst::Substs<'tcx>) -> Datum<'tcx, Rvalue> { let _icx = push_ctxt("trans_fn_ref"); let substs = common::node_id_substs(ccx, node, param_substs); debug!("trans_fn_ref(def_id={}, node={:?}, substs={})", def_id.repr(ccx.tcx()), node, substs.repr(ccx.tcx())); trans_fn_ref_with_substs(ccx, def_id, node, param_substs, substs) } fn trans_fn_ref_with_substs_to_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, def_id: ast::DefId, ref_id: ast::NodeId, substs: subst::Substs<'tcx>) -> Callee<'blk, 'tcx> { Callee { bcx: bcx, data: Fn(trans_fn_ref_with_substs(bcx.ccx(), def_id, ExprId(ref_id), bcx.fcx.param_substs, substs).val), } } /// Translates an adapter that implements the `Fn` trait for a fn /// pointer. This is basically the equivalent of something like: /// /// ``` /// impl<'a> Fn(&'a int) -> &'a int for fn(&int) -> &int { /// extern "rust-abi" fn call(&self, args: (&'a int,)) -> &'a int { /// (*self)(args.0) /// } /// } /// ``` /// /// but for the bare function type given. 
pub fn trans_fn_pointer_shim<'a, 'tcx>( ccx: &'a CrateContext<'a, 'tcx>, bare_fn_ty: Ty<'tcx>) -> ValueRef { let _icx = push_ctxt("trans_fn_pointer_shim"); let tcx = ccx.tcx(); let bare_fn_ty = common::erase_regions(tcx, &bare_fn_ty); match ccx.fn_pointer_shims().borrow().get(&bare_fn_ty) { Some(&llval) => { return llval; } None => { } } debug!("trans_fn_pointer_shim(bare_fn_ty={})", bare_fn_ty.repr(tcx)); // This is an impl of `Fn` trait, so receiver is `&self`. let bare_fn_ty_ref = ty::mk_imm_rptr(tcx, tcx.mk_region(ty::ReStatic), bare_fn_ty); // Construct the "tuply" version of `bare_fn_ty`. It takes two arguments: `self`, // which is the fn pointer, and `args`, which is the arguments tuple. let (opt_def_id, sig) = match bare_fn_ty.sty { ty::ty_bare_fn(opt_def_id, &ty::BareFnTy { unsafety: ast::Unsafety::Normal, abi: synabi::Rust, ref sig }) => { (opt_def_id, sig) } _ => { tcx.sess.bug(&format!("trans_fn_pointer_shim invoked on invalid type: {}", bare_fn_ty.repr(tcx))); } }; let sig = ty::erase_late_bound_regions(tcx, sig); let tuple_input_ty = ty::mk_tup(tcx, sig.inputs.to_vec()); let tuple_fn_ty = ty::mk_bare_fn(tcx, opt_def_id, tcx.mk_bare_fn(ty::BareFnTy { unsafety: ast::Unsafety::Normal, abi: synabi::RustCall, sig: ty::Binder(ty::FnSig { inputs: vec![bare_fn_ty_ref, tuple_input_ty], output: sig.output, variadic: false })})); debug!("tuple_fn_ty: {}", tuple_fn_ty.repr(tcx)); // let function_name = link::mangle_internal_name_by_type_and_seq(ccx, bare_fn_ty, "fn_pointer_shim"); let llfn = decl_internal_rust_fn(ccx, tuple_fn_ty, &function_name[..]); // let empty_substs = tcx.mk_substs(Substs::trans_empty()); let (block_arena, fcx): (TypedArena<_>, FunctionContext); block_arena = TypedArena::new(); fcx = new_fn_ctxt(ccx, llfn, ast::DUMMY_NODE_ID, false, sig.output, empty_substs, None, &block_arena); let mut bcx = init_function(&fcx, false, sig.output); // the first argument (`self`) will be ptr to the the fn pointer let llfnpointer = Load(bcx, 
get_param(fcx.llfn, fcx.arg_pos(0) as u32)); // the remaining arguments will be the untupled values let llargs: Vec<_> = sig.inputs.iter() .enumerate() .map(|(i, _)| get_param(fcx.llfn, fcx.arg_pos(i+1) as u32)) .collect(); assert!(!fcx.needs_ret_allocas); let dest = fcx.llretslotptr.get().map(|_| expr::SaveIn(fcx.get_ret_slot(bcx, sig.output, "ret_slot")) ); bcx = trans_call_inner(bcx, DebugLoc::None, bare_fn_ty, |bcx, _| Callee { bcx: bcx, data: Fn(llfnpointer) }, ArgVals(&llargs[..]), dest).bcx; finish_fn(&fcx, bcx, sig.output, DebugLoc::None); ccx.fn_pointer_shims().borrow_mut().insert(bare_fn_ty, llfn); llfn } /// Translates a reference to a fn/method item, monomorphizing and /// inlining as it goes. /// /// # Parameters /// /// - `ccx`: the crate context /// - `def_id`: def id of the fn or method item being referenced /// - `node`: node id of the reference to the fn/method, if applicable. /// This parameter may be zero; but, if so, the resulting value may not /// have the right type, so it must be cast before being used. /// - `param_substs`: if the `node` is in a polymorphic function, these /// are the substitutions required to monomorphize its type /// - `substs`: values for each of the fn/method's parameters pub fn trans_fn_ref_with_substs<'a, 'tcx>( ccx: &CrateContext<'a, 'tcx>, def_id: ast::DefId, node: ExprOrMethodCall, param_substs: &'tcx subst::Substs<'tcx>, substs: subst::Substs<'tcx>) -> Datum<'tcx, Rvalue> { let _icx = push_ctxt("trans_fn_ref_with_substs"); let tcx = ccx.tcx(); debug!("trans_fn_ref_with_substs(def_id={}, node={:?}, \ param_substs={}, substs={})", def_id.repr(tcx), node, param_substs.repr(tcx), substs.repr(tcx)); assert!(substs.types.all(|t| !ty::type_needs_infer(*t))); assert!(substs.types.all(|t| !ty::type_has_escaping_regions(*t))); let substs = substs.erase_regions(); // Load the info for the appropriate trait if necessary. 
match ty::trait_of_item(tcx, def_id) { None => {} Some(trait_id) => { ty::populate_implementations_for_trait_if_necessary(tcx, trait_id) } } // We need to do a bunch of special handling for default methods. // We need to modify the def_id and our substs in order to monomorphize // the function. let (is_default, def_id, substs) = match ty::provided_source(tcx, def_id) { None => { (false, def_id, tcx.mk_substs(substs)) } Some(source_id) => { // There are two relevant substitutions when compiling // default methods. First, there is the substitution for // the type parameters of the impl we are using and the // method we are calling. This substitution is the substs // argument we already have. // In order to compile a default method, though, we need // to consider another substitution: the substitution for // the type parameters on trait; the impl we are using // implements the trait at some particular type // parameters, and we need to substitute for those first. // So, what we need to do is find this substitution and // compose it with the one we already have. let impl_id = ty::impl_or_trait_item(tcx, def_id).container() .id(); let impl_or_trait_item = ty::impl_or_trait_item(tcx, source_id); match impl_or_trait_item { ty::MethodTraitItem(method) => { let trait_ref = ty::impl_trait_ref(tcx, impl_id).unwrap(); // Compute the first substitution let first_subst = ty::make_substs_for_receiver_types(tcx, &*trait_ref, &*method) .erase_regions(); // And compose them let new_substs = tcx.mk_substs(first_subst.subst(tcx, &substs)); debug!("trans_fn_with_vtables - default method: \ substs = {}, trait_subst = {}, \ first_subst = {}, new_subst = {}", substs.repr(tcx), trait_ref.substs.repr(tcx), first_subst.repr(tcx), new_substs.repr(tcx)); (true, source_id, new_substs) } ty::TypeTraitItem(_) => { tcx.sess.bug("trans_fn_ref_with_vtables() tried \ to translate an associated type?!") } } } }; // If this is a closure, redirect to it. 
match closure::get_or_create_declaration_if_closure(ccx, def_id, substs) { None => {} Some(llfn) => return llfn, } // Check whether this fn has an inlined copy and, if so, redirect // def_id to the local id of the inlined copy. let def_id = inline::maybe_instantiate_inline(ccx, def_id); // We must monomorphise if the fn has type parameters, is a default method, // or is a named tuple constructor. let must_monomorphise = if !substs.types.is_empty() || is_default { true } else if def_id.krate == ast::LOCAL_CRATE { let map_node = session::expect( ccx.sess(), tcx.map.find(def_id.node), || "local item should be in ast map".to_string()); match map_node { ast_map::NodeVariant(v) => match v.node.kind { ast::TupleVariantKind(ref args) => args.len() > 0, _ => false }, ast_map::NodeStructCtor(_) => true, _ => false } } else { false }; // Create a monomorphic version of generic functions if must_monomorphise { // Should be either intra-crate or inlined. assert_eq!(def_id.krate, ast::LOCAL_CRATE); let opt_ref_id = match node { ExprId(id) => if id != 0 { Some(id) } else { None }, MethodCallKey(_) => None, }; let (val, fn_ty, must_cast) = monomorphize::monomorphic_fn(ccx, def_id, substs, opt_ref_id); if must_cast && node != ExprId(0) { // Monotype of the REFERENCE to the function (type params // are subst'd) let ref_ty = match node { ExprId(id) => ty::node_id_to_type(tcx, id), MethodCallKey(method_call) => { (*tcx.method_map.borrow())[method_call].ty } }; let ref_ty = monomorphize::apply_param_substs(tcx, param_substs, &ref_ty); let llptrty = type_of::type_of_fn_from_ty(ccx, ref_ty).ptr_to(); if llptrty != common::val_ty(val) { let val = consts::ptrcast(val, llptrty); return Datum::new(val, ref_ty, Rvalue::new(ByValue)); } } return Datum::new(val, fn_ty, Rvalue::new(ByValue)); } // Type scheme of the function item (may have type params) let fn_type_scheme = ty::lookup_item_type(tcx, def_id); let fn_type = monomorphize::normalize_associated_type(tcx, &fn_type_scheme.ty); // Find 
the actual function pointer. let mut val = { if def_id.krate == ast::LOCAL_CRATE { // Internal reference. get_item_val(ccx, def_id.node) } else { // External reference. trans_external_path(ccx, def_id, fn_type) } }; // This is subtle and surprising, but sometimes we have to bitcast // the resulting fn pointer. The reason has to do with external // functions. If you have two crates that both bind the same C // library, they may not use precisely the same types: for // example, they will probably each declare their own structs, // which are distinct types from LLVM's point of view (nominal // types). // // Now, if those two crates are linked into an application, and // they contain inlined code, you can wind up with a situation // where both of those functions wind up being loaded into this // application simultaneously. In that case, the same function // (from LLVM's point of view) requires two types. But of course // LLVM won't allow one function to have two types. // // What we currently do, therefore, is declare the function with // one of the two types (whichever happens to come first) and then // bitcast as needed when the function is referenced to make sure // it has the type we expect. // // This can occur on either a crate-local or crate-external // reference. It also occurs when testing libcore and in some // other weird situations. Annoying. 
let llty = type_of::type_of_fn_from_ty(ccx, fn_type); let llptrty = llty.ptr_to(); if common::val_ty(val) != llptrty { debug!("trans_fn_ref_with_vtables(): casting pointer!"); val = consts::ptrcast(val, llptrty); } else { debug!("trans_fn_ref_with_vtables(): not casting pointer!"); } Datum::new(val, fn_type, Rvalue::new(ByValue)) } // ______________________________________________________________________ // Translating calls pub fn trans_call<'a, 'blk, 'tcx>(in_cx: Block<'blk, 'tcx>, call_expr: &ast::Expr, f: &ast::Expr, args: CallArgs<'a, 'tcx>, dest: expr::Dest) -> Block<'blk, 'tcx> { let _icx = push_ctxt("trans_call"); trans_call_inner(in_cx, call_expr.debug_loc(), common::expr_ty_adjusted(in_cx, f), |cx, _| trans(cx, f), args, Some(dest)).bcx } pub fn trans_method_call<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, call_expr: &ast::Expr, rcvr: &ast::Expr, args: CallArgs<'a, 'tcx>, dest: expr::Dest) -> Block<'blk, 'tcx> { let _icx = push_ctxt("trans_method_call"); debug!("trans_method_call(call_expr={})", call_expr.repr(bcx.tcx())); let method_call = MethodCall::expr(call_expr.id); let method_ty = match bcx.tcx().method_map.borrow().get(&method_call) { Some(method) => match method.origin { ty::MethodTraitObject(_) => match method.ty.sty { ty::ty_bare_fn(_, ref fty) => { ty::mk_bare_fn(bcx.tcx(), None, meth::opaque_method_ty(bcx.tcx(), fty)) } _ => method.ty },<|fim▁hole|> }; trans_call_inner( bcx, call_expr.debug_loc(), common::monomorphize_type(bcx, method_ty), |cx, arg_cleanup_scope| { meth::trans_method_callee(cx, method_call, Some(rcvr), arg_cleanup_scope) }, args, Some(dest)).bcx } pub fn trans_lang_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, did: ast::DefId, args: &[ValueRef], dest: Option<expr::Dest>, debug_loc: DebugLoc) -> Result<'blk, 'tcx> { let fty = if did.krate == ast::LOCAL_CRATE { ty::node_id_to_type(bcx.tcx(), did.node) } else { csearch::get_type(bcx.tcx(), did).ty }; callee::trans_call_inner(bcx, debug_loc, fty, |bcx, _| { 
trans_fn_ref_with_substs_to_callee(bcx, did, 0, subst::Substs::trans_empty()) }, ArgVals(args), dest) } /// This behemoth of a function translates function calls. Unfortunately, in order to generate more /// efficient LLVM output at -O0, it has quite a complex signature (refactoring this into two /// functions seems like a good idea). /// /// In particular, for lang items, it is invoked with a dest of None, and in that case the return /// value contains the result of the fn. The lang item must not return a structural type or else /// all heck breaks loose. /// /// For non-lang items, `dest` is always Some, and hence the result is written into memory /// somewhere. Nonetheless we return the actual return value of the function. pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, debug_loc: DebugLoc, callee_ty: Ty<'tcx>, get_callee: F, args: CallArgs<'a, 'tcx>, dest: Option<expr::Dest>) -> Result<'blk, 'tcx> where F: FnOnce(Block<'blk, 'tcx>, cleanup::ScopeId) -> Callee<'blk, 'tcx>, { // Introduce a temporary cleanup scope that will contain cleanups // for the arguments while they are being evaluated. The purpose // this cleanup is to ensure that, should a panic occur while // evaluating argument N, the values for arguments 0...N-1 are all // cleaned up. If no panic occurs, the values are handed off to // the callee, and hence none of the cleanups in this temporary // scope will ever execute. 
let fcx = bcx.fcx; let ccx = fcx.ccx; let arg_cleanup_scope = fcx.push_custom_cleanup_scope(); let callee = get_callee(bcx, cleanup::CustomScope(arg_cleanup_scope)); let mut bcx = callee.bcx; let (abi, ret_ty) = match callee_ty.sty { ty::ty_bare_fn(_, ref f) => { let output = ty::erase_late_bound_regions(bcx.tcx(), &f.sig.output()); (f.abi, output) } _ => panic!("expected bare rust fn or closure in trans_call_inner") }; let (llfn, llenv, llself) = match callee.data { Fn(llfn) => { (llfn, None, None) } TraitItem(d) => { (d.llfn, None, Some(d.llself)) } Intrinsic(node, substs) => { assert!(abi == synabi::RustIntrinsic); assert!(dest.is_some()); let call_info = match debug_loc { DebugLoc::At(id, span) => NodeIdAndSpan { id: id, span: span }, DebugLoc::None => { bcx.sess().bug("No call info for intrinsic call?") } }; return intrinsic::trans_intrinsic_call(bcx, node, callee_ty, arg_cleanup_scope, args, dest.unwrap(), substs, call_info); } NamedTupleConstructor(substs, disr) => { assert!(dest.is_some()); fcx.pop_custom_cleanup_scope(arg_cleanup_scope); let ctor_ty = callee_ty.subst(bcx.tcx(), &substs); return base::trans_named_tuple_constructor(bcx, ctor_ty, disr, args, dest.unwrap(), debug_loc); } }; // Intrinsics should not become actual functions. // We trans them in place in `trans_intrinsic_call` assert!(abi != synabi::RustIntrinsic); let is_rust_fn = abi == synabi::Rust || abi == synabi::RustCall; // Generate a location to store the result. If the user does // not care about the result, just make a stack slot. let opt_llretslot = dest.and_then(|dest| match dest { expr::SaveIn(dst) => Some(dst), expr::Ignore => { let ret_ty = match ret_ty { ty::FnConverging(ret_ty) => ret_ty, ty::FnDiverging => ty::mk_nil(ccx.tcx()) }; if !is_rust_fn || type_of::return_uses_outptr(ccx, ret_ty) || bcx.fcx.type_needs_drop(ret_ty) { // Push the out-pointer if we use an out-pointer for this // return type, otherwise push "undef". 
if common::type_is_zero_size(ccx, ret_ty) { let llty = type_of::type_of(ccx, ret_ty); Some(common::C_undef(llty.ptr_to())) } else { Some(alloc_ty(bcx, ret_ty, "__llret")) } } else { None } } }); let mut llresult = unsafe { llvm::LLVMGetUndef(Type::nil(ccx).ptr_to().to_ref()) }; // The code below invokes the function, using either the Rust // conventions (if it is a rust fn) or the native conventions // (otherwise). The important part is that, when all is said // and done, either the return value of the function will have been // written in opt_llretslot (if it is Some) or `llresult` will be // set appropriately (otherwise). if is_rust_fn { let mut llargs = Vec::new(); if let (ty::FnConverging(ret_ty), Some(mut llretslot)) = (ret_ty, opt_llretslot) { if type_of::return_uses_outptr(ccx, ret_ty) { let llformal_ret_ty = type_of::type_of(ccx, ret_ty).ptr_to(); let llret_ty = common::val_ty(llretslot); if llformal_ret_ty != llret_ty { // this could happen due to e.g. subtyping debug!("casting actual return type ({}) to match formal ({})", bcx.llty_str(llret_ty), bcx.llty_str(llformal_ret_ty)); llretslot = PointerCast(bcx, llretslot, llformal_ret_ty); } llargs.push(llretslot); } } // Push the environment (or a trait object's self). match (llenv, llself) { (Some(llenv), None) => llargs.push(llenv), (None, Some(llself)) => llargs.push(llself), _ => {} } // Push the arguments. bcx = trans_args(bcx, args, callee_ty, &mut llargs, cleanup::CustomScope(arg_cleanup_scope), llself.is_some(), abi); fcx.scopes.borrow_mut().last_mut().unwrap().drop_non_lifetime_clean(); // Invoke the actual rust fn and update bcx/llresult. let (llret, b) = base::invoke(bcx, llfn, &llargs[..], callee_ty, debug_loc); bcx = b; llresult = llret; // If the Rust convention for this type is return via // the return value, copy it into llretslot. 
match (opt_llretslot, ret_ty) { (Some(llretslot), ty::FnConverging(ret_ty)) => { if !type_of::return_uses_outptr(bcx.ccx(), ret_ty) && !common::type_is_zero_size(bcx.ccx(), ret_ty) { store_ty(bcx, llret, llretslot, ret_ty) } } (_, _) => {} } } else { // Lang items are the only case where dest is None, and // they are always Rust fns. assert!(dest.is_some()); let mut llargs = Vec::new(); let arg_tys = match args { ArgExprs(a) => a.iter().map(|x| common::expr_ty(bcx, &**x)).collect(), _ => panic!("expected arg exprs.") }; bcx = trans_args(bcx, args, callee_ty, &mut llargs, cleanup::CustomScope(arg_cleanup_scope), false, abi); fcx.scopes.borrow_mut().last_mut().unwrap().drop_non_lifetime_clean(); bcx = foreign::trans_native_call(bcx, callee_ty, llfn, opt_llretslot.unwrap(), &llargs[..], arg_tys, debug_loc); } fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_cleanup_scope); // If the caller doesn't care about the result of this fn call, // drop the temporary slot we made. match (dest, opt_llretslot, ret_ty) { (Some(expr::Ignore), Some(llretslot), ty::FnConverging(ret_ty)) => { // drop the value if it is not being saved. bcx = glue::drop_ty(bcx, llretslot, ret_ty, debug_loc); call_lifetime_end(bcx, llretslot); } _ => {} } if ret_ty == ty::FnDiverging { Unreachable(bcx); } Result::new(bcx, llresult) } pub enum CallArgs<'a, 'tcx> { // Supply value of arguments as a list of expressions that must be // translated. This is used in the common case of `foo(bar, qux)`. ArgExprs(&'a [P<ast::Expr>]), // Supply value of arguments as a list of LLVM value refs; frequently // used with lang items and so forth, when the argument is an internal // value. ArgVals(&'a [ValueRef]), // For overloaded operators: `(lhs, Vec(rhs, rhs_id), autoref)`. `lhs` // is the left-hand-side and `rhs/rhs_id` is the datum/expr-id of // the right-hand-side arguments (if any). 
`autoref` indicates whether the `rhs` // arguments should be auto-referenced ArgOverloadedOp(Datum<'tcx, Expr>, Vec<(Datum<'tcx, Expr>, ast::NodeId)>, bool), // Supply value of arguments as a list of expressions that must be // translated, for overloaded call operators. ArgOverloadedCall(Vec<&'a ast::Expr>), } fn trans_args_under_call_abi<'blk, 'tcx>( mut bcx: Block<'blk, 'tcx>, arg_exprs: &[P<ast::Expr>], fn_ty: Ty<'tcx>, llargs: &mut Vec<ValueRef>, arg_cleanup_scope: cleanup::ScopeId, ignore_self: bool) -> Block<'blk, 'tcx> { let args = ty::erase_late_bound_regions( bcx.tcx(), &ty::ty_fn_args(fn_ty)); // Translate the `self` argument first. if !ignore_self { let arg_datum = unpack_datum!(bcx, expr::trans(bcx, &*arg_exprs[0])); llargs.push(unpack_result!(bcx, { trans_arg_datum(bcx, args[0], arg_datum, arg_cleanup_scope, DontAutorefArg) })) } // Now untuple the rest of the arguments. let tuple_expr = &arg_exprs[1]; let tuple_type = common::node_id_type(bcx, tuple_expr.id); match tuple_type.sty { ty::ty_tup(ref field_types) => { let tuple_datum = unpack_datum!(bcx, expr::trans(bcx, &**tuple_expr)); let tuple_lvalue_datum = unpack_datum!(bcx, tuple_datum.to_lvalue_datum(bcx, "args", tuple_expr.id)); let repr = adt::represent_type(bcx.ccx(), tuple_type); let repr_ptr = &*repr; llargs.extend(field_types.iter().enumerate().map(|(i, field_type)| { let arg_datum = tuple_lvalue_datum.get_element( bcx, field_type, |srcval| { adt::trans_field_ptr(bcx, repr_ptr, srcval, 0, i) }).to_expr_datum(); unpack_result!(bcx, trans_arg_datum( bcx, field_type, arg_datum, arg_cleanup_scope, DontAutorefArg) ) })); } _ => { bcx.sess().span_bug(tuple_expr.span, "argument to `.call()` wasn't a tuple?!") } }; bcx } fn trans_overloaded_call_args<'blk, 'tcx>( mut bcx: Block<'blk, 'tcx>, arg_exprs: Vec<&ast::Expr>, fn_ty: Ty<'tcx>, llargs: &mut Vec<ValueRef>, arg_cleanup_scope: cleanup::ScopeId, ignore_self: bool) -> Block<'blk, 'tcx> { // Translate the `self` argument first. 
let arg_tys = ty::erase_late_bound_regions(bcx.tcx(), &ty::ty_fn_args(fn_ty)); if !ignore_self { let arg_datum = unpack_datum!(bcx, expr::trans(bcx, arg_exprs[0])); llargs.push(unpack_result!(bcx, { trans_arg_datum(bcx, arg_tys[0], arg_datum, arg_cleanup_scope, DontAutorefArg) })) } // Now untuple the rest of the arguments. let tuple_type = arg_tys[1]; match tuple_type.sty { ty::ty_tup(ref field_types) => { for (i, &field_type) in field_types.iter().enumerate() { let arg_datum = unpack_datum!(bcx, expr::trans(bcx, arg_exprs[i + 1])); llargs.push(unpack_result!(bcx, { trans_arg_datum(bcx, field_type, arg_datum, arg_cleanup_scope, DontAutorefArg) })) } } _ => { bcx.sess().span_bug(arg_exprs[0].span, "argument to `.call()` wasn't a tuple?!") } }; bcx } pub fn trans_args<'a, 'blk, 'tcx>(cx: Block<'blk, 'tcx>, args: CallArgs<'a, 'tcx>, fn_ty: Ty<'tcx>, llargs: &mut Vec<ValueRef>, arg_cleanup_scope: cleanup::ScopeId, ignore_self: bool, abi: synabi::Abi) -> Block<'blk, 'tcx> { debug!("trans_args(abi={})", abi); let _icx = push_ctxt("trans_args"); let arg_tys = ty::erase_late_bound_regions(cx.tcx(), &ty::ty_fn_args(fn_ty)); let variadic = ty::fn_is_variadic(fn_ty); let mut bcx = cx; // First we figure out the caller's view of the types of the arguments. // This will be needed if this is a generic call, because the callee has // to cast her view of the arguments to the caller's view. match args { ArgExprs(arg_exprs) => { if abi == synabi::RustCall { // This is only used for direct calls to the `call`, // `call_mut` or `call_once` functions. 
return trans_args_under_call_abi(cx, arg_exprs, fn_ty, llargs, arg_cleanup_scope, ignore_self) } let num_formal_args = arg_tys.len(); for (i, arg_expr) in arg_exprs.iter().enumerate() { if i == 0 && ignore_self { continue; } let arg_ty = if i >= num_formal_args { assert!(variadic); common::expr_ty_adjusted(cx, &**arg_expr) } else { arg_tys[i] }; let arg_datum = unpack_datum!(bcx, expr::trans(bcx, &**arg_expr)); llargs.push(unpack_result!(bcx, { trans_arg_datum(bcx, arg_ty, arg_datum, arg_cleanup_scope, DontAutorefArg) })); } } ArgOverloadedCall(arg_exprs) => { return trans_overloaded_call_args(cx, arg_exprs, fn_ty, llargs, arg_cleanup_scope, ignore_self) } ArgOverloadedOp(lhs, rhs, autoref) => { assert!(!variadic); llargs.push(unpack_result!(bcx, { trans_arg_datum(bcx, arg_tys[0], lhs, arg_cleanup_scope, DontAutorefArg) })); assert_eq!(arg_tys.len(), 1 + rhs.len()); for (rhs, rhs_id) in rhs { llargs.push(unpack_result!(bcx, { trans_arg_datum(bcx, arg_tys[1], rhs, arg_cleanup_scope, if autoref { DoAutorefArg(rhs_id) } else { DontAutorefArg }) })); } } ArgVals(vs) => { llargs.push_all(vs); } } bcx } #[derive(Copy)] pub enum AutorefArg { DontAutorefArg, DoAutorefArg(ast::NodeId) } pub fn trans_arg_datum<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, formal_arg_ty: Ty<'tcx>, arg_datum: Datum<'tcx, Expr>, arg_cleanup_scope: cleanup::ScopeId, autoref_arg: AutorefArg) -> Result<'blk, 'tcx> { let _icx = push_ctxt("trans_arg_datum"); let mut bcx = bcx; let ccx = bcx.ccx(); debug!("trans_arg_datum({})", formal_arg_ty.repr(bcx.tcx())); let arg_datum_ty = arg_datum.ty; debug!(" arg datum: {}", arg_datum.to_string(bcx.ccx())); let mut val; // FIXME(#3548) use the adjustments table match autoref_arg { DoAutorefArg(arg_id) => { // We will pass argument by reference // We want an lvalue, so that we can pass by reference and let arg_datum = unpack_datum!( bcx, arg_datum.to_lvalue_datum(bcx, "arg", arg_id)); val = arg_datum.val; } DontAutorefArg => { // Make this an rvalue, since we are going 
to be // passing ownership. let arg_datum = unpack_datum!( bcx, arg_datum.to_rvalue_datum(bcx, "arg")); // Now that arg_datum is owned, get it into the appropriate // mode (ref vs value). let arg_datum = unpack_datum!( bcx, arg_datum.to_appropriate_datum(bcx)); // Technically, ownership of val passes to the callee. // However, we must cleanup should we panic before the // callee is actually invoked. val = arg_datum.add_clean(bcx.fcx, arg_cleanup_scope); } } if formal_arg_ty != arg_datum_ty { // this could happen due to e.g. subtyping let llformal_arg_ty = type_of::type_of_explicit_arg(ccx, formal_arg_ty); debug!("casting actual type ({}) to match formal ({})", bcx.val_to_string(val), bcx.llty_str(llformal_arg_ty)); debug!("Rust types: {}; {}", ty_to_string(bcx.tcx(), arg_datum_ty), ty_to_string(bcx.tcx(), formal_arg_ty)); val = PointerCast(bcx, val, llformal_arg_ty); } debug!("--- trans_arg_datum passing {}", bcx.val_to_string(val)); Result::new(bcx, val) }<|fim▁end|>
_ => method.ty }, None => panic!("method not found in trans_method_call")
<|file_name|>umbmediagrid.directive.js<|end_file_name|><|fim▁begin|><|fim▁hole|> function MediaGridDirective($filter, mediaHelper) { function link(scope, el, attr, ctrl) { var itemDefaultHeight = 200; var itemDefaultWidth = 200; var itemMaxWidth = 200; var itemMaxHeight = 200; var itemMinWidth = 125; var itemMinHeight = 125; function activate() { if (scope.itemMaxWidth) { itemMaxWidth = scope.itemMaxWidth; } if (scope.itemMaxHeight) { itemMaxHeight = scope.itemMaxHeight; } if (scope.itemMinWidth) { itemMinWidth = scope.itemMinWidth; } if (scope.itemMinWidth) { itemMinHeight = scope.itemMinHeight; } for (var i = 0; scope.items.length > i; i++) { var item = scope.items[i]; setItemData(item); setOriginalSize(item, itemMaxHeight); } if (scope.items.length > 0) { setFlexValues(scope.items); } } function setItemData(item) { item.isFolder = !mediaHelper.hasFilePropertyType(item); if (!item.isFolder) { item.thumbnail = mediaHelper.resolveFile(item, true); item.image = mediaHelper.resolveFile(item, false); } } function setOriginalSize(item, maxHeight) { //set to a square by default item.width = itemDefaultWidth; item.height = itemDefaultHeight; item.aspectRatio = 1; var widthProp = _.find(item.properties, function(v) { return (v.alias === "umbracoWidth"); }); if (widthProp && widthProp.value) { item.width = parseInt(widthProp.value, 10); if (isNaN(item.width)) { item.width = itemDefaultWidth; } } var heightProp = _.find(item.properties, function(v) { return (v.alias === "umbracoHeight"); }); if (heightProp && heightProp.value) { item.height = parseInt(heightProp.value, 10); if (isNaN(item.height)) { item.height = itemDefaultWidth; } } item.aspectRatio = item.width / item.height; // set max width and height // landscape if (item.aspectRatio >= 1) { if (item.width > itemMaxWidth) { item.width = itemMaxWidth; item.height = itemMaxWidth / item.aspectRatio; } // portrait } else { if (item.height > itemMaxHeight) { item.height = itemMaxHeight; item.width = itemMaxHeight * 
item.aspectRatio; } } } function setFlexValues(mediaItems) { var flexSortArray = mediaItems; var smallestImageWidth = null; var widestImageAspectRatio = null; // sort array after image width with the widest image first flexSortArray = $filter('orderBy')(flexSortArray, 'width', true); // find widest image aspect ratio widestImageAspectRatio = flexSortArray[0].aspectRatio; // find smallest image width smallestImageWidth = flexSortArray[flexSortArray.length - 1].width; for (var i = 0; flexSortArray.length > i; i++) { var mediaItem = flexSortArray[i]; var flex = 1 / (widestImageAspectRatio / mediaItem.aspectRatio); if (flex === 0) { flex = 1; } var imageMinFlexWidth = smallestImageWidth * flex; var flexStyle = { "flex": flex + " 1 " + imageMinFlexWidth + "px", "max-width": mediaItem.width + "px", "min-width": itemMinWidth + "px", "min-height": itemMinHeight + "px" }; mediaItem.flexStyle = flexStyle; } } scope.clickItem = function(item, $event, $index) { if (scope.onClick) { scope.onClick(item, $event, $index); } }; scope.clickItemName = function(item, $event, $index) { if (scope.onClickName) { scope.onClickName(item, $event, $index); $event.stopPropagation(); } }; scope.hoverItemDetails = function(item, $event, hover) { if (scope.onDetailsHover) { scope.onDetailsHover(item, $event, hover); } }; var unbindItemsWatcher = scope.$watch('items', function(newValue, oldValue) { if (angular.isArray(newValue)) { activate(); } }); scope.$on('$destroy', function() { unbindItemsWatcher(); }); } var directive = { restrict: 'E', replace: true, templateUrl: 'views/components/umb-media-grid.html', scope: { items: '=', onDetailsHover: "=", onClick: '=', onClickName: "=", filterBy: "=", itemMaxWidth: "@", itemMaxHeight: "@", itemMinWidth: "@", itemMinHeight: "@" }, link: link }; return directive; } angular.module('umbraco.directives').directive('umbMediaGrid', MediaGridDirective); })();<|fim▁end|>
(function() { 'use strict';
<|file_name|>graphvizexport.hpp<|end_file_name|><|fim▁begin|>/******************************************************************************* * ISPTAP - Instruction Scratchpad Timing Analysis Program * Copyright (C) 2013 Stefan Metzlaff, University of Augsburg, Germany * URL: <https://github.com/smetzlaff/isptap> * * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * any later version. * * This program is distributed in the hope that it will be useful,<|fim▁hole|> * You should have received a copy of the GNU General Public License * along with this program, see <LICENSE>. If not, see * <http://www.gnu.org/licenses/>. ******************************************************************************/ #ifndef _GRAPHVIZEXPORT_HPP_ #define _GRAPHVIZEXPORT_HPP_ #include "global.h" #include "graph_structure.h" #include <boost/graph/graphviz.hpp> /** * Export object for ControlFlogGraphs * Graphviz file is created, but the vertex and edge preferences are currently not written. */ class GraphVizExport { public: GraphVizExport(string filename); virtual ~GraphVizExport(); bool exportGraph(ControlFlowGraph cfg); bool exportGraph(ControlFlowGraph cfg, startaddrstring_t, edgename_t); bool exportGraph(FunctionCallGraph fcg); bool exportGraph(MemoryStateGraph msg); bool exportGraph(AbsStackMemGraph asmg); private: string export_filename; ofstream export_file_stream; }; #endif<|fim▁end|>
* but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. *
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // Servo, the mighty web browser engine from the future. // // This is a very simple library that wires all of Servo's components // together as type `Browser`, along with a generic client // implementing the `WindowMethods` trait, to create a working web // browser. // // The `Browser` type is responsible for configuring a // `Constellation`, which does the heavy lifting of coordinating all // of Servo's internal subsystems, including the `ScriptTask` and the // `LayoutTask`, as well maintains the navigation context. // // The `Browser` is fed events from a generic type that implements the // `WindowMethods` trait. extern crate gaol; #[macro_use] extern crate util as _util; <|fim▁hole|> extern crate canvas; extern crate canvas_traits; extern crate compositing; extern crate devtools; extern crate devtools_traits; extern crate euclid; extern crate gfx; extern crate gleam; extern crate ipc_channel; extern crate layers; extern crate layout; extern crate msg; extern crate net; extern crate net_traits; extern crate profile; extern crate profile_traits; extern crate script; extern crate script_traits; extern crate style; extern crate url; } extern crate libc; #[cfg(feature = "webdriver")] extern crate webdriver_server; #[cfg(feature = "webdriver")] fn webdriver(port: u16, constellation: Sender<ConstellationMsg>) { webdriver_server::start_server(port, constellation); } #[cfg(not(feature = "webdriver"))] fn webdriver(_port: u16, _constellation: Sender<ConstellationMsg>) { } use compositing::CompositorEventListener; use compositing::compositor_task::InitialCompositorState; use compositing::constellation::InitialConstellationState; use compositing::pipeline::UnprivilegedPipelineContent; use compositing::sandboxing; use 
compositing::windowing::WindowEvent; use compositing::windowing::WindowMethods; use compositing::{CompositorProxy, CompositorTask, Constellation}; use gaol::sandbox::{ChildSandbox, ChildSandboxMethods}; use gfx::font_cache_task::FontCacheTask; use ipc_channel::ipc::{self, IpcSender}; use msg::constellation_msg::CompositorMsg as ConstellationMsg; use net::image_cache_task::new_image_cache_task; use net::resource_task::new_resource_task; use net::storage_task::StorageTaskFactory; use net_traits::storage_task::StorageTask; use profile::mem as profile_mem; use profile::time as profile_time; use profile_traits::mem; use profile_traits::time; use std::borrow::Borrow; use std::rc::Rc; use std::sync::mpsc::Sender; use util::opts; pub use _util as util; pub use export::canvas; pub use export::canvas_traits; pub use export::compositing; pub use export::devtools; pub use export::devtools_traits; pub use export::euclid; pub use export::gfx; pub use export::gleam::gl; pub use export::ipc_channel; pub use export::layers; pub use export::layout; pub use export::msg; pub use export::net; pub use export::net_traits; pub use export::profile; pub use export::profile_traits; pub use export::script; pub use export::script_traits; pub use export::style; pub use export::url; pub struct Browser { compositor: Box<CompositorEventListener + 'static>, } /// The in-process interface to Servo. /// /// It does everything necessary to render the web, primarily /// orchestrating the interaction between JavaScript, CSS layout, /// rendering, and the client window. /// /// Clients create a `Browser` for a given reference-counted type /// implementing `WindowMethods`, which is the bridge to whatever /// application Servo is embedded in. Clients then create an event /// loop to pump messages between the embedding application and /// various browser components. 
impl Browser { pub fn new<Window>(window: Option<Rc<Window>>) -> Browser where Window: WindowMethods + 'static { // Global configuration options, parsed from the command line. let opts = opts::get(); script::init(); // Get both endpoints of a special channel for communication between // the client window and the compositor. This channel is unique because // messages to client may need to pump a platform-specific event loop // to deliver the message. let (compositor_proxy, compositor_receiver) = WindowMethods::create_compositor_channel(&window); let supports_clipboard = match window { Some(ref win_rc) => { let win: &Window = win_rc.borrow(); win.supports_clipboard() } None => false }; let time_profiler_chan = profile_time::Profiler::create(opts.time_profiler_period); let mem_profiler_chan = profile_mem::Profiler::create(opts.mem_profiler_period); let devtools_chan = opts.devtools_port.map(|port| { devtools::start_server(port) }); // Create the constellation, which maintains the engine // pipelines, including the script and layout threads, as well // as the navigation context. let constellation_chan = create_constellation(opts.clone(), compositor_proxy.clone_compositor_proxy(), time_profiler_chan.clone(), mem_profiler_chan.clone(), devtools_chan, supports_clipboard); if cfg!(feature = "webdriver") { if let Some(port) = opts.webdriver_port { webdriver(port, constellation_chan.clone()); } } // The compositor coordinates with the client window to create the final // rendered page and display it somewhere. 
let compositor = CompositorTask::create(window, InitialCompositorState { sender: compositor_proxy, receiver: compositor_receiver, constellation_chan: constellation_chan, time_profiler_chan: time_profiler_chan, mem_profiler_chan: mem_profiler_chan, }); Browser { compositor: compositor, } } pub fn handle_events(&mut self, events: Vec<WindowEvent>) -> bool { self.compositor.handle_events(events) } pub fn repaint_synchronously(&mut self) { self.compositor.repaint_synchronously() } pub fn pinch_zoom_level(&self) -> f32 { self.compositor.pinch_zoom_level() } pub fn request_title_for_main_frame(&self) { self.compositor.title_for_main_frame() } } fn create_constellation(opts: opts::Opts, compositor_proxy: Box<CompositorProxy + Send>, time_profiler_chan: time::ProfilerChan, mem_profiler_chan: mem::ProfilerChan, devtools_chan: Option<Sender<devtools_traits::DevtoolsControlMsg>>, supports_clipboard: bool) -> Sender<ConstellationMsg> { let resource_task = new_resource_task(opts.user_agent.clone(), devtools_chan.clone()); let image_cache_task = new_image_cache_task(resource_task.clone()); let font_cache_task = FontCacheTask::new(resource_task.clone()); let storage_task: StorageTask = StorageTaskFactory::new(); let initial_state = InitialConstellationState { compositor_proxy: compositor_proxy, devtools_chan: devtools_chan, image_cache_task: image_cache_task, font_cache_task: font_cache_task, resource_task: resource_task, storage_task: storage_task, time_profiler_chan: time_profiler_chan, mem_profiler_chan: mem_profiler_chan, supports_clipboard: supports_clipboard, }; let constellation_chan = Constellation::<layout::layout_task::LayoutTask, script::script_task::ScriptTask>::start(initial_state); // Send the URL command to the constellation. match opts.url { Some(url) => { constellation_chan.send(ConstellationMsg::InitLoadUrl(url)).unwrap(); }, None => () }; constellation_chan } /// Content process entry point. 
pub fn run_content_process(token: String) { let (unprivileged_content_sender, unprivileged_content_receiver) = ipc::channel::<UnprivilegedPipelineContent>().unwrap(); let connection_bootstrap: IpcSender<IpcSender<UnprivilegedPipelineContent>> = IpcSender::connect(token).unwrap(); connection_bootstrap.send(unprivileged_content_sender).unwrap(); let unprivileged_content = unprivileged_content_receiver.recv().unwrap(); opts::set_defaults(unprivileged_content.opts()); // Enter the sandbox if necessary. if opts::get().sandbox { ChildSandbox::new(sandboxing::content_process_sandbox_profile()).activate().unwrap(); } script::init(); unprivileged_content.start_all::<layout::layout_task::LayoutTask, script::script_task::ScriptTask>(true); }<|fim▁end|>
mod export {
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from django.conf.urls import patterns, include, url from django.conf.urls.i18n import i18n_patterns from django.contrib import admin from mezzanine.core.views import direct_to_template from mezzanine.conf import settings from hs_core.api import v1_api from theme import views as theme import autocomplete_light autocomplete_light.autodiscover() admin.autodiscover() # Add the urlpatterns for any custom Django applications here. # You can also change the ``home`` view to add your own functionality # to the project's homepage. urlpatterns = i18n_patterns("", # Change the admin prefix here to use an alternate URL for the # admin interface, which would be marginally more secure. url("^admin/", include(admin.site.urls)), url('^ga_resources/', include('ga_resources.urls')), url('^ga_interactive/', include('ga_interactive.urls')), url('^r/(?P<shortkey>[A-z0-9\-_]+)', 'hs_core.views.short_url'), # url('^party/', include('hs_scholar_profile.urls')) url(r'^user/$', theme.UserProfileView.as_view()), url(r'^user/(?P<user>.*)/', theme.UserProfileView.as_view()), url(r'^verify/(?P<pk>[0-9]*)/', 'hs_core.views.verify'), url(r'^django_irods/', include('django_irods.urls')), url(r'^autocomplete/', include('autocomplete_light.urls')), url(r'^hs_metrics/', include('hs_metrics.urls')), ) # Filebrowser admin media library. 
if getattr(settings, "PACKAGE_NAME_FILEBROWSER") in settings.INSTALLED_APPS: urlpatterns += i18n_patterns("", ("^admin/media-library/", include("%s.urls" % settings.PACKAGE_NAME_FILEBROWSER)), ) # Put API URLs before Mezzanine so that Mezzanine doesn't consume them urlpatterns += patterns('', (r'^api/', include(v1_api.urls) ), url("^api/%s/doc/" % (v1_api.api_name,), include('tastypie_swagger.urls', namespace='tastypie_swagger'), kwargs={'tastypie_api_module':'hs_core.api.v1_api', 'namespace':'tastypie_swagger'} ), url('^hsapi/', include('hs_core.urls')) url('^party/', include('hs_party.urls')) ) urlpatterns += patterns('', # We don't want to presume how your homepage works, so here are a # few patterns you can use to set it up. # HOMEPAGE AS STATIC TEMPLATE # --------------------------- # This pattern simply loads the index.html template. It isn't # commented out like the others, so it's the default. You only need # one homepage pattern, so if you use a different one, comment this # one out. # url("^$", direct_to_template, {"template": "index.html"}, name="home"), # HOMEPAGE AS AN EDITABLE PAGE IN THE PAGE TREE # --------------------------------------------- # This pattern gives us a normal ``Page`` object, so that your # homepage can be managed via the page tree in the admin. If you # use this pattern, you'll need to create a page in the page tree, # and specify its URL (in the Meta Data section) as "/", which # is the value used below in the ``{"slug": "/"}`` part. # Also note that the normal rule of adding a custom # template per page with the template name using the page's slug # doesn't apply here, since we can't have a template called # "/.html" - so for this case, the template "pages/index.html" # should be used if you want to customize the homepage's template. 
url("^$", "mezzanine.pages.views.page", {"slug": "/"}, name="home"), # HOMEPAGE FOR A BLOG-ONLY SITE # ----------------------------- # This pattern points the homepage to the blog post listing page, # and is useful for sites that are primarily blogs. If you use this # pattern, you'll also need to set BLOG_SLUG = "" in your # ``settings.py`` module, and delete the blog page object from the # page tree in the admin if it was installed. # url("^$", "mezzanine.blog.views.blog_post_list", name="home"), # MEZZANINE'S URLS # ---------------- # ADD YOUR OWN URLPATTERNS *ABOVE* THE LINE BELOW. # ``mezzanine.urls`` INCLUDES A *CATCH ALL* PATTERN # FOR PAGES, SO URLPATTERNS ADDED BELOW ``mezzanine.urls`` # WILL NEVER BE MATCHED! # If you'd like more granular control over the patterns in # ``mezzanine.urls``, go right ahead and take the parts you want # from it, and use them directly below instead of using # ``mezzanine.urls``. ("^", include("mezzanine.urls")), # MOUNTING MEZZANINE UNDER A PREFIX # --------------------------------- # You can also mount all of Mezzanine's urlpatterns under a # URL prefix if desired. When doing this, you need to define the # ``SITE_PREFIX`` setting, which will contain the prefix. Eg: # SITE_PREFIX = "my/site/prefix" # For convenience, and to avoid repeating the prefix, use the # commented out pattern below (commenting out the one above of course) # which will make use of the ``SITE_PREFIX`` setting. Make sure to # add the import ``from django.conf import settings`` to the top # of this file as well. # Note that for any of the various homepage patterns above, you'll # need to use the ``SITE_PREFIX`` setting as well. # ("^%s/" % settings.SITE_PREFIX, include("mezzanine.urls"))<|fim▁hole|># pages can use JS, CSS and images. handler404 = "mezzanine.core.views.page_not_found" handler500 = "mezzanine.core.views.server_error"<|fim▁end|>
) # Adds ``STATIC_URL`` to the context of error pages, so that error
<|file_name|>htmldatalistelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::HTMLDataListElementBinding; use dom::bindings::codegen::Bindings::HTMLDataListElementBinding::HTMLDataListElementMethods; use dom::bindings::inheritance::Castable; use dom::bindings::root::DomRoot; use dom::document::Document; use dom::element::Element; use dom::htmlcollection::{CollectionFilter, HTMLCollection}; use dom::htmlelement::HTMLElement; use dom::htmloptionelement::HTMLOptionElement; use dom::node::{Node, window_from_node}; use dom_struct::dom_struct; use html5ever::{LocalName, Prefix}; #[dom_struct] pub struct HTMLDataListElement { htmlelement: HTMLElement, } impl HTMLDataListElement { fn new_inherited( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> HTMLDataListElement { HTMLDataListElement { htmlelement: HTMLElement::new_inherited(local_name, prefix, document), }<|fim▁hole|> pub fn new( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> DomRoot<HTMLDataListElement> { Node::reflect_node( Box::new(HTMLDataListElement::new_inherited( local_name, prefix, document, )), document, HTMLDataListElementBinding::Wrap, ) } } impl HTMLDataListElementMethods for HTMLDataListElement { // https://html.spec.whatwg.org/multipage/#dom-datalist-options fn Options(&self) -> DomRoot<HTMLCollection> { #[derive(JSTraceable, MallocSizeOf)] struct HTMLDataListOptionsFilter; impl CollectionFilter for HTMLDataListOptionsFilter { fn filter(&self, elem: &Element, _root: &Node) -> bool { elem.is::<HTMLOptionElement>() } } let filter = Box::new(HTMLDataListOptionsFilter); let window = window_from_node(self); HTMLCollection::create(&window, self.upcast(), filter) } }<|fim▁end|>
} #[allow(unrooted_must_root)]
<|file_name|>eclipse.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """GYP backend that generates Eclipse CDT settings files. This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML files that can be imported into an Eclipse CDT project. The XML file contains a list of include paths and symbols (i.e. defines). Because a full .cproject definition is not created by this generator, it's not possible to properly define the include dirs and symbols for each file individually. Instead, one set of includes/symbols is generated for the entire project. This works fairly well (and is a vast improvement in general), but may still result in a few indexer issues here and there. This generator has no automated tests, so expect it to be broken. """ from xml.sax.saxutils import escape import os.path import subprocess import gyp import gyp.common import gyp.msvs_emulation import shlex import xml.etree.cElementTree as ET generator_wants_static_library_dependencies_adjusted = False generator_default_variables = { } for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']: # Some gyp steps fail if these are empty(!), so we convert them to variables generator_default_variables[dirname] = '$' + dirname for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME', 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX', 'CONFIGURATION_NAME']: generator_default_variables[unused] = '' # Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as # part of the path when dealing with generated headers. This value will be # replaced dynamically for each configuration. 
generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \ '$SHARED_INTERMEDIATE_DIR' def CalculateVariables(default_variables, params): generator_flags = params.get('generator_flags', {}) for key, val in generator_flags.items(): default_variables.setdefault(key, val) flavor = gyp.common.GetFlavor(params) default_variables.setdefault('OS', flavor) if flavor == 'win': # Copy additional generator configuration data from VS, which is shared # by the Eclipse generator. import gyp.generator.msvs as msvs_generator generator_additional_non_configuration_keys = getattr(msvs_generator, 'generator_additional_non_configuration_keys', []) generator_additional_path_sections = getattr(msvs_generator, 'generator_additional_path_sections', []) gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) def CalculateGeneratorInputInfo(params): """Calculate the generator specific info that gets fed to input (called by gyp).""" generator_flags = params.get('generator_flags', {}) if generator_flags.get('adjust_static_libraries', False): global generator_wants_static_library_dependencies_adjusted generator_wants_static_library_dependencies_adjusted = True <|fim▁hole|> def GetAllIncludeDirectories(target_list, target_dicts, shared_intermediate_dirs, config_name, params, compiler_path): """Calculate the set of include directories to be used. Returns: A list including all the include_dir's specified for every target followed by any include directories that were added as cflag compiler options. """ gyp_includes_set = set() compiler_includes_list = [] # Find compiler's default include dirs. if compiler_path: command = shlex.split(compiler_path) command.extend(['-E', '-xc++', '-v', '-']) proc = subprocess.Popen(args=command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) output = proc.communicate()[1] # Extract the list of include dirs from the output, which has this format: # ... # #include "..." 
search starts here: # #include <...> search starts here: # /usr/include/c++/4.6 # /usr/local/include # End of search list. # ... in_include_list = False for line in output.splitlines(): if line.startswith('#include'): in_include_list = True continue if line.startswith('End of search list.'): break if in_include_list: include_dir = line.strip() if include_dir not in compiler_includes_list: compiler_includes_list.append(include_dir) flavor = gyp.common.GetFlavor(params) if flavor == 'win': generator_flags = params.get('generator_flags', {}) for target_name in target_list: target = target_dicts[target_name] if config_name in target['configurations']: config = target['configurations'][config_name] # Look for any include dirs that were explicitly added via cflags. This # may be done in gyp files to force certain includes to come at the end. # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and # remove this. if flavor == 'win': msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags) cflags = msvs_settings.GetCflags(config_name) else: cflags = config['cflags'] for cflag in cflags: if cflag.startswith('-I'): include_dir = cflag[2:] if include_dir not in compiler_includes_list: compiler_includes_list.append(include_dir) # Find standard gyp include dirs. if config.has_key('include_dirs'): include_dirs = config['include_dirs'] for shared_intermediate_dir in shared_intermediate_dirs: for include_dir in include_dirs: include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR', shared_intermediate_dir) if not os.path.isabs(include_dir): base_dir = os.path.dirname(target_name) include_dir = base_dir + '/' + include_dir include_dir = os.path.abspath(include_dir) gyp_includes_set.add(include_dir) # Generate a list that has all the include dirs. 
all_includes_list = list(gyp_includes_set) all_includes_list.sort() for compiler_include in compiler_includes_list: if not compiler_include in gyp_includes_set: all_includes_list.append(compiler_include) # All done. return all_includes_list def GetCompilerPath(target_list, data, options): """Determine a command that can be used to invoke the compiler. Returns: If this is a gyp project that has explicit make settings, try to determine the compiler from that. Otherwise, see if a compiler was specified via the CC_target environment variable. """ # First, see if the compiler is configured in make's settings. build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) make_global_settings_dict = data[build_file].get('make_global_settings', {}) for key, value in make_global_settings_dict: if key in ['CC', 'CXX']: return os.path.join(options.toplevel_dir, value) # Check to see if the compiler was specified as an environment variable. for key in ['CC_target', 'CC', 'CXX']: compiler = os.environ.get(key) if compiler: return compiler return 'gcc' def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path): """Calculate the defines for a project. Returns: A dict that includes explict defines declared in gyp files along with all of the default defines that the compiler uses. """ # Get defines declared in the gyp files. 
all_defines = {} flavor = gyp.common.GetFlavor(params) if flavor == 'win': generator_flags = params.get('generator_flags', {}) for target_name in target_list: target = target_dicts[target_name] if flavor == 'win': msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags) extra_defines = msvs_settings.GetComputedDefines(config_name) else: extra_defines = [] if config_name in target['configurations']: config = target['configurations'][config_name] target_defines = config['defines'] else: target_defines = [] for define in target_defines + extra_defines: split_define = define.split('=', 1) if len(split_define) == 1: split_define.append('1') if split_define[0].strip() in all_defines: # Already defined continue all_defines[split_define[0].strip()] = split_define[1].strip() # Get default compiler defines (if possible). if flavor == 'win': return all_defines # Default defines already processed in the loop above. if compiler_path: command = shlex.split(compiler_path) command.extend(['-E', '-dM', '-']) cpp_proc = subprocess.Popen(args=command, cwd='.', stdin=subprocess.PIPE, stdout=subprocess.PIPE) cpp_output = cpp_proc.communicate()[0] cpp_lines = cpp_output.split('\n') for cpp_line in cpp_lines: if not cpp_line.strip(): continue cpp_line_parts = cpp_line.split(' ', 2) key = cpp_line_parts[1] if len(cpp_line_parts) >= 3: val = cpp_line_parts[2] else: val = '1' all_defines[key] = val return all_defines def WriteIncludePaths(out, eclipse_langs, include_dirs): """Write the includes section of a CDT settings export file.""" out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' 
\ 'settingswizards.IncludePaths">\n') out.write(' <language name="holder for library settings"></language>\n') for lang in eclipse_langs: out.write(' <language name="%s">\n' % lang) for include_dir in include_dirs: out.write(' <includepath workspace_path="false">%s</includepath>\n' % include_dir) out.write(' </language>\n') out.write(' </section>\n') def WriteMacros(out, eclipse_langs, defines): """Write the macros section of a CDT settings export file.""" out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' \ 'settingswizards.Macros">\n') out.write(' <language name="holder for library settings"></language>\n') for lang in eclipse_langs: out.write(' <language name="%s">\n' % lang) for key in sorted(defines.iterkeys()): out.write(' <macro><name>%s</name><value>%s</value></macro>\n' % (escape(key), escape(defines[key]))) out.write(' </language>\n') out.write(' </section>\n') def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name): options = params['options'] generator_flags = params.get('generator_flags', {}) # build_dir: relative path from source root to our output files. # e.g. "out/Debug" build_dir = os.path.join(generator_flags.get('output_dir', 'out'), config_name) toplevel_build = os.path.join(options.toplevel_dir, build_dir) # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the # SHARED_INTERMEDIATE_DIR. Include both possible locations. 
shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'), os.path.join(toplevel_build, 'gen')] GenerateCdtSettingsFile(target_list, target_dicts, data, params, config_name, os.path.join(toplevel_build, 'eclipse-cdt-settings.xml'), options, shared_intermediate_dirs) GenerateClasspathFile(target_list, target_dicts, options.toplevel_dir, toplevel_build, os.path.join(toplevel_build, 'eclipse-classpath.xml')) def GenerateCdtSettingsFile(target_list, target_dicts, data, params, config_name, out_name, options, shared_intermediate_dirs): gyp.common.EnsureDirExists(out_name) with open(out_name, 'w') as out: out.write('<?xml version="1.0" encoding="UTF-8"?>\n') out.write('<cdtprojectproperties>\n') eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File', 'GNU C++', 'GNU C', 'Assembly'] compiler_path = GetCompilerPath(target_list, data, options) include_dirs = GetAllIncludeDirectories(target_list, target_dicts, shared_intermediate_dirs, config_name, params, compiler_path) WriteIncludePaths(out, eclipse_langs, include_dirs) defines = GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path) WriteMacros(out, eclipse_langs, defines) out.write('</cdtprojectproperties>\n') def GenerateClasspathFile(target_list, target_dicts, toplevel_dir, toplevel_build, out_name): '''Generates a classpath file suitable for symbol navigation and code completion of Java code (such as in Android projects) by finding all .java and .jar files used as action inputs.''' gyp.common.EnsureDirExists(out_name) result = ET.Element('classpath') def AddElements(kind, paths): # First, we need to normalize the paths so they are all relative to the # toplevel dir. 
rel_paths = set() for path in paths: if os.path.isabs(path): rel_paths.add(os.path.relpath(path, toplevel_dir)) else: rel_paths.add(path) for path in sorted(rel_paths): entry_element = ET.SubElement(result, 'classpathentry') entry_element.set('kind', kind) entry_element.set('path', path) AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir)) AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir)) # Include the standard JRE container and a dummy out folder AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER']) # Include a dummy out folder so that Eclipse doesn't use the default /bin # folder in the root of the project. AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')]) ET.ElementTree(result).write(out_name) def GetJavaJars(target_list, target_dicts, toplevel_dir): '''Generates a sequence of all .jars used as inputs.''' for target_name in target_list: target = target_dicts[target_name] for action in target.get('actions', []): for input_ in action['inputs']: if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'): if os.path.isabs(input_): yield input_ else: yield os.path.join(os.path.dirname(target_name), input_) def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir): '''Generates a sequence of all likely java package root directories.''' for target_name in target_list: target = target_dicts[target_name] for action in target.get('actions', []): for input_ in action['inputs']: if (os.path.splitext(input_)[1] == '.java' and not input_.startswith('$')): dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name), input_)) # If there is a parent 'src' or 'java' folder, navigate up to it - # these are canonical package root names in Chromium. This will # break if 'src' or 'java' exists in the package structure. This # could be further improved by inspecting the java file for the # package name if this proves to be too fragile in practice. 
parent_search = dir_ while os.path.basename(parent_search) not in ['src', 'java']: parent_search, _ = os.path.split(parent_search) if not parent_search or parent_search == toplevel_dir: # Didn't find a known root, just return the original path yield dir_ break else: yield parent_search def GenerateOutput(target_list, target_dicts, data, params): """Generate an XML settings file that can be imported into a CDT project.""" if params['options'].generator_output: raise NotImplementedError("--generator_output not implemented for eclipse") user_config = params.get('generator_flags', {}).get('config', None) if user_config: GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) else: config_names = target_dicts[target_list[0]]['configurations'].keys() for config_name in config_names: GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)<|fim▁end|>
<|file_name|>model_link_volume_response.go<|end_file_name|><|fim▁begin|>/* * 3DS OUTSCALE API * * Welcome to the 3DS OUTSCALE's API documentation.<br /><br /> The 3DS OUTSCALE API enables you to manage your resources in the 3DS OUTSCALE Cloud. This documentation describes the different actions available along with code examples.<br /><br /> Note that the 3DS OUTSCALE Cloud is compatible with Amazon Web Services (AWS) APIs, but some resources have different names in AWS than in the 3DS OUTSCALE API. You can find a list of the differences [here](https://wiki.outscale.net/display/EN/3DS+OUTSCALE+APIs+Reference).<br /><br /> You can also manage your resources using the [Cockpit](https://wiki.outscale.net/display/EN/About+Cockpit) web interface.<|fim▁hole|> * API version: 1.2 * Contact: [email protected] * Generated by: OpenAPI Generator (https://openapi-generator.tech) */ package osc // LinkVolumeResponse struct for LinkVolumeResponse type LinkVolumeResponse struct { ResponseContext ResponseContext `json:"ResponseContext,omitempty"` }<|fim▁end|>
*
<|file_name|>use-effect.ts<|end_file_name|><|fim▁begin|>import { TRAM_HOOK_KEY, TRAM_EFFECT_QUEUE } from './engine-names'; import { getEffectStore } from './effect-store';<|fim▁hole|>import { Effect } from './types'; /** * @name useEffect * @link https://tram-one.io/#use-effect * @description * Hook that triggers component start, update, and cleanup effects. * If the return of effect is another function, then that function is called on when the component is removed. * If the effect is dependent on a observable, it will automatically trigger again if that value updates. * * @param effect function to run on component mount */ export default (effect: Effect): void => { // get the store of effects const effectQueue = getEffectStore(TRAM_EFFECT_QUEUE); // get the key value from working-key const key = getWorkingKeyValue(TRAM_HOOK_KEY); // increment the working key branch value // this makes successive useEffects calls unique (until we reset the key) incrementWorkingKeyBranch(TRAM_HOOK_KEY); // append () so that it's easier to debug effects from components const callLikeKey = `${key}()`; // add the effect to the effect queue, so it can be processed later effectQueue[callLikeKey] = effect; };<|fim▁end|>
import { getWorkingKeyValue, incrementWorkingKeyBranch } from './working-key';
<|file_name|>Rule.js<|end_file_name|><|fim▁begin|>"use strict"; var Q = require("q"); var winston = require("winston"); var check = require('validator').check; var Rule = function() {}; Rule.prototype.initializeFromJson = function(json) { var self = this; return Q.fcall(function() { if(!json) throw new Error("JSON was not provided."); if(!json.url) throw new Error("URL was not provided."); check(json.url).isUrl(); if(!json.headers && !json.body) throw new Error("Either headers or a body must be present."); self.rules = json; winston.info("Set Rule: " + self.toString()); return self; }); }; Rule.prototype.getUrl = function() { return this.rules.url; }; Rule.prototype.getHeaders = function() { return this.rules.headers; }; Rule.prototype.getBody = function() { return this.rules.body; };<|fim▁hole|> Rule.prototype.toString = function() { return JSON.stringify(this.rules); }; module.exports = Rule;<|fim▁end|>
Rule.prototype.getUrlReplacement = function() { return this.rules.replaceUrls; };
<|file_name|>GodTemperDB.java<|end_file_name|><|fim▁begin|>package com.example.godtemper.db; import java.util.ArrayList; import java.util.List; import com.example.godtemper.model.City; import com.example.godtemper.model.County; import com.example.godtemper.model.Province; import android.R.integer; import android.content.ContentValues; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; <|fim▁hole|> * Êý¾Ý¿âÃû */ public static final String DB_NAME = "GodTemper"; /** * Êý¾Ý¿â°æ±¾ */ public static final int VERSION = 1; private static GodTemperDB godTemperDB; private SQLiteDatabase db; private GodTemperDB(Context context){ GodTemperOpenHelper dbHelper = new GodTemperOpenHelper(context, DB_NAME, null, VERSION); db = dbHelper.getWritableDatabase(); } /** * »ñÈ¡godTemperDBµÄʵÀý * @param context * @return */ public synchronized static GodTemperDB getInstance(Context context){ if(godTemperDB == null){ godTemperDB = new GodTemperDB(context); } return godTemperDB; } /** * ½«ProvinceʵÀý´æ´¢µ½Êý¾Ý¿â * @param province */ public void saveProvince(Province province){ if(province != null){ ContentValues values = new ContentValues(); values.put("province_name", province.getProvinceName()); values.put("province_code", province.getProvinceCode()); db.insert("Province", null, values); } } /** * ´ÓÊý¾Ý¿â¶Áȡȫ¹úËùÓÐÊ¡·ÝµÄÐÅÏ¢ * @return */ public List<Province>loadProvinces(){ List<Province>list = new ArrayList<Province>(); Cursor cursor = db.query("Province", null, null, null, null, null, null); if(cursor.moveToFirst()){ do{ Province province = new Province(); province.setId(cursor.getInt(cursor.getColumnIndex("id"))); province.setProvinceName(cursor.getString(cursor.getColumnIndex("province_name"))); province.setProvinceCode(cursor.getString(cursor.getColumnIndex("province_code"))); list.add(province); }while(cursor.moveToNext()); } return list; } /** * ½«CityʵÀý´æ´¢µ½Êý¾Ý¿â * @param city */ public void saveCity(City city) { 
if(city!=null){ ContentValues values = new ContentValues(); values.put("city_name", city.getCityName()); values.put("city_code", city.getCityCode()); values.put("province_id", city.getProvinceId()); db.insert("City", null, values); } } /** * ´ÓÊý¾Ý¿â¶ÁȡijʡÏÂËùÓеijÇÊÐÐÅÏ¢ * @param provinceId * @return */ public List<City> loadCities(int provinceId) { List<City>list = new ArrayList<City>(); Cursor cursor = db.query("City", null, "province_id = ?", new String[]{String.valueOf(provinceId)}, null,null,null); if(cursor.moveToFirst()){ do{ City city = new City(); city.setId(cursor.getInt(cursor.getColumnIndex("id"))); city.setCityName(cursor.getString(cursor.getColumnIndex("city_name"))); city.setCityCode(cursor.getString(cursor.getColumnIndex("city_code"))); city.setProvinceId(provinceId); list.add(city); }while(cursor.moveToNext()); } return list; } /** * ½«CountyʵÀý´æ´¢µ½Êý¾Ý¿â */ public void saveCounty(County county){ if(county != null){ ContentValues values = new ContentValues(); values.put("county_name", county.getCountyName()); values.put("county_code", county.getCountyCode()); values.put("city_id", county.getCityId()); db.insert("County", null, values); } } /** * ´ÓÊý¾Ý¿â¶Áȡij³ÇÊÐÏÂËùÓÐÏØµÄÐÅÏ¢ */ public List<County>loadCounties (int cityId){ List<County>list = new ArrayList<County>(); Cursor cursor = db.query("County", null, "city_id = ?", new String[]{String.valueOf(cityId)}, null, null, null); if(cursor.moveToFirst()){ do{ County county = new County(); county.setId(cursor.getInt(cursor.getColumnIndex("id"))); county.setCountyName(cursor.getString(cursor.getColumnIndex("county_name"))); county.setCountyCode(cursor.getString(cursor.getColumnIndex("county_code"))); county.setCityId(cityId); list.add(county); }while(cursor.moveToNext()); } return list; } }<|fim▁end|>
public class GodTemperDB { /**
<|file_name|>g1RemSet.hpp<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2001, 2014, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. * */ #ifndef SHARE_VM_GC_IMPLEMENTATION_G1_G1REMSET_HPP #define SHARE_VM_GC_IMPLEMENTATION_G1_G1REMSET_HPP #include "gc_implementation/g1/g1RemSetSummary.hpp" // A G1RemSet provides ways of iterating over pointers into a selected // collection set. class G1CollectedHeap; class ConcurrentG1Refine; class G1ParPushHeapRSClosure; // A G1RemSet in which each heap region has a rem set that records the // external heap references into it. Uses a mod ref bs to track updates, // so that they can be used to update the individual region remsets. 
class G1RemSet: public CHeapObj<mtGC> { private: G1RemSetSummary _prev_period_summary; protected: G1CollectedHeap* _g1; size_t _conc_refine_cards; uint n_workers(); protected:<|fim▁hole|> MergeRStoDoDirtySync = 1, DoDirtySync = 2, LastSync = 3, SeqTask = 0, NumSeqTasks = 1 }; CardTableModRefBS* _ct_bs; SubTasksDone* _seq_task; G1CollectorPolicy* _g1p; ConcurrentG1Refine* _cg1r; size_t* _cards_scanned; size_t _total_cards_scanned; // Used for caching the closure that is responsible for scanning // references into the collection set. G1ParPushHeapRSClosure** _cset_rs_update_cl; // Print the given summary info virtual void print_summary_info(G1RemSetSummary * summary, const char * header = NULL); public: // This is called to reset dual hash tables after the gc pause // is finished and the initial hash table is no longer being // scanned. void cleanupHRRS(); G1RemSet(G1CollectedHeap* g1, CardTableModRefBS* ct_bs); ~G1RemSet(); // Invoke "blk->do_oop" on all pointers into the collection set // from objects in regions outside the collection set (having // invoked "blk->set_region" to set the "from" region correctly // beforehand.) // // Invoke code_root_cl->do_code_blob on the unmarked nmethods // on the strong code roots list for each region in the // collection set. // // The "worker_i" param is for the parallel case where the id // of the worker thread calling this function can be helpful in // partitioning the work to be done. It should be the same as // the "i" passed to the calling thread's work(i) function. // In the sequential case this param will be ignored. void oops_into_collection_set_do(G1ParPushHeapRSClosure* blk, CodeBlobClosure* code_root_cl, uint worker_i); // Prepare for and cleanup after an oops_into_collection_set_do // call. Must call each of these once before and after (in sequential // code) any threads call oops_into_collection_set_do. 
(This offers an // opportunity to sequential setup and teardown of structures needed by a // parallel iteration over the CS's RS.) void prepare_for_oops_into_collection_set_do(); void cleanup_after_oops_into_collection_set_do(); void scanRS(G1ParPushHeapRSClosure* oc, CodeBlobClosure* code_root_cl, uint worker_i); void updateRS(DirtyCardQueue* into_cset_dcq, uint worker_i); CardTableModRefBS* ct_bs() { return _ct_bs; } size_t cardsScanned() { return _total_cards_scanned; } // Record, if necessary, the fact that *p (where "p" is in region "from", // which is required to be non-NULL) has changed to a new non-NULL value. template <class T> void write_ref(HeapRegion* from, T* p); template <class T> void par_write_ref(HeapRegion* from, T* p, uint tid); // Requires "region_bm" and "card_bm" to be bitmaps with 1 bit per region // or card, respectively, such that a region or card with a corresponding // 0 bit contains no part of any live object. Eliminates any remembered // set entries that correspond to dead heap ranges. "worker_num" is the // parallel thread id of the current thread, and "hrclaimer" is the // HeapRegionClaimer that should be used. void scrub(BitMap* region_bm, BitMap* card_bm, uint worker_num, HeapRegionClaimer* hrclaimer); // Refine the card corresponding to "card_ptr". // If check_for_refs_into_cset is true, a true result is returned // if the given card contains oops that have references into the // current collection set. virtual bool refine_card(jbyte* card_ptr, uint worker_i, bool check_for_refs_into_cset); // Print accumulated summary info from the start of the VM. virtual void print_summary_info(); // Print accumulated summary info from the last time called. virtual void print_periodic_summary_info(const char* header); // Prepare remembered set for verification. 
virtual void prepare_for_verify(); size_t conc_refine_cards() const { return _conc_refine_cards; } }; class UpdateRSOopClosure: public ExtendedOopClosure { HeapRegion* _from; G1RemSet* _rs; uint _worker_i; template <class T> void do_oop_work(T* p); public: UpdateRSOopClosure(G1RemSet* rs, uint worker_i = 0) : _from(NULL), _rs(rs), _worker_i(worker_i) {} void set_from(HeapRegion* from) { assert(from != NULL, "from region must be non-NULL"); _from = from; } virtual void do_oop(narrowOop* p) { do_oop_work(p); } virtual void do_oop(oop* p) { do_oop_work(p); } // Override: this closure is idempotent. // bool idempotent() { return true; } bool apply_to_weak_ref_discovered_field() { return true; } }; #endif // SHARE_VM_GC_IMPLEMENTATION_G1_G1REMSET_HPP<|fim▁end|>
enum SomePrivateConstants { UpdateRStoMergeSync = 0,
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>######################################################################################################################## # VECNet CI - Prototype # Date: 4/5/2013 # Institution: University of Notre Dame # Primary Authors: ######################################################################################################################## __author__ = 'ztorstri'<|fim▁hole|>from etlExceptions import *<|fim▁end|>
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render from django.views import View <|fim▁hole|><|fim▁end|>
class SiteUpdateNotifier(View): def get(self, request): pass
<|file_name|>supersized.images.js<|end_file_name|><|fim▁begin|>jQuery(function($){ $.supersized({ <|fim▁hole|> stop_loop : 0, // Pauses slideshow on last slide random : 0, // Randomize slide order (Ignores start slide) slide_interval : 12000, // Length between transitions transition : 1, // 0-None, 1-Fade, 2-Slide Top, 3-Slide Right, 4-Slide Bottom, 5-Slide Left, 6-Carousel Right, 7-Carousel Left transition_speed : 1000, // Speed of transition new_window : 1, // Image links open in new window/tab pause_hover : 0, // Pause slideshow on hover keyboard_nav : 1, // Keyboard navigation on/off performance : 1, // 0-Normal, 1-Hybrid speed/quality, 2-Optimizes image quality, 3-Optimizes transition speed // (Only works for Firefox/IE, not Webkit) image_protect : 1, // Disables image dragging and right click with Javascript // Size & Position min_width : 0, // Min width allowed (in pixels) min_height : 0, // Min height allowed (in pixels) vertical_center : 1, // Vertically center background horizontal_center : 1, // Horizontally center background fit_always : 0, // Image will never exceed browser width or height (Ignores min. dimensions) fit_portrait : 1, // Portrait images will not exceed browser height fit_landscape : 0, // Landscape images will not exceed browser width // Components slide_links : 'blank', // Individual links for each slide (Options: false, 'num', 'name', 'blank') thumb_links : 0, // Individual thumb links for each slide thumbnail_navigation : 0, // Thumbnail navigation slides : [ // Slideshow Images {image : './images/feat-bg1.jpg', title : '<div class="major">Apollo</div><div class="slidedescription">Quisque non magna ac tortor tincidunt posuere. Vestibulum luctus aliquet gravida. 
Etiam non dolor sit amet libero porttitor egestas quis sed urna.</div><ul class="footprints"><li><a href="">Our Work</a></li><li><a href="">Our Blog</a></li><li><a href="">Our People</a></li><ul>', thumb : '', url : '#'}, {image : './images/feat-bg6.jpg', title : '<div class="major">LIVE EPIC</div><div class="slidedescription">What a piece of work is a man, how noble in reason, how infinite in faculties, in form and moving how express and admirable, in action how like an angel, in apprehension how like a god.</div>', thumb : '', url : '#'}, {image : './images/feat-bg2.jpg', title : '<div class="major minor">&hellip; or lavishly</div><div class="slidedescription">As a space, Work Happy hosts events regularly with the intent to educate and grow our local creative community. From courses on web design and programming, classes on photography, to community meetups to help introduce creatives to other creatives. </div>', thumb : '', url : '#'}, {image : './images/feat-bg3.jpg', title : '<div class="major">Apollo</div><div class="slidedescription">Desks start at $75 a month part time or $125 full time. All Co-workers have access to all Work Happuy events and meetups. Desks are month to month and the facility does have a security system.</div>', thumb : '', url : '#'}, {image : './images/feat-bg4.jpg', title : '<div class="major">Apollo</div><div class="slidedescription">Desks start at $75 a month part time or $125 full time. All Co-workers have access to all Work Happuy events and meetups. Desks are month to month and the facility does have a security system.</div>', thumb : '', url : '#'}, {image : './images/feat-bg5.jpg', title : '<div class="major">Apollo</div><div class="slidedescription">Desks start at $75 a month part time or $125 full time. All Co-workers have access to all Work Happuy events and meetups. 
Desks are month to month and the facility does have a security system.</div>', thumb : '', url : '#'} ], // Theme Options progress_bar : 0, // Timer for each slide mouse_scrub : 0 }); });<|fim▁end|>
// Functionality slideshow : 1, // Slideshow on/off autoplay : 1, // Slideshow starts playing automatically start_slide : 1, // Start slide (0 is random)
<|file_name|>Recap.py<|end_file_name|><|fim▁begin|># $Id$ # # Copyright (c) 2007, Novartis Institutes for BioMedical Research Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of Novartis Institutes for BioMedical Research Inc. # nor the names of its contributors may be used to endorse or promote # products derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # """ Implementation of the RECAP algorithm from Lewell et al. JCICS *38* 511-522 (1998) The published algorithm is implemented more or less without modification. The results are returned as a hierarchy of nodes instead of just as a set of fragments. The hope is that this will allow a bit more flexibility in working with the results. 
For example: >>> m = Chem.MolFromSmiles('C1CC1Oc1ccccc1-c1ncc(OC)cc1') >>> res = Recap.RecapDecompose(m) >>> res <Chem.Recap.RecapHierarchyNode object at 0x00CDB5D0> >>> res.children.keys() ['[*]C1CC1', '[*]c1ccccc1-c1ncc(OC)cc1', '[*]c1ccc(OC)cn1', '[*]c1ccccc1OC1CC1'] >>> res.GetAllChildren().keys() ['[*]c1ccccc1[*]', '[*]C1CC1', '[*]c1ccccc1-c1ncc(OC)cc1', '[*]c1ccc(OC)cn1', '[*]c1ccccc1OC1CC1'] To get the standard set of RECAP results, use GetLeaves(): >>> leaves=res.GetLeaves() >>> leaves.keys() ['[*]c1ccccc1[*]', '[*]c1ccc(OC)cn1', '[*]C1CC1'] >>> leaf = leaves['[*]C1CC1'] >>> leaf.mol <Chem.rdchem.Mol object at 0x00CBE0F0> """ import sys import weakref from rdkit import Chem from rdkit.Chem import rdChemReactions as Reactions from rdkit.six import iterkeys, iteritems, next # These are the definitions that will be applied to fragment molecules: reactionDefs = ( "[#7;+0;D2,D3:1]!@C(!@=O)!@[#7;+0;D2,D3:2]>>[*][#7:1].[#7:2][*]", # urea "[C;!$(C([#7])[#7]):1](=!@[O:2])!@[#7;+0;!D1:3]>>[*][C:1]=[O:2].[*][#7:3]", # amide "[C:1](=!@[O:2])!@[O;+0:3]>>[*][C:1]=[O:2].[O:3][*]", # ester "[N;!D1;+0;!$(N-C=[#7,#8,#15,#16])](-!@[*:1])-!@[*:2]>>[*][*:1].[*:2][*]", # amines #"[N;!D1](!@[*:1])!@[*:2]>>[*][*:1].[*:2][*]", # amines # again: what about aromatics? "[#7;R;D3;+0:1]-!@[*:2]>>[*][#7:1].[*:2][*]", # cyclic amines "[#6:1]-!@[O;+0]-!@[#6:2]>>[#6:1][*].[*][#6:2]", # ether "[C:1]=!@[C:2]>>[C:1][*].[*][C:2]", # olefin "[n;+0:1]-!@[C:2]>>[n:1][*].[C:2][*]", # aromatic nitrogen - aliphatic carbon "[O:3]=[C:4]-@[N;+0:1]-!@[C:2]>>[O:3]=[C:4]-[N:1][*].[C:2][*]", # lactam nitrogen - aliphatic carbon "[c:1]-!@[c:2]>>[c:1][*].[*][c:2]", # aromatic carbon - aromatic carbon "[n;+0:1]-!@[c:2]>>[n:1][*].[*][c:2]", # aromatic nitrogen - aromatic carbon *NOTE* this is not part of the standard recap set. 
"[#7;+0;D2,D3:1]-!@[S:2](=[O:3])=[O:4]>>[#7:1][*].[*][S:2](=[O:3])=[O:4]", # sulphonamide ) reactions = tuple([Reactions.ReactionFromSmarts(x) for x in reactionDefs]) class RecapHierarchyNode(object): """ This class is used to hold the Recap hiearchy """ mol=None children=None parents=None smiles = None def __init__(self,mol): self.mol=mol self.children = {} self.parents = {} def GetAllChildren(self): " returns a dictionary, keyed by SMILES, of children " res = {} for smi,child in iteritems(self.children): res[smi] = child child._gacRecurse(res,terminalOnly=False) return res def GetLeaves(self): " returns a dictionary, keyed by SMILES, of leaf (terminal) nodes " res = {} for smi,child in iteritems(self.children): if not len(child.children): res[smi] = child else: child._gacRecurse(res,terminalOnly=True) return res def getUltimateParents(self): """ returns all the nodes in the hierarchy tree that contain this node as a child """ if not self.parents: res = [self] else: res = [] for p in self.parents.values(): for uP in p.getUltimateParents(): if uP not in res: res.append(uP) return res def _gacRecurse(self,res,terminalOnly=False): for smi,child in iteritems(self.children): if not terminalOnly or not len(child.children): res[smi] = child child._gacRecurse(res,terminalOnly=terminalOnly) def __del__(self): self.children={} self.parent={} self.mol=None def RecapDecompose(mol,allNodes=None,minFragmentSize=0,onlyUseReactions=None): """ returns the recap decomposition for a molecule """ mSmi = Chem.MolToSmiles(mol,1) if allNodes is None: allNodes={} if mSmi in allNodes: return allNodes[mSmi] res = RecapHierarchyNode(mol) res.smiles =mSmi activePool={mSmi:res} allNodes[mSmi]=res while activePool: nSmi = next(iterkeys(activePool)) node = activePool.pop(nSmi) if not node.mol: continue for rxnIdx,reaction in enumerate(reactions): if onlyUseReactions and rxnIdx not in onlyUseReactions: continue #print ' .',nSmi #print ' !!!!',rxnIdx,nSmi,reactionDefs[rxnIdx] ps = 
reaction.RunReactants((node.mol,)) #print ' ',len(ps) if ps: for prodSeq in ps: seqOk=True # we want to disqualify small fragments, so sort the product sequence by size # and then look for "forbidden" fragments tSeq = [(prod.GetNumAtoms(onlyExplicit=True),idx) for idx,prod in enumerate(prodSeq)] tSeq.sort() ts=[(x,prodSeq[y]) for x,y in tSeq] prodSeq=ts for nats,prod in prodSeq: try: Chem.SanitizeMol(prod) except: continue pSmi = Chem.MolToSmiles(prod,1) if minFragmentSize>0: nDummies = pSmi.count('*') if nats-nDummies<minFragmentSize: seqOk=False break # don't forget after replacing dummy atoms to remove any empty # branches: elif pSmi.replace('[*]','').replace('()','') in ('','C','CC','CCC'): seqOk=False break prod.pSmi = pSmi if seqOk: for nats,prod in prodSeq: pSmi = prod.pSmi #print '\t',nats,pSmi if not pSmi in allNodes: pNode = RecapHierarchyNode(prod) pNode.smiles=pSmi pNode.parents[nSmi]=weakref.proxy(node) node.children[pSmi]=pNode activePool[pSmi] = pNode allNodes[pSmi]=pNode else: pNode=allNodes[pSmi] pNode.parents[nSmi]=weakref.proxy(node) node.children[pSmi]=pNode #print ' >>an:',allNodes.keys() return res # ------- ------- ------- ------- ------- ------- ------- ------- # Begin testing code if __name__=='__main__': import unittest class TestCase(unittest.TestCase): def test1(self): m = Chem.MolFromSmiles('C1CC1Oc1ccccc1-c1ncc(OC)cc1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.children.keys())==4) self.assertTrue(len(res.GetAllChildren().keys())==5) self.assertTrue(len(res.GetLeaves().keys())==3) def test2(self): m = Chem.MolFromSmiles('CCCOCCC') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(res.children=={}) def test3(self): allNodes={} m = Chem.MolFromSmiles('c1ccccc1-c1ncccc1') res = RecapDecompose(m,allNodes=allNodes) self.assertTrue(res) self.assertTrue(len(res.children.keys())==2) self.assertTrue(len(allNodes.keys())==3) m = Chem.MolFromSmiles('COc1ccccc1-c1ncccc1') res = 
RecapDecompose(m,allNodes=allNodes) self.assertTrue(res) self.assertTrue(len(res.children.keys())==2) # we get two more nodes from that: self.assertTrue(len(allNodes.keys())==5) self.assertTrue('[*]c1ccccc1OC' in allNodes) self.assertTrue('[*]c1ccccc1' in allNodes) m = Chem.MolFromSmiles('C1CC1Oc1ccccc1-c1ncccc1') res = RecapDecompose(m,allNodes=allNodes) self.assertTrue(res) self.assertTrue(len(res.children.keys())==4) self.assertTrue(len(allNodes.keys())==10) def testSFNetIssue1801871(self): m = Chem.MolFromSmiles('c1ccccc1OC(Oc1ccccc1)Oc1ccccc1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertFalse('[*]C([*])[*]' in ks) self.assertTrue('[*]c1ccccc1' in ks) self.assertTrue('[*]C([*])Oc1ccccc1' in ks) def testSFNetIssue1804418(self): m = Chem.MolFromSmiles('C1CCCCN1CCCC') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]N1CCCCC1' in ks) self.assertTrue('[*]CCCC' in ks) def testMinFragmentSize(self): m = Chem.MolFromSmiles('CCCOCCC') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(res.children=={}) res = RecapDecompose(m,minFragmentSize=3) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==1) ks = res.GetLeaves().keys() self.assertTrue('[*]CCC' in ks) m = Chem.MolFromSmiles('CCCOCC') res = RecapDecompose(m,minFragmentSize=3) self.assertTrue(res) self.assertTrue(res.children=={}) m = Chem.MolFromSmiles('CCCOCCOC') res = RecapDecompose(m,minFragmentSize=2) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]CCC' in ks) ks = res.GetLeaves().keys() self.assertTrue('[*]CCOC' in ks) def testAmideRxn(self): m = Chem.MolFromSmiles('C1CC1C(=O)NC1OC1') res = RecapDecompose(m,onlyUseReactions=[1]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]C(=O)C1CC1' in ks) 
self.assertTrue('[*]NC1CO1' in ks) m = Chem.MolFromSmiles('C1CC1C(=O)N(C)C1OC1') res = RecapDecompose(m,onlyUseReactions=[1]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]C(=O)C1CC1' in ks) self.assertTrue('[*]N(C)C1CO1' in ks) m = Chem.MolFromSmiles('C1CC1C(=O)n1cccc1') res = RecapDecompose(m,onlyUseReactions=[1]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]C(=O)C1CC1' in ks) self.assertTrue('[*]n1cccc1' in ks) m = Chem.MolFromSmiles('C1CC1C(=O)CC1OC1') res = RecapDecompose(m,onlyUseReactions=[1]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) m = Chem.MolFromSmiles('C1CCC(=O)NC1') res = RecapDecompose(m,onlyUseReactions=[1]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) m = Chem.MolFromSmiles('CC(=O)NC') res = RecapDecompose(m,onlyUseReactions=[1]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() m = Chem.MolFromSmiles('CC(=O)N') res = RecapDecompose(m,onlyUseReactions=[1]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) m = Chem.MolFromSmiles('C(=O)NCCNC(=O)CC') res = RecapDecompose(m,onlyUseReactions=[1]) self.assertTrue(res) self.assertTrue(len(res.children)==4) self.assertTrue(len(res.GetLeaves())==3) def testEsterRxn(self): m = Chem.MolFromSmiles('C1CC1C(=O)OC1OC1') res = RecapDecompose(m,onlyUseReactions=[2]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]C(=O)C1CC1' in ks) self.assertTrue('[*]OC1CO1' in ks) m = Chem.MolFromSmiles('C1CC1C(=O)CC1OC1') res = RecapDecompose(m,onlyUseReactions=[2]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) m = Chem.MolFromSmiles('C1CCC(=O)OC1') res = RecapDecompose(m,onlyUseReactions=[2]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) def testUreaRxn(self): m = Chem.MolFromSmiles('C1CC1NC(=O)NC1OC1') res = 
RecapDecompose(m,onlyUseReactions=[0]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]NC1CC1' in ks) self.assertTrue('[*]NC1CO1' in ks) m = Chem.MolFromSmiles('C1CC1NC(=O)N(C)C1OC1') res = RecapDecompose(m,onlyUseReactions=[0]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]NC1CC1' in ks) self.assertTrue('[*]N(C)C1CO1' in ks) m = Chem.MolFromSmiles('C1CCNC(=O)NC1C') res = RecapDecompose(m,onlyUseReactions=[0]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) m = Chem.MolFromSmiles('c1cccn1C(=O)NC1OC1') res = RecapDecompose(m,onlyUseReactions=[0]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]n1cccc1' in ks) self.assertTrue('[*]NC1CO1' in ks) m = Chem.MolFromSmiles('c1cccn1C(=O)n1c(C)ccc1') res = RecapDecompose(m,onlyUseReactions=[0]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]n1cccc1C' in ks) def testAmineRxn(self): m = Chem.MolFromSmiles('C1CC1N(C1NC1)C1OC1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==3) ks = res.GetLeaves().keys()<|fim▁hole|> self.assertTrue('[*]C1CO1' in ks) self.assertTrue('[*]C1CN1' in ks) m = Chem.MolFromSmiles('c1ccccc1N(C1NC1)C1OC1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==3) ks = res.GetLeaves().keys() self.assertTrue('[*]c1ccccc1' in ks) self.assertTrue('[*]C1CO1' in ks) self.assertTrue('[*]C1CN1' in ks) m = Chem.MolFromSmiles('c1ccccc1N(c1ncccc1)C1OC1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==3) ks = res.GetLeaves().keys() self.assertTrue('[*]c1ccccc1' in ks) self.assertTrue('[*]c1ccccn1' in ks) self.assertTrue('[*]C1CO1' in ks) m = Chem.MolFromSmiles('c1ccccc1N(c1ncccc1)c1ccco1') res = RecapDecompose(m) self.assertTrue(res) 
self.assertTrue(len(res.GetLeaves())==3) ks = res.GetLeaves().keys() self.assertTrue('[*]c1ccccc1' in ks) self.assertTrue('[*]c1ccccn1' in ks) self.assertTrue('[*]c1ccco1' in ks) m = Chem.MolFromSmiles('C1CCCCN1C1CC1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]N1CCCCC1' in ks) self.assertTrue('[*]C1CC1' in ks) m = Chem.MolFromSmiles('C1CCC2N1CC2') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) def testEtherRxn(self): m = Chem.MolFromSmiles('C1CC1OC1OC1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]C1CC1' in ks) self.assertTrue('[*]C1CO1' in ks) m = Chem.MolFromSmiles('C1CCCCO1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) m = Chem.MolFromSmiles('c1ccccc1OC1OC1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]c1ccccc1' in ks) self.assertTrue('[*]C1CO1' in ks) m = Chem.MolFromSmiles('c1ccccc1Oc1ncccc1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]c1ccccc1' in ks) self.assertTrue('[*]c1ccccn1' in ks) def testOlefinRxn(self): m = Chem.MolFromSmiles('ClC=CBr') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]CCl' in ks) self.assertTrue('[*]CBr' in ks) m = Chem.MolFromSmiles('C1CC=CC1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) def testAromNAliphCRxn(self): m = Chem.MolFromSmiles('c1cccn1CCCC') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]n1cccc1' in ks) self.assertTrue('[*]CCCC' in ks) m = Chem.MolFromSmiles('c1ccc2n1CCCC2') 
res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) def testLactamNAliphCRxn(self): m = Chem.MolFromSmiles('C1CC(=O)N1CCCC') res = RecapDecompose(m,onlyUseReactions=[8]) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]N1CCC1=O' in ks) self.assertTrue('[*]CCCC' in ks) m = Chem.MolFromSmiles('O=C1CC2N1CCCC2') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) def testAromCAromCRxn(self): m = Chem.MolFromSmiles('c1ccccc1c1ncccc1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]c1ccccc1' in ks) self.assertTrue('[*]c1ccccn1' in ks) m = Chem.MolFromSmiles('c1ccccc1C1CC1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) def testAromNAromCRxn(self): m = Chem.MolFromSmiles('c1cccn1c1ccccc1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]n1cccc1' in ks) self.assertTrue('[*]c1ccccc1' in ks) def testSulfonamideRxn(self): m = Chem.MolFromSmiles('CCCNS(=O)(=O)CC') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]NCCC' in ks) self.assertTrue('[*]S(=O)(=O)CC' in ks) m = Chem.MolFromSmiles('c1cccn1S(=O)(=O)CC') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) ks = res.GetLeaves().keys() self.assertTrue('[*]n1cccc1' in ks) self.assertTrue('[*]S(=O)(=O)CC' in ks) m = Chem.MolFromSmiles('C1CNS(=O)(=O)CC1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) def testSFNetIssue1881803(self): m = Chem.MolFromSmiles('c1ccccc1n1cccc1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) m = Chem.MolFromSmiles('c1ccccc1[n+]1ccccc1') res = RecapDecompose(m) 
self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) m = Chem.MolFromSmiles('C1CC1NC(=O)CC') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) m = Chem.MolFromSmiles('C1CC1[NH+]C(=O)CC') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) m = Chem.MolFromSmiles('C1CC1NC(=O)NC1CCC1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==2) m = Chem.MolFromSmiles('C1CC1[NH+]C(=O)[NH+]C1CCC1') res = RecapDecompose(m) self.assertTrue(res) self.assertTrue(len(res.GetLeaves())==0) unittest.main()<|fim▁end|>
self.assertTrue('[*]C1CC1' in ks)
<|file_name|>0002_phonenumber_related_sim.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.11 on 2017-11-01 20:02 from __future__ import unicode_literals <|fim▁hole|> class Migration(migrations.Migration): initial = True dependencies = [ ('phone_numbers', '0001_initial'), ('sims', '0001_initial'), ] operations = [ migrations.AddField( model_name='phonenumber', name='related_sim', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phone_numbers', to='sims.Sim'), ), ]<|fim▁end|>
from django.db import migrations, models import django.db.models.deletion
<|file_name|>ChangePasswordScreenController.js<|end_file_name|><|fim▁begin|>Ext.define('Healthsurvey.view.mobileview.login.ChangePasswordScreenController', { extend : 'Ext.app.ViewController', alias : 'controller.changePasswordScreenController', onChangePasswordClick : function(btn, opts) { debugger; var form = btn.up().up(); if (form.isValid()) { var formData = form.getValues(); delete formData.reTypeNewPassword; var entMask = new Ext.LoadMask({ msg : 'Updating...', target : this.getView() }).show(); Ext.Ajax.request({ timeout : 180000, url : "secure/PasswordGenerator/changePassword", method : 'PUT', waitMsg : 'Updating...', entMask : entMask, jsonData : formData, me : this, success : function(response, sender) { debugger; var responseText = Ext.JSON.decode(response.responseText); if (responseText.response.success) { Ext.Msg.alert("Info", responseText.response.message); sender.me.onResetClick(); } else { Ext.Msg.alert("Info", responseText.response.message); } sender.entMask.hide(); }, failure : function(response, sender) { debugger; Ext.Msg.alert("ERROR", "Cannot connect to server"); sender.entMask.hide(); } }); } }, onResetClick : function(btn, opts) { debugger;<|fim▁hole|><|fim▁end|>
this.getView().getForm().reset(); } });