prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>Setup-lang-ru-debug.js<|end_file_name|><|fim▁begin|>/********************** tine translations of Setup**********************/ Locale.Gettext.prototype._msgs['./LC_MESSAGES/Setup'] = new Locale.Gettext.PO(({ "" : "Project-Id-Version: Tine 2.0\nPOT-Creation-Date: 2008-05-17 22:12+0100\nPO-Revision-Date: 2012-09-19 08:58+0000\nLast-Translator: corneliusweiss <[email protected]>\nLanguage-Team: Russian (http://www.transifex.com/projects/p/tine20/language/ru/)\nMIME-Version: 1.0\nContent-Type: text/plain; charset=UTF-8\nContent-Transfer-Encoding: 8bit\nLanguage: ru\nPlural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\nX-Poedit-Country: GB\nX-Poedit-Language: en\nX-Poedit-SourceCharset: utf-8\n" , "Name" : "Имя" , "Enabled" : "Позволенный" , "Order" : "Последовательность" , "Installed Version" : "Установленная версия" , "Available Version" : "Доступная версия" , "Status" : "Статус" , "Depends on" : "Зависит от" , "Install application" : "Установить приложение" , "Uninstall application" : "Удалить приложение" , "Update application" : "Обновить приложение" , "Go to {0} login" : "" , "uninstall" : "удалить" , "Do you really want to uninstall the application(s)?" : "Вы действительно хотите удалить приложение(я)?" , "This may take a while" : "Это может занять некоторое время" , "Dependency Violation" : "" , "Delete all existing users and groups" : "Удалить всех существующих пользователей и группы" , "Switching from SQL to LDAP will delete all existing User Accounts, Groups and Roles. Do you really want to switch the accounts storage backend to LDAP ?" 
: "" , "Backend" : "" , "Initial Admin User" : "" , "Initial admin login name" : "" , "Initial admin Password" : "" , "Password confirmation" : "Подтверждение пароля" , "Authentication provider" : "Проверка подлинности поставщика" , "Try to split username" : "Попробуйте разделить имя пользователя" , "Yes" : "Да" , "No" : "Нет" , "Account canonical form" : "" , "Account domain name " : "" , "Account domain short name" : "" , "Host" : "Хост" , "Port" : "Порт" , "Login name" : "" , "Password" : "Пароль" , "Bind requires DN" : "" , "Base DN" : "" , "Search filter" : "Фильтр поиска" , "Hostname" : "Имя узла" , "Secure Connection" : "Безопасное подключение" , "None" : "Нет" , "TLS" : "TLS" , "SSL" : "SSL" , "Append domain to login name" : "" , "Accounts storage" : "" , "Default user group name" : "" , "Default admin group name" : "" , "User DN" : "" , "User Filter" : "Фильтр пользователей" , "User Search Scope" : "" , "Groups DN" : "DN групп" , "Group Filter" : "Фильтр группы" , "Group Search Scope" : "" , "Password encoding" : "" , "Use Rfc 2307 bis" : "Использовать Rfc 2307 bis" , "Min User Id" : "Мин ID пользователя" , "Max User Id" : "Макс ID пользователя" , "Min Group Id" : "Минимальный ID группы" , "Max Group Id" : "Максимальный ID группы" , "Group UUID Attribute name" : "" , "User UUID Attribute name" : "" , "Readonly access" : "" , "Password Settings" : "" , "User can change password" : "Пользователь должен сменить пароль" , "Enable password policy" : "" , "Only ASCII" : "" , "Minimum length" : "" , "Minimum word chars" : "" , "Minimum uppercase chars" : "" , "Minimum special chars" : "" , "Minimum numbers" : "" , "Redirect Settings" : "" , "Redirect Url (redirect to login screen if empty)" : "" , "Redirect Always (if No, only redirect after logout)" : "" , "Redirect to referring site, if exists" : "" , "Save config and install" : "" , "Save config" : "Сохранить конфигурацию" , "Passwords don't match" : "Пароли не совпадают" , "Should not be empty" : "Не должен 
быть пустым" , "File" : "Файл" , "Adapter" : "Адаптер" , "Setup Authentication" : "Аутентификация установки" , "Username" : "Имя пользователя" , "Database" : "База данных" , "User" : "Пользователь" , "Prefix" : "Префикс" , "Logging" : "Регистрация" , "Filename" : "Имя файла" , "Priority" : "Приоритет" , "Caching" : "Кэширование" , "Path" : "Путь" , "Lifetime (seconds)" : "Время жизни (секунд)" , "Temporary files" : "Временные файлы" , "Temporary Files Path" : "Путь к временным файлам" , "Session" : "Сессия" , "Filestore directory" : "Каталог хранилища файлов" , "Filestore Path" : "" , "Addressbook Map panel" : "" , "Map panel" : "" , "disabled" : "отключен" , "enabled" : "включен" , "Config file is not writable" : "Config файл защищен от записи" , "Download config file" : "Загрузить файл конфигурации" , "Standard IMAP" : "Стандартный IMAP" , "Standard SMTP" : "Стандартный SMTP" , "Imap" : "Imap" , "Use system account" : "Использование системной учетной записи" , "Cyrus Admin" : "" , "Cyrus Admin Password" : "" , "Smtp" : "Smtp" , "Authentication" : ""<|fim▁hole|> , "Login" : "" , "Plain" : "" , "Primary Domain" : "" , "Secondary Domains (comma separated)" : "" , "Notifications service address" : "" , "Notification Username" : "" , "Notification Password" : "" , "Notifications local client (hostname or IP address)" : "" , "SIEVE" : "" , "MySql Hostname" : "" , "MySql Database" : "База данных MySql" , "MySql User" : "Пользователь MySql" , "MySql Password" : "Пароль MySql" , "MySql Port" : "Порт MySql" , "User or UID" : "Пользователь или UID" , "Group or GID" : "Группа или GID" , "Home Template" : "" , "Password Scheme" : "Схема пароля" , "PLAIN-MD5" : "PLAIN-MD5" , "MD5-CRYPT" : "MD5-CRYPT" , "SHA1" : "SHA1" , "SHA256" : "SHA256" , "SSHA256" : "SSHA256" , "SHA512" : "SHA512" , "SSHA512" : "SSHA512" , "PLAIN" : "PLAIN" , "Performing Environment Checks..." : "Проведение проверок окружения..." , "There could not be found any {0}. 
Please try to change your filter-criteria, view-options or the {1} you search in." : "" , "Check" : "Проверка" , "Result" : "Результат" , "Error" : "Ошибка" , "Run setup tests" : "Запуск тестов установки" , "Ignore setup tests" : "" , "Terms and Conditions" : "" , "Setup Checks" : "Проверки установки" , "Config Manager" : "Управление конфигурацией" , "Authentication/Accounts" : "" , "Email" : "" , "Application Manager" : "Управление приложениями" , "Application, Applications" : [ "Приложение" ,"Приложения" ,"Приложений" ] , "I have read the license agreement and accept it" : "" , "License Agreement" : "" , "I have read the privacy agreement and accept it" : "" , "Privacy Agreement" : "" , "Accept Terms and Conditions" : "" , "Application" : "Приложение" }));<|fim▁end|>
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from django.test import TestCase from finance.models import Banking_Account class IBANTestCase(TestCase):<|fim▁hole|> def test_iban_converter(self): """BBAN to IBAN conversion""" self.assertEqual(Banking_Account.convertBBANToIBAN("091-0002777-90"), 'BE34091000277790') self.assertEqual(Banking_Account.convertBBANToIBAN("679-2005502-27"), 'BE48679200550227') self.assertEqual(Banking_Account.isBBAN("679-2005502-27"), True) self.assertEqual(Banking_Account.isBBAN('BE48679200550227'), False) self.assertEqual(Banking_Account.isIBAN("679-2005502-27"), False) self.assertEqual(Banking_Account.isIBAN('BE48679200550227'), True)<|fim▁end|>
def setUp(self): pass
<|file_name|>jsonSchema.js<|end_file_name|><|fim▁begin|>define(['../Property', '../Model', 'dojo/_base/declare', 'json-schema/lib/validate'], function (Property, Model, declare, jsonSchemaValidator) { // module: // dstore/extensions/JsonSchema // summary: // This module generates a dstore schema from a JSON Schema to enabled validation of objects // and property changes with JSON Schema return function (jsonSchema) { // create the schema that can be used by dstore/Model var modelSchema = {}; var properties = jsonSchema.properties || jsonSchema; // the validation function, this can be used for all the properties function checkForErrors() { var value = this.valueOf(); var key = this.name; // get the current value and test it against the property's definition var validation = jsonSchemaValidator.validate(value, properties[key]); // set any errors var errors = validation.errors; if (errors) { // assign the property names to the errors for (var i = 0; i < errors.length; i++) { errors[i].property = key; } } return errors; } // iterate through the schema properties, creating property validators for (var i in properties) { var jsDefinition = properties[i]; var definition = modelSchema[i] = new Property({ checkForErrors: checkForErrors }); if (typeof jsDefinition.type === 'string') { // copy the type so it can be used for coercion definition.type = jsDefinition.type; } if (typeof jsDefinition['default'] === 'string') { // and copy the default definition['default'] = jsDefinition['default']; } }<|fim▁hole|> schema: modelSchema }); }; });<|fim▁end|>
return declare(Model, {
<|file_name|>drop-trait-generic.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. //<|fim▁hole|>// option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(unsafe_destructor)] struct S<T> { x: T } #[unsafe_destructor] impl<T> ::std::ops::Drop for S<T> { fn drop(&mut self) { println!("bye"); } } pub fn main() { let _x = S { x: 1 }; }<|fim▁end|>
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
<|file_name|>Heartbreaker.js<|end_file_name|><|fim▁begin|>import SPELLS from 'common/SPELLS'; import RESOURCE_TYPES from 'game/RESOURCE_TYPES'; import Analyzer, { SELECTED_PLAYER } from 'parser/core/Analyzer'; import Events from 'parser/core/Events'; import STATISTIC_ORDER from 'parser/ui/STATISTIC_ORDER'; import TalentStatisticBox from 'parser/ui/TalentStatisticBox'; import React from 'react'; const DEATHSTRIKE_COST = 40; class Heartbreaker extends Analyzer { rpGains = []; hsCasts = 0; constructor(...args) { super(...args); this.active = this.selectedCombatant.hasTalent(SPELLS.HEARTBREAKER_TALENT.id); this.addEventListener(Events.cast.by(SELECTED_PLAYER).spell(SPELLS.HEART_STRIKE), this.onCast); this.addEventListener(Events.energize.spell(SPELLS.HEARTBREAKER), this.onEnergize); } onCast(event) { this.hsCasts += 1; } onEnergize(event) { if (event.resourceChangeType !== RESOURCE_TYPES.RUNIC_POWER.id) { return; } this.rpGains.push(event.resourceChange); } get totalRPGained() { return this.rpGains.reduce((a, b) => a + b, 0); } get averageHearStrikeHits() { return (this.rpGains.length / this.hsCasts).toFixed(2); } statistic() {<|fim▁hole|> return ( <TalentStatisticBox talent={SPELLS.HEARTBREAKER_TALENT.id} position={STATISTIC_ORDER.OPTIONAL(1)} value={this.totalRPGained} label="Runic Power gained" tooltip={ <> Resulting in about {Math.floor(this.totalRPGained / DEATHSTRIKE_COST)} extra Death Strikes. <br /> Your Heart Strike hit on average {this.averageHearStrikeHits} targets. </> } /> ); } } export default Heartbreaker;<|fim▁end|>
<|file_name|>radio_box.py<|end_file_name|><|fim▁begin|># Created On: 2010-06-02 # Copyright 2015 Hardcoded Software (http://www.hardcoded.net) # # This software is licensed under the "GPLv3" License as described in the "LICENSE" file, # which should be included with this package. The terms are also available at # http://www.gnu.org/licenses/gpl-3.0.html from PyQt5.QtCore import pyqtSignal from PyQt5.QtWidgets import QWidget, QHBoxLayout, QRadioButton from .util import horizontalSpacer class RadioBox(QWidget): def __init__(self, parent=None, items=None, spread=True, **kwargs): # If spread is False, insert a spacer in the layout so that the items don't use all the # space they're given but rather align left. if items is None: items = [] super().__init__(parent, **kwargs) self._buttons = [] self._labels = items self._selected_index = 0 self._spacer = horizontalSpacer() if not spread else None self._layout = QHBoxLayout(self) self._update_buttons() #--- Private def _update_buttons(self): if self._spacer is not None: self._layout.removeItem(self._spacer) to_remove = self._buttons[len(self._labels):] for button in to_remove: self._layout.removeWidget(button) button.setParent(None) del self._buttons[len(self._labels):] to_add = self._labels[len(self._buttons):] for _ in to_add: button = QRadioButton(self) self._buttons.append(button) self._layout.addWidget(button) button.toggled.connect(self.buttonToggled) if self._spacer is not None: self._layout.addItem(self._spacer) if not self._buttons: return<|fim▁hole|> self._update_selection() def _update_selection(self): self._selected_index = max(0, min(self._selected_index, len(self._buttons)-1)) selected = self._buttons[self._selected_index] selected.setChecked(True) #--- Event Handlers def buttonToggled(self): for i, button in enumerate(self._buttons): if button.isChecked(): self._selected_index = i self.itemSelected.emit(i) break #--- Signals itemSelected = pyqtSignal(int) #--- Properties @property def buttons(self): return 
self._buttons[:] @property def items(self): return self._labels[:] @items.setter def items(self, value): self._labels = value self._update_buttons() @property def selected_index(self): return self._selected_index @selected_index.setter def selected_index(self, value): self._selected_index = value self._update_selection()<|fim▁end|>
for button, label in zip(self._buttons, self._labels): button.setText(label)
<|file_name|>memory.go<|end_file_name|><|fim▁begin|>package collector import ( "github.com/lowstz/mongodb_exporter/shared" ) //Mem type MemStats struct { Bits float64 `bson:"bits"` Resident float64 `bson:"resident"`<|fim▁hole|> Virtual float64 `bson:"virtual"` Mapped float64 `bson:"mapped"` MappedWithJournal float64 `bson:"mappedWithJournal"` } func (memStats *MemStats) Export(groupName string) { group := shared.FindOrCreateGroup(groupName) group.Export("resident", memStats.Resident) group.Export("virtual", memStats.Virtual) group.Export("mapped", memStats.Mapped) group.Export("mapped_with_journal", memStats.MappedWithJournal) }<|fim▁end|>
<|file_name|>0023_userdetail_contributor_uid.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('changeset', '0022_auto_20160222_2358'), ] operations = [ migrations.AddField( model_name='userdetail', name='contributor_uid',<|fim▁hole|> ]<|fim▁end|>
field=models.IntegerField(db_index=True, null=True, blank=True), ),
<|file_name|>audio-whitenoise.rs<|end_file_name|><|fim▁begin|>extern crate sdl2; extern crate rand; use sdl2::audio::{AudioCallback, AudioSpecDesired}; use std::time::Duration; struct MyCallback { volume: f32 } impl AudioCallback for MyCallback { type Channel = f32; fn callback(&mut self, out: &mut [f32]) { use self::rand::{Rng, thread_rng}; let mut rng = thread_rng(); // Generate white noise for x in out.iter_mut() { *x = (rng.gen_range(0.0, 2.0) - 1.0) * self.volume; } } } fn main() -> Result<(), String> { let sdl_context = sdl2::init()?; let audio_subsystem = sdl_context.audio()?; let desired_spec = AudioSpecDesired { freq: Some(44_100), channels: Some(1), // mono samples: None, // default sample size }; // None: use default device let mut device = audio_subsystem.open_playback(None, &desired_spec, |spec| { // Show obtained AudioSpec println!("{:?}", spec); MyCallback { volume: 0.5 } })?; // Start playback device.resume(); // Play for 1 second std::thread::sleep(Duration::from_millis(1_000)); { // Acquire a lock. This lets us read and modify callback data. let mut lock = device.lock(); (*lock).volume = 0.25;<|fim▁hole|> } // Play for another second std::thread::sleep(Duration::from_millis(1_000)); // Device is automatically closed when dropped Ok(()) }<|fim▁end|>
// Lock guard is dropped here
<|file_name|>mock-raf-tests.ts<|end_file_name|><|fim▁begin|>import * as createMockRaf from 'mock-raf'; const mockRaf = createMockRaf(); const id = mockRaf.raf(() => {<|fim▁hole|>}); mockRaf.step({ count: 10 }); mockRaf.cancel(id); console.log(mockRaf.now());<|fim▁end|>
console.log('RAF!!!');
<|file_name|>test_short_code.py<|end_file_name|><|fim▁begin|># coding=utf-8 r""" This code was generated by \ / _ _ _| _ _ | (_)\/(_)(_|\/| |(/_ v1.0.0 / / """ from tests import IntegrationTestCase from tests.holodeck import Request<|fim▁hole|>from twilio.http.response import Response class ShortCodeTestCase(IntegrationTestCase): def test_fetch_request(self): self.holodeck.mock(Response(500, '')) with self.assertRaises(TwilioException): self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \ .short_codes("SCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch() self.holodeck.assert_has_request(Request( 'get', 'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/SMS/ShortCodes/SCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX.json', )) def test_fetch_response(self): self.holodeck.mock(Response( 200, ''' { "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "api_version": "2010-04-01", "date_created": null, "date_updated": null, "friendly_name": "API_CLUSTER_TEST_SHORT_CODE", "short_code": "99990", "sid": "SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "sms_fallback_method": "POST", "sms_fallback_url": null, "sms_method": "POST", "sms_url": null, "uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes/SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json" } ''' )) actual = self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \ .short_codes("SCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch() self.assertIsNotNone(actual) def test_update_request(self): self.holodeck.mock(Response(500, '')) with self.assertRaises(TwilioException): self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \ .short_codes("SCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update() self.holodeck.assert_has_request(Request( 'post', 'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/SMS/ShortCodes/SCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX.json', )) def test_update_response(self): self.holodeck.mock(Response( 200, ''' { "account_sid": 
"ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "api_version": "2010-04-01", "date_created": null, "date_updated": null, "friendly_name": "API_CLUSTER_TEST_SHORT_CODE", "short_code": "99990", "sid": "SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "sms_fallback_method": "POST", "sms_fallback_url": null, "sms_method": "POST", "sms_url": null, "uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes/SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json" } ''' )) actual = self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \ .short_codes("SCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update() self.assertIsNotNone(actual) def test_list_request(self): self.holodeck.mock(Response(500, '')) with self.assertRaises(TwilioException): self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \ .short_codes.list() self.holodeck.assert_has_request(Request( 'get', 'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/SMS/ShortCodes.json', )) def test_read_full_response(self): self.holodeck.mock(Response( 200, ''' { "end": 0, "first_page_uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes.json?Page=0&PageSize=50", "last_page_uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes.json?Page=0&PageSize=50", "next_page_uri": null, "num_pages": 1, "page": 0, "page_size": 50, "previous_page_uri": null, "short_codes": [ { "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "api_version": "2010-04-01", "date_created": null, "date_updated": null, "friendly_name": "API_CLUSTER_TEST_SHORT_CODE", "short_code": "99990", "sid": "SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "sms_fallback_method": "POST", "sms_fallback_url": null, "sms_method": "POST", "sms_url": null, "uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes/SCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json" } ], "start": 0, "total": 1, "uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes.json" } ''' )) actual = 
self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \ .short_codes.list() self.assertIsNotNone(actual) def test_read_empty_response(self): self.holodeck.mock(Response( 200, ''' { "end": 0, "first_page_uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes.json?Page=0&PageSize=50", "last_page_uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes.json?Page=0&PageSize=50", "next_page_uri": null, "num_pages": 1, "page": 0, "page_size": 50, "previous_page_uri": null, "short_codes": [], "start": 0, "total": 1, "uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/SMS/ShortCodes.json" } ''' )) actual = self.client.api.v2010.accounts("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \ .short_codes.list() self.assertIsNotNone(actual)<|fim▁end|>
from twilio.base.exceptions import TwilioException
<|file_name|>recursion.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. <|fim▁hole|>// xfail-test enum Nil {Nil} struct Cons<T> {head:int, tail:T} trait Dot {fn dot(other:self) -> int;} impl Dot for Nil { fn dot(_:Nil) -> int {0} } impl<T:Dot> Dot for Cons<T> { fn dot(other:Cons<T>) -> int { self.head * other.head + self.tail.dot(other.tail) } } fn test<T:Dot> (n:int, i:int, first:T, second:T) ->int { match n { 0 => {first.dot(second)} // Error message should be here. It should be a type error // to instantiate `test` at a type other than T. (See #4287) _ => {test (n-1, i+1, Cons {head:2*i+1, tail:first}, Cons{head:i*i, tail:second})} } } pub fn main() { let n = test(1, 0, Nil, Nil); println!("{}", n); }<|fim▁end|>
<|file_name|>protoc_java.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Generate java source files from protobuf files. This is a helper file for the genproto_java action in protoc_java.gypi. It performs the following steps: 1. Deletes all old sources (ensures deleted classes are not part of new jars). 2. Creates source directory. 3. Generates Java files using protoc (output into either --java-out-dir or --srcjar). 4. Creates a new stamp file. """ import os import optparse import shutil import subprocess import sys sys.path.append(os.path.join(os.path.dirname(__file__), "android", "gyp")) from util import build_utils def main(argv): parser = optparse.OptionParser() build_utils.AddDepfileOption(parser) parser.add_option("--protoc", help="Path to protoc binary.") parser.add_option("--proto-path", help="Path to proto directory.") parser.add_option("--java-out-dir",<|fim▁hole|> parser.add_option("--srcjar", help="Path to output srcjar.") parser.add_option("--stamp", help="File to touch on success.") options, args = parser.parse_args(argv) build_utils.CheckOptions(options, parser, ['protoc', 'proto_path']) if not options.java_out_dir and not options.srcjar: print 'One of --java-out-dir or --srcjar must be specified.' return 1 with build_utils.TempDir() as temp_dir: # Specify arguments to the generator. generator_args = ['optional_field_style=reftypes', 'store_unknown_fields=true'] out_arg = '--javanano_out=' + ','.join(generator_args) + ':' + temp_dir # Generate Java files using protoc. 
build_utils.CheckOutput( [options.protoc, '--proto_path', options.proto_path, out_arg] + args) if options.java_out_dir: build_utils.DeleteDirectory(options.java_out_dir) shutil.copytree(temp_dir, options.java_out_dir) else: build_utils.ZipDir(options.srcjar, temp_dir) if options.depfile: build_utils.WriteDepfile( options.depfile, args + [options.protoc] + build_utils.GetPythonDependencies()) if options.stamp: build_utils.Touch(options.stamp) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))<|fim▁end|>
help="Path to output directory for java files.")
<|file_name|>amiibro.py<|end_file_name|><|fim▁begin|>class Amiibro: def __init__(self): self._amiibos={#'hex': {'name': 'Noname', 'method': methodObject, 'params': params}<|fim▁hole|> } def handleTag(self, hex=None): if hex in self._amiibos: print(self._amiibos[hex]['name']) if self._amiibos[hex]['params']: self._amiibos[hex]['method'](self._amiibos[hex]['params']) else: self._amiibos[hex]['method']() return self._amiibos[hex]['name']<|fim▁end|>
<|file_name|>signals.py<|end_file_name|><|fim▁begin|>""" This module contains signals related to enterprise. """ import logging import six from django.conf import settings from django.contrib.auth.models import User # lint-amnesty, pylint: disable=imported-auth-user from django.db.models.signals import post_save, pre_save from django.dispatch import receiver from enterprise.models import EnterpriseCourseEnrollment, EnterpriseCustomer, EnterpriseCustomerUser from integrated_channels.integrated_channel.tasks import ( transmit_single_learner_data, transmit_single_subsection_learner_data ) from slumber.exceptions import HttpClientError from lms.djangoapps.email_marketing.tasks import update_user from openedx.core.djangoapps.commerce.utils import ecommerce_api_client from openedx.core.djangoapps.signals.signals import COURSE_GRADE_NOW_PASSED, COURSE_ASSESSMENT_GRADE_CHANGED from openedx.features.enterprise_support.api import enterprise_enabled from openedx.features.enterprise_support.tasks import clear_enterprise_customer_data_consent_share_cache from openedx.features.enterprise_support.utils import clear_data_consent_share_cache, is_enterprise_learner from common.djangoapps.student.signals import UNENROLL_DONE log = logging.getLogger(__name__) @receiver(post_save, sender=EnterpriseCustomerUser) def update_email_marketing_user_with_enterprise_vars(sender, instance, **kwargs): # pylint: disable=unused-argument, invalid-name """ Update the SailThru user with enterprise-related vars. 
""" user = User.objects.get(id=instance.user_id) # perform update asynchronously update_user.delay( sailthru_vars={ 'is_enterprise_learner': True, 'enterprise_name': instance.enterprise_customer.name, }, email=user.email ) @receiver(post_save, sender=EnterpriseCourseEnrollment) def update_dsc_cache_on_course_enrollment(sender, instance, **kwargs): # pylint: disable=unused-argument """ clears data_sharing_consent_needed cache after Enterprise Course Enrollment """ clear_data_consent_share_cache( instance.enterprise_customer_user.user_id, instance.course_id ) @receiver(pre_save, sender=EnterpriseCustomer) def update_dsc_cache_on_enterprise_customer_update(sender, instance, **kwargs): """ clears data_sharing_consent_needed cache after enable_data_sharing_consent flag is changed. """ old_instance = sender.objects.filter(pk=instance.uuid).first() if old_instance: # instance already exists, so it's updating. new_value = instance.enable_data_sharing_consent old_value = old_instance.enable_data_sharing_consent if new_value != old_value: kwargs = {'enterprise_customer_uuid': six.text_type(instance.uuid)} result = clear_enterprise_customer_data_consent_share_cache.apply_async(kwargs=kwargs) log.info(u"DSC: Created {task_name}[{task_id}] with arguments {kwargs}".format( task_name=clear_enterprise_customer_data_consent_share_cache.name, task_id=result.task_id, kwargs=kwargs, )) @receiver(COURSE_GRADE_NOW_PASSED, dispatch_uid="new_passing_enterprise_learner") def handle_enterprise_learner_passing_grade(sender, user, course_id, **kwargs): # pylint: disable=unused-argument """ Listen for a learner passing a course, transmit data to relevant integrated channel """ if enterprise_enabled() and is_enterprise_learner(user): kwargs = { 'username': six.text_type(user.username), 'course_run_id': six.text_type(course_id) } transmit_single_learner_data.apply_async(kwargs=kwargs) @receiver(COURSE_ASSESSMENT_GRADE_CHANGED) def handle_enterprise_learner_subsection(sender, user, course_id, 
subsection_id, subsection_grade, **kwargs): # pylint: disable=unused-argument """ Listen for an enterprise learner completing a subsection, transmit data to relevant integrated channel. """ if enterprise_enabled() and is_enterprise_learner(user): kwargs = { 'username': str(user.username), 'course_run_id': str(course_id), 'subsection_id': str(subsection_id), 'grade': str(subsection_grade), } transmit_single_subsection_learner_data.apply_async(kwargs=kwargs) @receiver(UNENROLL_DONE) def refund_order_voucher(sender, course_enrollment, skip_refund=False, **kwargs): # pylint: disable=unused-argument """ Call the /api/v2/enterprise/coupons/create_refunded_voucher/ API to create new voucher and assign it to user. """ if skip_refund: return if not course_enrollment.refundable(): return if not EnterpriseCourseEnrollment.objects.filter( enterprise_customer_user__user_id=course_enrollment.user_id, course_id=str(course_enrollment.course.id) ).exists(): return service_user = User.objects.get(username=settings.ECOMMERCE_SERVICE_WORKER_USERNAME) client = ecommerce_api_client(service_user) order_number = course_enrollment.get_order_attribute_value('order_number') if order_number: error_message = u"Encountered {} from ecommerce while creating refund voucher. Order={}, enrollment={}, user={}" try:<|fim▁hole|> except HttpClientError as ex: log.info( error_message.format(type(ex).__name__, order_number, course_enrollment, course_enrollment.user) ) except Exception as ex: # pylint: disable=broad-except log.exception( error_message.format(type(ex).__name__, order_number, course_enrollment, course_enrollment.user) )<|fim▁end|>
client.enterprise.coupons.create_refunded_voucher.post({"order": order_number})
<|file_name|>ProbeScience.mock.js<|end_file_name|><|fim▁begin|>import { PROBES_SCIENCE_QUERY, PROBES_SCIENCE_SUB, PROBES_SCIENCE_CONTACT_SUB, } from "components/views/ProbeScience"; import systems from "../data/systems"; import sensors from "../data/sensors"; import sensorContacts from "../data/sensorContacts"; import { PROBE_SCIENCE_CORE_QUERY, PROBES_SCIENCE_CORE_SUB, PROBE_SCIENCE_CONTACTS_CORE_SUB, } from "components/views/ProbeScience/core"; import {PROBE_SCIENCE_EMITTER_SUB} from "components/views/ProbeScience/probeScience"; export default [ { request: { query: PROBES_SCIENCE_QUERY, variables: {simulatorId: "test"}, }, result: { data: { sensorContacts, sensors, probes: systems.probes, }, }, }, { request: { query: PROBES_SCIENCE_SUB,<|fim▁hole|> probesUpdate: systems.probes, }, }, }, { request: { query: PROBES_SCIENCE_CONTACT_SUB, variables: {simulatorId: "test"}, }, result: { data: { sensorContactUpdate: sensorContacts, }, }, }, { request: { query: PROBE_SCIENCE_EMITTER_SUB, variables: {simulatorId: "test"}, }, result: { data: { scienceProbeEmitter: { name: null, type: null, charge: null, }, }, }, }, { request: { query: PROBE_SCIENCE_EMITTER_SUB, variables: {simulatorId: "test"}, }, result: { data: { scienceProbeEmitter: { name: null, type: null, charge: null, }, }, }, }, { request: { query: PROBE_SCIENCE_CORE_QUERY, variables: {simulatorId: "test"}, }, result: { data: { sensorContacts, sensors, probes: systems.probes, }, }, }, { request: { query: PROBES_SCIENCE_CORE_SUB, variables: {simulatorId: "test"}, }, result: { data: { probesUpdate: systems.probes, }, }, }, { request: { query: PROBE_SCIENCE_CONTACTS_CORE_SUB, variables: {simulatorId: "test"}, }, result: { data: { sensorContactUpdate: sensorContacts, }, }, }, ];<|fim▁end|>
variables: {simulatorId: "test"}, }, result: { data: {
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod xdg; <|fim▁hole|><|fim▁end|>
pub use self::xdg::Xdg;
<|file_name|>firefox_run.py<|end_file_name|><|fim▁begin|>import os from outlawg import Outlawg from fftool import ( DIR_CONFIGS, local ) from ini_handler import IniHandler Log = Outlawg() env = IniHandler() env.load_os_config(DIR_CONFIGS) def launch_firefox(profile_path, channel, logging, nspr_log_modules=''): """relies on the other functions (download, install, profile) having completed. """ FIREFOX_APP_BIN = env.get(channel, 'PATH_FIREFOX_BIN_ENV') Log.header('LAUNCH FIREFOX') print("Launching Firefox {0} with profile: {1}".format( channel, profile_path) ) cmd = '"{0}" -profile "{1}"'.format(FIREFOX_APP_BIN, profile_path) print('CMD: ' + cmd) # NSPR_LOG_MODULES if nspr_log_modules: Log.header('FIREFOX NSPR_LOG_MODULES LOGGING') os.environ['NSPR_LOG_MODULES'] = nspr_log_modules <|fim▁hole|><|fim▁end|>
local(cmd, logging)
<|file_name|>ItemTest.js<|end_file_name|><|fim▁begin|>module('Item'); test('.setOptions()', function() { var item = new Item({ name: 'Name', category: 'Category', icon: 'Icon', value: 'Value', flavor: 'Flavor', rarity: 'Rare', }); item.code = 'Code'; equal(item.getName(),'Name');<|fim▁hole|> equal(item.getRarity(),'Rare'); equal(item.getCode(),'Code'); equal(item.isEquipment(),false); }); test('Rarity Classes', function() { var item = Factories.buildItem({}); equal(item.getRarity(),'Common'); equal(item.getClassName(),'highlight-item-common'); item.rarity = 'Uncommon'; equal(item.getClassName(),'highlight-item-uncommon'); item.rarity = 'Rare'; equal(item.getClassName(),'highlight-item-rare'); item.rarity = 'Epic'; equal(item.getClassName(),'highlight-item-epic'); });<|fim▁end|>
equal(item.getCategory(),'Category'); equal(item.getIcon(),'Icon'); equal(item.getValue(),'Value'); equal(item.getFlavor(),'Flavor');
<|file_name|>ihex_writer.rs<|end_file_name|><|fim▁begin|>// // Copyright 2016 The c8s Developers. See the COPYRIGHT // file at the top-level directory of this distribution. // // Licensed under the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>. // All files in the project carrying such notice may not be copied, modified, or // distributed except according to those terms. // use std::cell::RefCell; use ihex::record::Record; use ihex::writer; use assembler::data_range; use assembler::data_range::DataRange; // MARK: - Public API /** Converts the provided data ranges into a single Intel HEX record. These data ranges cannot overlap and still yield a valid ihex record. It is the responsibility of the caller to ensure this does not happen. @param ranges The data ranges to convert to ihex format. @return The complete ihex record (including EOF) of the given data ranges. */ pub fn ihex_representation_of_data_ranges<'a>(ranges: &'a [DataRange]) -> String { assert!(data_range::find_overlapping_ranges(ranges).len() == 0); // All records are collected into a list. let mut records = Vec::<Record>::new(); for range in ranges.iter() { // The range will be sub-divded into chunks of up to 16 bytes, so sub-address must be tracked. let record_address = RefCell::new(u16::from(range.address_range().start)); // Sub-divide the range into 16-byte Record::Data objects. records.append( &mut range // Inspect the data in the range. .data() // As a u8 slice. .as_slice() // In groups of 16 bytes. .chunks(16) // Create a tuple of (Length, Record). .map(|chunk| { (chunk.len() as u16, Record::Data { offset: *record_address.borrow(), value: Vec::from(chunk) }) }) // Increment the address counter by the number of bytes incorporated into the record. .inspect(|&(length, _)| { *record_address.borrow_mut() += length; }) // Discard the length from the tuple. .map(|(_, record)| record) // Collect the records into a Vec<Record>. 
.collect::<Vec<_>>() ); } // All ihex files end in an EOF marker. records.push(Record::EndOfFile); // Obtain the formatted representation of each record and join with newlines for display. writer::create_object_file_representation(records.as_slice()).unwrap() + &"\n" } // MARK: - Tests #[cfg(test)] mod tests { use twelve_bit::u12::*; use assembler::data_range::DataRange; use super::ihex_representation_of_data_ranges; #[test] fn test_ihex_representation_of_data_ranges_no_ranges() { // An empty set of data ranges yields just an EOF marker. assert_eq!(ihex_representation_of_data_ranges(&[]), String::from(":00000001FF\n")); } #[test] fn test_ihex_representation_of_data_ranges_one_range() { // Build an average-case ihex record. let mut data_range = DataRange::new(u12![0x100]); data_range.append(&vec![0x21,0x46,0x01,0x36,0x01,0x21,0x47,0x01,0x36,0x00,0x7E,0xFE,0x09,0xD2,0x19,0x01]); data_range.append(&vec![0x21,0x46,0x01,0x7E,0x17,0xC2,0x00,0x01,0xFF,0x5F,0x16,0x00,0x21,0x48,0x01,0x19]); data_range.append(&vec![0x19,0x4E,0x79,0x23,0x46,0x23,0x96,0x57,0x78,0x23,0x9E,0xDA,0x3F,0x01,0xB2,0xCA]); data_range.append(&vec![0x3F,0x01,0x56,0x70,0x2B,0x5E,0x71,0x2B,0x72,0x2B,0x73,0x21,0x46,0x01,0x34,0x21]); // Validate the average case yielded the anticipated result. let ihex_rep_average = ihex_representation_of_data_ranges(&[data_range]); let expected_ihex_rep_average = String::new() + &":10010000214601360121470136007EFE09D2190140\n" + &":100110002146017E17C20001FF5F16002148011928\n" + &":10012000194E79234623965778239EDA3F01B2CAA7\n" + &":100130003F0156702B5E712B722B732146013421C7\n" + &":00000001FF\n"; assert_eq!(ihex_rep_average, expected_ihex_rep_average);<|fim▁hole|> #[test] fn test_ihex_representation_of_data_ranges_adjacent_ranges() { // Build a pair of adjacent data ranges. 
let mut range_a = DataRange::new(u12![0x100]); range_a.append(&vec![0x21,0x46,0x01,0x36,0x01,0x21,0x47,0x01,0x36,0x00,0x7E,0xFE,0x09,0xD2,0x19,0x01]); let mut range_b = DataRange::new(u12![0x110]); range_b.append(&vec![0x21,0x46,0x01,0x7E,0x17,0xC2,0x00,0x01,0xFF,0x5F,0x16,0x00,0x21,0x48,0x01,0x19]); // Validate the average case yielded the anticipated result. let ihex_rep_adjacent = ihex_representation_of_data_ranges(&[range_a, range_b]); let expected_ihex_rep_adjacent = String::new() + &":10010000214601360121470136007EFE09D2190140\n" + &":100110002146017E17C20001FF5F16002148011928\n" + &":00000001FF\n"; assert_eq!(ihex_rep_adjacent, expected_ihex_rep_adjacent); } #[test] fn test_ihex_representation_of_data_ranges_disjoint_ranges() { // Build an disjoint pair of data ranges. let mut range_a = DataRange::new(u12![0x100]); range_a.append(&vec![0x21,0x46,0x01,0x36,0x01,0x21,0x47,0x01,0x36,0x00,0x7E,0xFE,0x09,0xD2,0x19,0x01]); let mut range_b = DataRange::new(u12![0x130]); range_b.append(&vec![0x3F,0x01,0x56,0x70,0x2B,0x5E,0x71,0x2B,0x72,0x2B,0x73,0x21,0x46,0x01,0x34,0x21]); // Validate the average case yielded the anticipated result. let ihex_rep_disjoint = ihex_representation_of_data_ranges(&[range_a, range_b]); let expected_ihex_rep_disjoint = String::new() + &":10010000214601360121470136007EFE09D2190140\n" + &":100130003F0156702B5E712B722B732146013421C7\n" + &":00000001FF\n"; assert_eq!(ihex_rep_disjoint, expected_ihex_rep_disjoint); } #[test] fn test_ihex_representation_of_data_ranges_uneven_ranges() { // Build an uneven set of data ranges. 
let mut range_a = DataRange::new(u12![0x100]); range_a.append(&vec![0x21,0x46,0x01,0x36,0x01,0x21,0x47,0x01,0x36,0x00,0x7E,0xFE,0x09,0xD2,0x19]); let mut range_b = DataRange::new(u12![0x130]); range_b.append(&vec![0x3F,0x01,0x56,0x70,0x2B,0x5E,0x71,0x2B,0x72,0x2B,0x73,0x21,0x46,0x01,0x34,0x21,0x22]); let mut range_c = DataRange::new(u12![0x200]); range_c.append(&vec![0x3F]); // Validate the average case yielded the anticipated result. let ihex_rep = ihex_representation_of_data_ranges(&[range_a, range_b, range_c]); let expected_ihex_rep = String::new() + &":0F010000214601360121470136007EFE09D21942\n" + &":100130003F0156702B5E712B722B732146013421C7\n" + &":01014000229C\n" + &":010200003FBE\n" + &":00000001FF\n"; assert_eq!(ihex_rep, expected_ihex_rep); } #[test] #[should_panic] fn test_ihex_representation_of_data_ranges_panics_when_overlapping() { // Build an overlapping pair of ranges. let mut range_a = DataRange::new(u12![0x100]); range_a.append(&vec![0x21,0x46,0x01,0x36,0x01,0x21,0x47,0x01,0x36,0x00,0x7E,0xFE,0x09,0xD2,0x19,0x01]); let mut range_b = DataRange::new(u12![0x101]); range_b.append(&vec![0x3F,0x01,0x56,0x70,0x2B,0x5E,0x71,0x2B,0x72,0x2B,0x73,0x21,0x46,0x01,0x34,0x21]); ihex_representation_of_data_ranges(&[range_a, range_b]); } }<|fim▁end|>
}
<|file_name|>polarization.py<|end_file_name|><|fim▁begin|># Licensed under GPL version 3 - see LICENSE.rst import numpy as np import astropy.units as u from .utils import norm_vector, e2h __all__ = ['polarization_vectors', 'Q_reflection', 'paralleltransport_matrix', 'parallel_transport'] def polarization_vectors(dir_array, angles): '''Converts polarization angles to vectors in the direction of polarization. Follows convention: Vector perpendicular to photon direction and closest to +y axis is angle 0 for polarization direction, unless photon direction is parallel to the y axis, in which case the vector closest to the +x axis is angle 0. Parameters ---------- dir_array : nx4 np.array each row is the homogeneous coordinates for a photon's direction vector angles : np.array 1D array with the polarization angles ''' n = len(angles) polarization = np.zeros((n, 4)) x = np.array([1., 0., 0.]) y = np.array([0., 1., 0.]) # NOTE: The commented code works and is more readable, but the current code is faster. # for i in range(0, n): # r = h2e(dir_array[i]) # r /= np.linalg.norm(r) # if not (np.isclose(r[0], 0.) and np.isclose(r[2], 0.)): # # polarization relative to positive y at 0 # v_1 = y - (r * np.dot(r, y)) # v_1 /= np.linalg.norm(v_1) # else: # # polarization relative to positive x at 0 # v_1 = x - (r * np.dot(r, x)) # v_1 /= np.linalg.norm(v_1) # # # right hand coordinate system is v_1, v_2, r (photon direction) # v_2 = np.cross(r, v_1) # polarization[i, 0:3] = v_1 * np.cos(angles[i]) + v_2 * np.sin(angles[i]) # polarization[i, 3] = 0 r = dir_array.copy()[:,0:3] r /= np.linalg.norm(r, axis=1)[:, np.newaxis] pol_convention_x = np.isclose(r[:,0], 0.) & np.isclose(r[:,2], 0.) 
if hasattr(angles, "unit") and (angles.unit is not None): angles = angles.to(u.rad) # polarization relative to positive y or x at 0 v_1 = ~pol_convention_x[:, np.newaxis] * (y - r * np.dot(r, y)[:, np.newaxis]) v_1 += pol_convention_x[:, np.newaxis] * (x - r * np.dot(r, x)[:, np.newaxis]) v_1 /= np.linalg.norm(v_1, axis=1)[:, np.newaxis] <|fim▁hole|> polarization[:, 0:3] = v_1 * np.cos(angles)[:, np.newaxis] + v_2 * np.sin(angles)[:, np.newaxis] return polarization def Q_reflection(delta_dir): '''Reflection of a polarization vector on a non-polarizing surface. This can also be used for other elements that change the direction of the photon without adding any more polarization and where both sides propagate in the same medium. See `Yun (2011) <http://hdl.handle.net/10150/202979>`_, eqn 4.3.13 for details. Parameters ---------- delta_dir : np.array of shape (n, 4) Array of photon direction coordinates in homogeneous coordinates: ``delta_dir = photon['dir_old'] - photons['dir_new']``. Note that this vector is **not** normalized. Returns ------- q : np.array of shape (n, 4, 4) Array of parallel transport ray tracing matrices. ''' if delta_dir.shape != 2: raise ValueError('delta_dir must have dimension (n, 4).') m = delta_dir[..., None, :] * delta_dir[..., :, None] return np.eye(4) - 2 / (np.linalg.norm(delta_dir, axis=1)**2)[:, None, None] * m def paralleltransport_matrix(dir1, dir2, jones=np.eye(2), replace_nans=True): '''Calculate parallel transport ray tracing matrix. Parallel transport for a vector implies that the component s (perpendicular, from German *senkrecht*) to the planes spanned by ``dir1`` and ``dir2`` stays the same. If ``dir1`` is parallel to ``dir2`` this plane is not well defined and the resulting matrix elements will be set to ``np.nan``, unless ``replace_nans`` is set. Note that the ray matrix returned works on an eukledian 3d vector, not a homogeneous vector. 
(Polarization is a vector, thus the forth element of the homogeneous vector is always 0 and returning (4,4) matrices is just a waste of space.) Parameters ---------- dir1, dir2 : np.array of shape (n, 3) Direction before and after the interaction. jones : np.array of shape (2,2) Jones matrix in the local s,p system of the optical element. replace_nans : bool If ``True`` return an identity matrix for those rays with ``dir1=dir2``. In those cases, the local coordinate system is not well defined and thus no Jones matrix can be applied. In MARXS ``dir1=dir2`` often happens if some photons in a list miss the optical element in question - these photons just pass through and their polarization vector should be unchanged. Returns ------- p_mat : np.array of shape(n, 3, 3) ''' dir1 = norm_vector(dir1) dir2 = norm_vector(dir2) jones_3 = np.eye(3) jones_3[:2, :2] = jones pmat = np.zeros((dir1.shape[0], 3, 3)) s = np.cross(dir1, dir2) s_norm = np.linalg.norm(s, axis=1) # Find dir values that remain unchanged # For these the cross prodict will by 0 # and a numerical error is raised in s / norm(s) # Expected output value for these depends on "replace_nans" ind = np.isclose(s_norm, 0) if (~ind).sum() > 0: s = s[~ind, :] / s_norm[~ind][:, None] p_in = np.cross(dir1[~ind, :], s) p_out = np.cross(dir2[~ind, :], s) Oininv = np.array([s, p_in, dir1[~ind, :]]).swapaxes(1, 0) Oout = np.array([s, p_out, dir2[~ind, :]]).swapaxes(1, 2).T temp = np.einsum('...ij,kjl->kil', jones_3, Oininv) pmat[~ind, :, :] = np.einsum('ijk,ikl->ijl', Oout, temp) factor = 1 if replace_nans else np.nan pmat[ind, :, :] = factor * np.eye(3)[None, :, :] return pmat def parallel_transport(dir_old, dir_new, pol_old, **kwargs): '''Parallel transport of the polarization vector with no polarization happening. Parameters ---------- dir_old, dir_new : np.array of shape (n, 4) Old and new photon direction in homogeneous coordinates. pol_old : np.array of shape (n, 4) Old polarization vector in homogeneous coordinates. 
kwargs : dict All other arguments are passed on to `~marxs.math.polarization.paralleltransport_matrix`. Returns ------- pol : np.array of shape (m, 4) Parallel transported vectors. ''' pmat = paralleltransport_matrix(dir_old[:, :3], dir_new[:, :3]) out = np.einsum('ijk,ik->ij', pmat, pol_old[:, :3]) return e2h(out, 0)<|fim▁end|>
# right hand coordinate system is v_1, v_2, r (photon direction) v_2 = np.cross(r, v_1)
<|file_name|>highlight.js<|end_file_name|><|fim▁begin|>/** * Wraps the * * @param text * {string} haystack to search through * @param search * {string} needle to search for * @param [caseSensitive] * {boolean} optional boolean to use case-sensitive searching */ angular.module('ui.highlight', []).filter('highlight', function(highlight) { return function(text, search, caseSensitive) { if (search || angular.isNumber(search)) { var ltext = text.toString(); var lsearch = search.toString();<|fim▁hole|> if (caseSensitive) { return ltext.split(lsearch).join('<span class="ui-match">' + lsearch + '</span>'); } else { return ltext.replace(new RegExp(lsearch, 'gi'), '<span class="ui-match">$&</span>'); } } else { return text; } }; });<|fim▁end|>
<|file_name|>ag_data_access.py<|end_file_name|><|fim▁begin|>from __future__ import division # ----------------------------------------------------------------------------- # Copyright (c) 2014--, The American Gut Development Team. # # Distributed under the terms of the BSD 3-clause License. # # The full license is in the file LICENSE, distributed with this software. # ----------------------------------------------------------------------------- """ Centralized database access for the American Gut web portal """ import logging from uuid import UUID import psycopg2 import bcrypt import numpy as np import pandas as pd import random import string from amgut.lib.data_access.sql_connection import TRN # character sets for kit id, passwords and verification codes KIT_ALPHA = "abcdefghjkmnpqrstuvwxyz" # removed i, l and o for clarity<|fim▁hole|>KIT_VERCODE_NOZEROS = KIT_PASSWD_NOZEROS class AGDataAccess(object): """Data Access implementation for all the American Gut web portal """ # arbitrary, unique ID and value human_sites = ['Stool', 'Mouth', 'Right hand', 'Left hand', 'Forehead', 'Torso', 'Left leg', 'Right leg', 'Nares', 'Hair', 'Tears', 'Nasal mucus', 'Ear wax', 'Vaginal mucus'] animal_sites = ['Stool', 'Mouth', 'Nares', 'Ears', 'Skin', 'Fur'] general_sites = ['Animal Habitat', 'Biofilm', 'Dust', 'Food', 'Fermented Food', 'Indoor Surface', 'Outdoor Surface', 'Plant habitat', 'Soil', 'Sole of shoe', 'Water'] ##################################### # Users ##################################### def authenticateWebAppUser(self, username, password): """ Attempts to validate authenticate the supplied username/password Attempt to authenticate the user against the list of users in web_app_user table. If successful, a dict with user innformation is returned. If not, the function returns False. 
""" with TRN: sql = """SELECT cast(ag_login_id as varchar(100)) as ag_login_id, email, name, address, city, state, zip, country,kit_password FROM ag_login INNER JOIN ag_kit USING (ag_login_id) WHERE supplied_kit_id = %s""" TRN.add(sql, [username]) row = TRN.execute_fetchindex() if not row: return False results = dict(row[0]) password = password.encode('utf-8') if not bcrypt.checkpw(password, results['kit_password']): return False results['ag_login_id'] = str(results['ag_login_id']) return results def check_login_exists(self, email): """Checks if email for login already exists on system Parameters ---------- email : str Email for user to check Returns ------- ag_login_id or None If exists, returns ag_login_id, else returns None """ with TRN: clean_email = email.strip().lower() sql = "SELECT ag_login_id FROM ag_login WHERE LOWER(email) = %s" TRN.add(sql, [clean_email]) value = TRN.execute_fetchindex() if value: value = value[0][0] return None if value == [] else value def addAGLogin(self, email, name, address, city, state, zip_, country): """Adds a new login or returns the login_id if email already exists Parameters ---------- email : str Email to register for user name : str Name to register for user address : str Street address to register for user city : str City to register for user state : str State to register for user zip_ : str Postal code to register for user country : str Country to register for user Returns ------- ag_login_id : str UUID for new user, or existing user if email already in system """ with TRN: clean_email = email.strip().lower() ag_login_id = self.check_login_exists(email) if not ag_login_id: # create the login sql = """INSERT INTO ag_login (email, name, address, city, state, zip, country) VALUES (%s, %s, %s, %s, %s, %s, %s) RETURNING ag_login_id""" TRN.add(sql, [clean_email, name, address, city, state, zip_, country]) ag_login_id = TRN.execute_fetchlast() return ag_login_id def getAGBarcodeDetails(self, barcode): """Returns information 
about the barcode from both AG and standard info Parameters ---------- barcode : str Barcode to get information for Returns ------- dict All barcode info, keyed to column name Raises ------ ValueError Barcode not found in AG information tables """ sql = """SELECT email, cast(ag_kit_barcode_id as varchar(100)), cast(ag_kit_id as varchar(100)), barcode, site_sampled, environment_sampled, sample_date, sample_time, participant_name, notes, refunded, withdrawn, moldy, other, other_text, date_of_last_email, overloaded, name, status FROM ag.ag_kit_barcodes LEFT JOIN barcodes.barcode USING (barcode) LEFT JOIN ag.ag_kit USING (ag_kit_id) LEFT JOIN ag.ag_login_surveys USING (ag_login_id) LEFT JOIN ag.ag_login USING (ag_login_id) WHERE barcode = %s""" with TRN: TRN.add(sql, [barcode]) row = TRN.execute_fetchindex() if not row: raise ValueError('Barcode does not exist in AG: %s' % barcode) return dict(row[0]) def getAGSurveyDetails(self, survey_id, language): """Returns survey information of a specific survey_id and language Parameters ---------- survey_id : str the id of the survey group language : str the language the survey is intended for Returns ------- DataFrame pandas DataFrame of sorted survey details Raises ------ ValueError survey_id not found in database ValueError language not found in database """ if survey_id not in self.getKnownSurveyIds(): raise ValueError('Invalid survey_id') if language not in self.getKnownLanguages(): raise ValueError('Invalid language') sql = """SELECT survey_question_id, survey_group, %s, question_shortname, response, ag.survey_question_response.display_index AS response_index FROM ag.survey_question LEFT JOIN ag.survey_question_response USING (survey_question_id) LEFT JOIN ag.group_questions USING (survey_question_id) LEFT JOIN ag.surveys USING (survey_group) WHERE survey_id = %s""" % (language, survey_id) with TRN: TRN.add(sql) survey_details = TRN.execute_fetchindex() df = pd.DataFrame([dict(r) for r in survey_details], 
columns=['survey_question_id', 'survey_group', language, 'question_shortname', 'response', 'response_index']) # sorts so that questions emmulate survey order df = df.sort_values(by=['survey_group', 'survey_question_id', 'response_index']).drop(columns='survey_group') # converts response_index from float to int df['response_index'] = df['response_index'].apply( lambda x: None if np.isnan(x) else int(x), convert_dtype=False) return df def getKnownSurveyIds(self): """Returns all known survey IDs of each survey type Returns ------- list of ints List of survey_ids in ascending order Raises ------ ValueError Survey IDs were not able to be found """ sql = """SELECT survey_id FROM ag.surveys""" with TRN: TRN.add(sql) survey_ids = TRN.execute_fetchindex() if not survey_ids: raise ValueError('Survey IDs were not able to be found') # survey_ids must be converted from list of DictRow to a set survey_ids = [x[0] for x in survey_ids] unique_survey_ids = set([]) for i in survey_ids: unique_survey_ids.add(i) return unique_survey_ids def getKnownLanguages(self): """Returns all known language locales Returns ------- list of strings List of language locales used for surveys Raises ------ ValueError Languages were not able to be found """ sql = """SELECT column_name FROM information_schema.columns WHERE table_name = 'survey_response'""" with TRN: TRN.add(sql) languages = TRN.execute_fetchindex() if not languages: raise ValueError('Languages were not able to be found') languages = [x[0] for x in languages] languages_set = set([]) for i in languages: languages_set.add(i) return languages_set def getAGKitDetails(self, supplied_kit_id): sql = """SELECT cast(ag_kit_id as varchar(100)), supplied_kit_id, kit_password, swabs_per_kit, kit_verified, kit_verification_code, verification_email_sent FROM ag_kit WHERE supplied_kit_id = %s""" with TRN: TRN.add(sql, [supplied_kit_id]) row = TRN.execute_fetchindex() if not row: raise ValueError('Supplied kit id does not exist in AG: %s' % 
supplied_kit_id) return dict(row[0]) def registerHandoutKit(self, ag_login_id, supplied_kit_id): """Registeres a handout kit to a user Parameters ---------- ag_login_id : str UUID4 formatted string of login ID to associate with kit supplied_kit_id : str kit ID for the handout kit Returns ------- bool True: success False: insert failed due to IntegrityError Raises ------ ValueError Non-UUID4 value sent as ag_login_id """ with TRN: # make sure properly formatted UUID passed in UUID(ag_login_id, version=4) printresults = self.checkPrintResults(supplied_kit_id) # make sure login_id and skid exists sql = """SELECT EXISTS(SELECT * FROM ag.ag_login WHERE ag_login_id = %s)""" TRN.add(sql, [ag_login_id]) exists = TRN.execute_fetchlast() if not exists: return False sql = """SELECT EXISTS(SELECT * FROM ag.ag_handout_kits WHERE kit_id = %s)""" TRN.add(sql, [supplied_kit_id]) if not TRN.execute_fetchlast(): return False sql = """ DO $do$ DECLARE k_id uuid; bc varchar; BEGIN INSERT INTO ag_kit (ag_login_id, supplied_kit_id, kit_password, swabs_per_kit, kit_verification_code, print_results) SELECT '{0}', kit_id, password, swabs_per_kit, verification_code, '{1}' FROM ag_handout_kits WHERE kit_id = %s LIMIT 1 RETURNING ag_kit_id INTO k_id; FOR bc IN SELECT barcode FROM ag_handout_barcodes WHERE kit_id = %s LOOP INSERT INTO ag_kit_barcodes (ag_kit_id, barcode, sample_barcode_file) VALUES (k_id, bc, bc || '.jpg'); END LOOP; DELETE FROM ag_handout_barcodes WHERE kit_id = %s; DELETE FROM ag_handout_kits WHERE kit_id = %s; END $do$; """.format(ag_login_id, printresults) TRN.add(sql, [supplied_kit_id] * 4) try: TRN.execute() except psycopg2.IntegrityError: logging.exception('Error on skid %s:' % ag_login_id) return False return True def get_all_handout_kits(self): with TRN: sql = 'SELECT kit_id FROM ag.ag_handout_kits' TRN.add(sql) return TRN.execute_fetchflatten() def deleteAGParticipantSurvey(self, ag_login_id, participant_name): # Remove user from new schema with TRN: sql = """SELECT 
survey_id, participant_email FROM ag_login_surveys JOIN ag_consent USING (ag_login_id, participant_name) WHERE ag_login_id = %s AND participant_name = %s""" TRN.add(sql, (ag_login_id, participant_name)) # collect all survey_ids and participant_names, since at least the # former might be more than one. survey_ids = set() participant_emails = set() for hit in TRN.execute_fetchindex(): survey_ids.add(hit[0]) participant_emails.add(hit[1]) sql = """SELECT barcode FROM ag.source_barcodes_surveys WHERE survey_id IN %s""" TRN.add(sql, [tuple(survey_ids)]) barcodes = [x[0] for x in TRN.execute_fetchindex()] sql = "DELETE FROM survey_answers WHERE survey_id IN %s" TRN.add(sql, [tuple(survey_ids)]) sql = "DELETE FROM survey_answers_other WHERE survey_id IN %s" TRN.add(sql, [tuple(survey_ids)]) # Reset survey attached to barcode(s) for info in self.getParticipantSamples(ag_login_id, participant_name): self.deleteSample(info['barcode'], ag_login_id) # Delete last due to foreign keys sql = """DELETE FROM ag.source_barcodes_surveys WHERE survey_id IN %s""" TRN.add(sql, [tuple(survey_ids)]) if len(barcodes) != 0: # only delete barcode information, if this is the # last survey for the given source, i.e. 
ag_login_id, # participant_name combination if len(survey_ids) == 1: sql = """DELETE FROM ag.ag_kit_barcodes WHERE barcode IN %s""" TRN.add(sql, [tuple(barcodes)]) sql = "DELETE FROM ag_login_surveys WHERE survey_id IN %s" TRN.add(sql, [tuple(survey_ids)]) sql = """DELETE FROM ag_consent WHERE ag_login_id = %s AND participant_name = %s""" TRN.add(sql, [ag_login_id, participant_name]) # checks if user has previously been # removed and is has still revoked consent sql = """SELECT ag_login_id FROM ag.consent_revoked""" TRN.add(sql) revoked = {result[0] for result in TRN.execute_fetchindex()} # only inserts to ag.consent_revoked if not already there if ag_login_id not in revoked: sql = """INSERT INTO ag.consent_revoked (ag_login_id, participant_name, participant_email) VALUES (%s, %s, %s)""" sql_args = [[ag_login_id, participant_name, pemail] for pemail in participant_emails] TRN.add(sql, sql_args, many=True) TRN.execute() def get_withdrawn(self): """Gets the list of withdrawn participants and information Returns ------- list of tuple of strings List of withdrawn participants, in the form (ag_login_id, participant_name, participant_email, date_revoked) """ with TRN: sql = "SELECT * FROM consent_revoked" TRN.add(sql) return TRN.execute_fetchindex() def getConsent(self, survey_id): with TRN: TRN.add("""SELECT agc.participant_name, agc.participant_email, agc.parent_1_name, agc.parent_2_name, agc.is_juvenile, agc.deceased_parent, agc.ag_login_id, agc.date_signed, agc.assent_obtainer, agc.age_range, agl.survey_id FROM ag_consent agc JOIN ag_login_surveys agl USING (ag_login_id, participant_name) WHERE agl.survey_id = %s""", [survey_id]) result = TRN.execute_fetchindex() if not result: raise ValueError("Survey ID does not exist in DB: %s" % survey_id) return dict(result[0]) def logParticipantSample(self, ag_login_id, barcode, sample_site, environment_sampled, sample_date, sample_time, participant_name, notes): with TRN: if sample_site is not None: # Get non timepoint 
specific survey IDs. # As of this comment, a non timepoint specific survey is # implicit, and currently limited to vioscreen FFQs # We do not want to associate timepoint specific surveys # with the wrong barcode sql = """SELECT survey_id, vioscreen_status FROM ag_login_surveys WHERE ag_login_id = %s AND participant_name = %s""" TRN.add(sql, (ag_login_id, participant_name)) results = TRN.execute_fetchindex() survey_ids = [x[0] for x in results] statuses = [x[1] for x in results] # if we have more than 1 ID, filter out those associated to # vioscreen if len(survey_ids) > 1: keep = [] for sid, vs in zip(survey_ids, statuses): if vs is None: keep.append(sid) survey_ids = keep # if we only have a single survey ID then advance regardless # of vioscreen status if len(survey_ids) == 1: pass if len(survey_ids) == 0: # if we don't have a definite non-vioscreen survey ID # which can arise on legacy accounts, then we'll create a # new ID without a vioscreen_status entry. Note that # the associate_barcode_to_survey_id call is necessary to # add the survey ID into ag_login_surveys and it also takes # care of the survey_id <-> barcode association new_survey_id = self.get_new_survey_id() self.associate_barcode_to_survey_id(ag_login_id, participant_name, barcode, new_survey_id) else: # otherwise, it is an environmental sample survey_ids = [] # Add barcode info sql = """UPDATE ag_kit_barcodes SET site_sampled = %s, environment_sampled = %s, sample_date = %s, sample_time = %s, notes = %s WHERE barcode = %s""" TRN.add(sql, [sample_site, environment_sampled, sample_date, sample_time, notes, barcode]) if len(survey_ids) > 0: sql = """INSERT INTO ag.source_barcodes_surveys (survey_id, barcode) VALUES (%s, %s)""" for survey_id in survey_ids: TRN.add(sql, [survey_id, barcode]) def deleteSample(self, barcode, ag_login_id): """ Removes by either releasing barcode back for relogging or withdraw Parameters ---------- barcode : str Barcode to delete ag_login_id : UUID4 Login ID for the barcode 
Notes ----- Strictly speaking the ag_login_id isn't needed but it makes it really hard to hack the function when you would need to know someone else's login id (a GUID) to delete something maliciously. If the barcode has never been scanned, assume a mis-log and wipe it so barcode can be logged again. If barcode has been scanned, that means we have recieved it and must withdraw it to delete it from the system. """ with TRN: # Figure out if we've received the barcode or not sql = "SELECT scan_date FROM barcode WHERE barcode = %s" TRN.add(sql, [barcode]) received = TRN.execute_fetchlast() if not received: # Not recieved, so we release the barcode back to be relogged set_text = """site_sampled = NULL, sample_time = NULL, sample_date = NULL, environment_sampled = NULL, notes = NULL""" sql = "UPDATE barcode SET status = NULL WHERE barcode = %s" TRN.add(sql, [barcode]) else: # barcode already recieved, so we withdraw the barcode set_text = "withdrawn = 'Y'" sql = """UPDATE ag_kit_barcodes SET {} WHERE barcode IN ( SELECT akb.barcode FROM ag_kit_barcodes akb INNER JOIN ag_kit ak USING (ag_kit_id) WHERE ak.ag_login_id = %s AND akb.barcode = %s)""".format(set_text) TRN.add(sql, [ag_login_id, barcode]) sql = """DELETE FROM ag.source_barcodes_surveys WHERE barcode = %s""" TRN.add(sql, [barcode]) def getHumanParticipants(self, ag_login_id): # get people from new survey setup sql = """SELECT DISTINCT participant_name from ag.ag_login_surveys LEFT JOIN ag.survey_answers USING (survey_id) JOIN ag.group_questions gq USING (survey_question_id) JOIN ag.surveys ags USING (survey_group) WHERE ag_login_id = %s AND ags.survey_id = %s""" with TRN: TRN.add(sql, [ag_login_id, 1]) return TRN.execute_fetchflatten() def associate_barcode_to_survey_id(self, ag_login_id, participant_name, barcode, survey_id): """Associate a barcode to a survey ID Parameters ---------- ag_login_id : str A valid AG login ID participant_name : str The name of a participant associated with the login barcode : str A 
valid barcode associated with the login survey_id : str A valid survey ID """ with TRN: # test first if the barcode is already associated to a participant sql = """SELECT ag_login_id, participant_name, barcode FROM ag.ag_login_surveys JOIN ag.source_barcodes_surveys USING(survey_id) WHERE ag_login_id=%s AND participant_name=%s AND barcode=%s""" TRN.add(sql, [ag_login_id, participant_name, barcode]) results = TRN.execute_fetchflatten() if len(results) == 0: # this implies the barcode was unassigned, and this is a new # assignment. # Let's verify the barcode is associated to the kit and login sql = """SELECT 1 FROM ag.ag_login JOIN ag.ag_kit USING (ag_login_id) JOIN ag.ag_kit_barcodes USING (ag_kit_id) WHERE ag_login_id=%s AND barcode=%s""" TRN.add(sql, [ag_login_id, barcode]) results = TRN.execute_fetchflatten() if len(results) == 0: # the barcode is not part of a kit with the login ID raise ValueError("Unexpected barcode / kit relationship") # the barcode should also not already be linked to a # participant within the kit sql = """SELECT 1 FROM ag.ag_login_surveys JOIN ag.source_barcodes_surveys USING(survey_id) WHERE ag_login_id=%s AND barcode=%s""" TRN.add(sql, [ag_login_id, barcode]) results = TRN.execute_fetchflatten() if len(results) > 0: # the barcode is already assigned to someone on the kit raise ValueError("Barcode already assigned") sql = """INSERT INTO ag_login_surveys (ag_login_id, survey_id, participant_name) VALUES (%s, %s, %s)""" TRN.add(sql, [ag_login_id, survey_id, participant_name]) sql = """INSERT INTO ag.source_barcodes_surveys (survey_id, barcode) VALUES (%s, %s)""" TRN.add(sql, [survey_id, barcode]) def updateVioscreenStatus(self, survey_id, status): with TRN: sql = """UPDATE ag_login_surveys SET vioscreen_status = %s WHERE survey_id = %s""" TRN.add(sql, (status, survey_id)) def get_vioscreen_status(self, survey_id): """Retrieves the vioscreen status for a survey_id Parameters ---------- survey_id : str The survey to get status for Returns 
------- int Vioscreen status Raises ------ ValueError survey_id passed is not in the database """ with TRN: sql = """SELECT vioscreen_status FROM ag.ag_login_surveys WHERE survey_id = %s""" TRN.add(sql, [survey_id]) status = TRN.execute_fetchindex() if not status: raise ValueError("Survey ID %s not in database" % survey_id) return status[0][0] def getAnimalParticipants(self, ag_login_id): sql = """SELECT DISTINCT participant_name from ag.ag_login_surveys JOIN ag.survey_answers USING (survey_id) JOIN ag.group_questions gq USING (survey_question_id) JOIN ag.surveys ags USING (survey_group) WHERE ag_login_id = %s AND ags.survey_id = %s""" with TRN: TRN.add(sql, [ag_login_id, 2]) return TRN.execute_fetchflatten() def getParticipantSamples(self, ag_login_id, participant_name): sql = """SELECT DISTINCT ag_kit_barcodes.barcode, ag_kit_barcodes.site_sampled, ag_kit_barcodes.sample_date, ag_kit_barcodes.sample_time, ag_kit_barcodes.notes, barcodes.barcode.status FROM ag.ag_login_surveys JOIN ag.source_barcodes_surveys USING (survey_id) JOIN ag.ag_kit_barcodes USING (barcode) JOIN barcodes.barcode USING (barcode) WHERE ag_login_id = %s AND participant_name = %s AND (site_sampled IS NOT NULL AND site_sampled::text <> '')""" with TRN: TRN.add(sql, [ag_login_id, participant_name]) rows = TRN.execute_fetchindex() return [dict(row) for row in rows] def getEnvironmentalSamples(self, ag_login_id): sql = """SELECT barcode, site_sampled, sample_date, sample_time, notes, status FROM ag_kit_barcodes INNER JOIN barcode USING (barcode) INNER JOIN ag_kit USING(ag_kit_id) WHERE (environment_sampled IS NOT NULL AND environment_sampled::text <> '') AND ag_login_id = %s""" with TRN: TRN.add(sql, [ag_login_id]) rows = TRN.execute_fetchindex() return [dict(row) for row in rows] def getAvailableBarcodes(self, ag_login_id): sql = """SELECT barcode FROM ag_kit_barcodes INNER JOIN ag_kit USING (ag_kit_id) WHERE coalesce(sample_date::text, '') = '' AND kit_verified = 'y' AND ag_login_id = %s""" with 
TRN: TRN.add(sql, [ag_login_id]) return TRN.execute_fetchflatten() def verifyKit(self, supplied_kit_id): """Set the KIT_VERIFIED for the supplied_kit_id to 'y'""" sql = """UPDATE AG_KIT SET kit_verified='y' WHERE supplied_kit_id=%s""" with TRN: TRN.add(sql, [supplied_kit_id]) def _get_unverified_kits(self): """Gets list of unverified kit IDs, Helper function for tests""" sql = """SELECT supplied_kit_id FROM AG_KIT WHERE NOT kit_verified = 'y'""" with TRN: TRN.add(sql) return TRN.execute_fetchflatten() def getMapMarkers(self): with TRN: sql = """SELECT country, count(country)::integer FROM ag.ag_login GROUP BY country""" TRN.add(sql) return dict(TRN.execute_fetchindex()) def handoutCheck(self, username, password): with TRN: password = password.encode('utf-8') sql = "SELECT password FROM ag.ag_handout_kits WHERE kit_id = %s" TRN.add(sql, [username]) to_check = TRN.execute_fetchindex() if not to_check: return False else: return bcrypt.checkpw(password, to_check[0][0]) def check_access(self, supplied_kit_id, barcode): """Check if the user has access to the barcode Parameters ---------- supplied_kit_id : str The user's supplied kit ID barcode : str The barcode to check access for Returns ------- boolean True if the user can access the barcode, False otherwise """ with TRN: ag_login_id = self.get_user_for_kit(supplied_kit_id) sql = """SELECT EXISTS ( SELECT barcode FROM ag.ag_kit JOIN ag.ag_kit_barcodes USING (ag_kit_id) WHERE ag_login_id = %s AND barcode = %s)""" TRN.add(sql, [ag_login_id, barcode]) return TRN.execute_fetchlast() def getAGKitIDsByEmail(self, email): """Returns a list of kitids based on email email is email address of login returns a list of kit_id's associated with the email or an empty list """ with TRN: sql = """SELECT supplied_kit_id FROM ag_kit INNER JOIN ag_login USING (ag_login_id) WHERE email = %s""" TRN.add(sql, [email.lower()]) return TRN.execute_fetchflatten() def ag_set_pass_change_code(self, email, kitid, pass_code): """updates ag_kit table 
with the supplied pass_code email is email address of participant kitid is supplied_kit_id in the ag_kit table pass_code is the password change verfication value """ sql = """UPDATE ag_kit SET pass_reset_code = %s, pass_reset_time = clock_timestamp() + interval '2' hour WHERE supplied_kit_id = %s AND ag_login_id in (SELECT ag_login_id FROM ag_login WHERE email = %s)""" with TRN: TRN.add(sql, [pass_code, kitid, email]) def ag_update_kit_password(self, kit_id, password): """updates ag_kit table with password kit_id is supplied_kit_id in the ag_kit table password is the new password """ with TRN: password = password.encode('utf-8') password = bcrypt.hashpw(password, bcrypt.gensalt()) sql = """UPDATE AG_KIT SET kit_password = %s, pass_reset_code = NULL WHERE supplied_kit_id = %s""" TRN.add(sql, [password, kit_id]) def ag_verify_kit_password_change_code(self, email, kitid, passcode): """returns true if it still in the password change window email is the email address of the participant kitid is the supplied_kit_id in the ag_kit table passcode is the password change verification value """ sql = """SELECT EXISTS(SELECT pass_reset_time FROM ag.ag_kit INNER JOIN ag.ag_login USING (ag_login_id) WHERE pass_reset_code = %s and email = %s AND supplied_kit_id = %s AND NOW() < pass_reset_time)""" with TRN: TRN.add(sql, [passcode, email, kitid]) return TRN.execute_fetchlast() def getBarcodesByKit(self, kitid): """Returns a list of barcodes in a kit kitid is the supplied_kit_id from the ag_kit table """ sql = """SELECT barcode FROM ag_kit_barcodes INNER JOIN ag_kit USING (ag_kit_id) WHERE supplied_kit_id = %s""" with TRN: TRN.add(sql, [kitid]) return TRN.execute_fetchflatten() def get_nonconsented_scanned_barcodes(self, kit_id): """Returns list of barcodes that have been scanned but not consented Parameters ---------- kit_id : str The supplied kit identifier to check for barcodes. 
Returns ------- list of str The barcodes, if any, that have been scanned but not consented """ sql = """SELECT barcode FROM ag_kit_barcodes INNER JOIN ag_kit USING (ag_kit_id) RIGHT JOIN ag_login USING (ag_login_id) LEFT JOIN barcode USING (barcode) FULL JOIN ag.source_barcodes_surveys USING (barcode) WHERE ag.source_barcodes_surveys.survey_id IS NULL AND scan_date IS NOT NULL AND ag_login_id = %s""" with TRN: user = self.get_user_for_kit(kit_id) TRN.add(sql, [user]) return TRN.execute_fetchflatten() def checkPrintResults(self, kit_id): """Checks whether or not results are available for a given `kit_id` Parameters ---------- kit_id : str The supplied kit identifier to check for results availability. Returns ------- bool Whether or not the results are ready for the supplied kit_id. Notes ----- If a `kit_id` does not exist this function will return False, as no results would be available for a non-existent `kit_id`. """ with TRN: sql = "SELECT print_results FROM ag_handout_kits WHERE kit_id = %s" TRN.add(sql, [kit_id]) results = TRN.execute_fetchindex() return False if not results else results[0][0] def get_user_for_kit(self, supplied_kit_id): with TRN: sql = """SELECT ag_login_id FROM ag.ag_kit JOIN ag_login USING (ag_login_id) WHERE supplied_kit_id = %s""" TRN.add(sql, [supplied_kit_id]) results = TRN.execute_fetchindex() if results: return results[0][0] else: raise ValueError("No user ID for kit %s" % supplied_kit_id) def get_menu_items(self, supplied_kit_id): """Returns information required to populate the menu of the website""" with TRN: ag_login_id = self.get_user_for_kit(supplied_kit_id) info = self.getAGKitDetails(supplied_kit_id) kit_verified = False if info['kit_verified'] == 'y': kit_verified = True human_samples = {hs: self.getParticipantSamples(ag_login_id, hs) for hs in self.getHumanParticipants(ag_login_id)} animal_samples = {a: self.getParticipantSamples(ag_login_id, a) for a in self.getAnimalParticipants(ag_login_id)} environmental_samples = 
self.getEnvironmentalSamples(ag_login_id) return (human_samples, animal_samples, environmental_samples, kit_verified) def check_if_consent_exists(self, ag_login_id, participant_name): """Return True if a consent already exists""" with TRN: sql = """SELECT EXISTS( SELECT 1 FROM ag_consent WHERE ag_login_id = %s AND participant_name = %s)""" TRN.add(sql, [ag_login_id, participant_name]) return TRN.execute_fetchlast() def get_user_info(self, supplied_kit_id): with TRN: sql = """SELECT CAST(ag_login_id AS VARCHAR(100)) AS ag_login_id, email, name, address, city, state, zip, country FROM ag_login INNER JOIN ag_kit USING(ag_login_id) WHERE supplied_kit_id = %s""" TRN.add(sql, [supplied_kit_id]) row = TRN.execute_fetchindex() if not row: raise ValueError('Supplied kit id is not in DB: %s' % supplied_kit_id) user_data = dict(row[0]) user_data['ag_login_id'] = str(user_data['ag_login_id']) return user_data def get_barcode_results(self, supplied_kit_id): """Get the results associated with the login ID of the kit Parameters ---------- supplied_kit_id : str The user's supplied kit ID Returns ------- list of dict A list of the dict of the barcode to participant name associated with the login ID where results are ready. """ with TRN: ag_login_id = self.get_user_for_kit(supplied_kit_id) sql = """SELECT DISTINCT barcode, participant_name FROM ag.ag_login_surveys JOIN ag.source_barcodes_surveys USING (survey_id) JOIN ag.ag_kit_barcodes USING (barcode) WHERE ag_login_id = %s AND results_ready = 'Y'""" TRN.add(sql, [ag_login_id]) return [dict(row) for row in TRN.execute_fetchindex()] def get_login_info(self, ag_login_id): """Get kit registration information Parameters ---------- ag_login_id : str A valid login ID, that should be a test as a valid UUID Returns ------- list of dict A list of registration information associated with a common login ID. 
Raises ------ ValueError Unknown ag_login_id passed """ with TRN: sql = """SELECT ag_login_id, email, name, address, city, state, zip, country FROM ag_login WHERE ag_login_id = %s""" TRN.add(sql, [ag_login_id]) info = TRN.execute_fetchindex() if not info: raise ValueError('ag_login_id not in database: %s' % ag_login_id) return [dict(row) for row in info] def get_survey_ids(self, ag_login_id, participant_name): """Return the survey IDs associated with a participant or None Parameters ---------- ag_login_id : str A valid login ID, that should be a test as a valid UUID participant_name : str A participant name Returns ------- dict or None The survey IDs keyed to the survey id, or None if a survey ID cannot be found. Raises ------ ValueError Unknown ag_login_id or participant_name passed """ with TRN: sql = """SELECT DISTINCT s.survey_id, als.survey_id FROM ag.ag_login_surveys als LEFT JOIN ag.survey_answers sa USING (survey_id) LEFT JOIN ag.group_questions gq USING (survey_question_id) LEFT JOIN ag.surveys s USING (survey_group) WHERE ag_login_id=%s AND participant_name=%s""" TRN.add(sql, [ag_login_id, participant_name]) survey_id = TRN.execute_fetchindex() if not survey_id: raise ValueError("No survey ID found!") return dict(i for i in survey_id) def get_participants_surveys(self, ag_login_id, participant_name, locale='american'): """Returns all surveys (except external) for one participant for a AG login. Parameters ---------- ag_login_id : str A valid login ID, that should be a test as a valid UUID. participant_name : str A participant name. locale : str The names for the surveys are fetched from table ag.survey_group. For localization, there are columns for each language, which is set by locale. Returns ------- List of lists or None A list for surveys for the given participant of the given ag_login_id. Each element is a list again [int, str, str]. 
Where the first element is the survey group id, the second the survey_id and the third is a speaking name for the survey. None if no survey ID can be found for the combination of participant and ag_login_id. Raises ------ ValueError Unknown ag_login_id or participant_name passed """ with TRN: sql = """SELECT DISTINCT gq.survey_group, als.survey_id, sg.{0} FROM ag.ag_login_surveys als LEFT JOIN ag.survey_answers sa USING (survey_id) LEFT JOIN ag.group_questions gq USING (survey_question_id) LEFT JOIN ag.survey_group sg ON (survey_group=group_order) WHERE als.ag_login_id = %s AND als.participant_name = %s AND gq.survey_group < 0""".format(locale) TRN.add(sql, [ag_login_id, participant_name]) surveys = TRN.execute_fetchindex() if not surveys: raise ValueError("No survey IDs found!") return surveys def get_new_survey_id(self): """Return a new unique survey ID Notes ----- This is *NOT* atomic. At the creation of this method, it is not possible to store a survey ID without first storing consent. That would require a fairly large structural change. This method replaces the existing non-atomic logic, with logic that is much safer but not perfect. 
Returns ------- str A unique survey ID """ alpha = string.ascii_letters + string.digits with TRN: sql = """SELECT survey_id FROM ag.ag_login_surveys""" TRN.add(sql) existing = {i for i in TRN.execute_fetchflatten()} new_id = ''.join([random.choice(alpha) for i in range(16)]) while new_id in existing: new_id = ''.join([random.choice(alpha) for i in range(16)]) return new_id def get_countries(self): """ Returns ------- list of str All country names in database""" with TRN: sql = 'SELECT country FROM ag.iso_country_lookup ORDER BY country' TRN.add(sql) return TRN.execute_fetchflatten() def is_deposited_ebi(self, barcode): """Check if barcode is deposited to EBI Parameters ---------- barcode : str Barcode to check Returns ------- bool If the barcode has been deposited (True) or has not (False) Raises ------ ValueError Barcode is not a registered AG barcodes """ with TRN: sql = """SELECT EXISTS( SELECT 1 FROM ag.ag_kit_barcodes WHERE barcode = %s)""" TRN.add(sql, [barcode]) if not TRN.execute_fetchlast(): raise ValueError('Barcode %s not a registered AG barcode' % barcode) sql = "SELECT deposited FROM ag.ag_kit_barcodes WHERE barcode = %s" TRN.add(sql, [barcode]) return TRN.execute_fetchlast() # following are DB access functions only used for unit testing: def ut_get_arbitrary_supplied_kit_id_scanned_unconsented(self): """ Returns arbitrarily chosen supplied_kit_id and barcode which has been scanned but is without consent. For unit testing only! 
Returns ------- list of str: [supplied_kit_id, barcode] example: ['fNIYa', '000001053'] Raises ------ ValueError If no kits can be found in the DB that have been scanned and are without consent.""" with TRN: sql = """SELECT supplied_kit_id, barcode FROM barcodes.barcode JOIN ag.ag_kit_barcodes USING (barcode) JOIN ag.ag_kit USING (ag_kit_id) LEFT JOIN ag.source_barcodes_surveys USING (barcode) WHERE barcodes.barcode.scan_date IS NOT NULL AND ag.source_barcodes_surveys.survey_id IS NULL LIMIT 1""" TRN.add(sql, []) info = TRN.execute_fetchindex() if not info: raise ValueError('No kits found.') return info[0] def ut_get_arbitrary_handout_printed_min6_supplied_kit_id(self): """ Returns a arbitrarily chosen supplied_kit_id with printed results and 6 swaps per kit. For unit testing only! Returns ------- supplied_kit_id : str A supplied_kit_id. Example: 'DS_ubdvq' Raises ------ ValueError If no hand out kit exists, satisfing the given conditions.""" with TRN: sql = """SELECT kit_id FROM ag.ag_handout_kits WHERE swabs_per_kit = 6 AND print_results = TRUE""" TRN.add(sql, []) info = TRN.execute_fetchindex() if not info: raise ValueError('No kits found.') return info[0][0] def ut_get_arbitrary_email(self): """ Return arbitrarily chosen email. For unit testing only! Returns ------- str: email Example: 'a03E9u6ZAu@glA+)./Vn' Raises ------ ValueError If no emails be found in the DB.""" with TRN: sql = """SELECT email FROM ag.ag_login LIMIT 1""" TRN.add(sql, []) info = TRN.execute_fetchindex() if not info: raise ValueError('No emails found.') return info[0][0] def ut_get_arbitrary_barcode(self, deposited=True): """ Returns arbitrarily chosen barcode. For unit testing only! Parameters ---------- deposited : boolean If true, pick a deposited barcode. 
Default = True Returns ------- str: barcode Example: '000032951' Raises ------ ValueError If no barcodes can be found in the DB.""" with TRN: sql = """SELECT barcode FROM ag.ag_kit_barcodes WHERE deposited=%s LIMIT 1""" TRN.add(sql, [deposited]) info = TRN.execute_fetchindex() if not info: raise ValueError('No barcodes found.') return info[0][0] def ut_get_email_from_ag_login_id(self, ag_login_id): """ Returns email for a given ag_login_id. For unit testing only! Parameters ---------- ag_login_id : str Existing ag_login_id. Returns ------- str: email Example: 'xX/tEv7O+T@6Ri7C.)LO' Raises ------ ValueError If ag_login_id is not in DB. """ with TRN: sql = """SELECT email FROM ag.ag_login WHERE ag_login_id=%s""" TRN.add(sql, [ag_login_id]) info = TRN.execute_fetchindex() if not info: raise ValueError('No emails found.') return info[0][0] def ut_get_supplied_kit_id(self, ag_login_id): """ Returns supplied_kit_id for a given ag_login_id. For unit testing only! Parameters ---------- ag_login_id : str Existing ag_login_id. Returns ------- str The supplied_kit_id for the given ag_login_id. Example: 'DokBF' Raises ------ ValueError If ag_login_id is not in DB. """ with TRN: sql = """SELECT supplied_kit_id FROM ag.ag_kit WHERE ag_login_id = %s""" TRN.add(sql, [ag_login_id]) info = TRN.execute_fetchindex() if not info: raise ValueError('ag_login_id not in database: %s' % ag_login_id) return info[0][0] def ut_get_participant_names_from_ag_login_id(self, ag_login_id): """ Returns all participant_name(s) for a given ag_login_id. For unit testing only! Parameters ---------- ag_login_id : str Existing ag_login_id. Returns ------- [[str]] Example: ["Name - z\xc3\x96DOZ8(Z~'", "Name - z\xc3\x96DOZ8(Z~'", 'Name - QpeY\xc3\xb8u#0\xc3\xa5<', 'Name - S)#@G]xOdL', 'Name - Y5"^&sGQiW', 'Name - L\xc3\xa7+c\r\xc3\xa5?\r\xc2\xbf!', 'Name - (~|w:S\xc3\x85#L\xc3\x84'] Raises ------ ValueError If ag_login_id is not in DB. 
""" with TRN: sql = """SELECT participant_name FROM ag.ag_login_surveys WHERE ag_login_id = %s""" TRN.add(sql, [ag_login_id]) info = TRN.execute_fetchindex() if not info: raise ValueError('ag_login_id not in database: %s' % ag_login_id) return [n[0] for n in info] def ut_get_barcode_from_ag_login_id(self, ag_login_id): """ Returns all barcodes for a given ag_login_id. For unit testing only! Parameters ---------- ag_login_id : str Existing ag_login_id. Returns ------- [dict(str, str)] Example: [{'sample_time': None, 'sample_date': None, 'barcode': '000004217', 'site_sampled': None, 'kit_verified': 'y'} Raises ------ ValueError If no barcodes can be found in the DB. """ with TRN: sql = """SELECT ag.ag_kit_barcodes.sample_time, ag.ag_kit_barcodes.barcode, ag.ag_kit_barcodes.sample_date, ag.ag_kit_barcodes.site_sampled, ag.ag_kit.kit_verified FROM ag.ag_kit_barcodes JOIN ag.ag_kit USING (ag_kit_id) WHERE ag_login_id = %s""" TRN.add(sql, [ag_login_id]) info = TRN.execute_fetchindex() if not info: raise ValueError('barcode not in database: %s' % ag_login_id) return [dict(row) for row in info] def ut_get_arbitrary_supplied_kit_id_unverified(self): """ Returns a randomly chosen supplied_kit_id that is unverified. For unit testing only! Returns ------- str: supplied_kit_id Example: 'FajNh' Raises ------ ValueError If no unverified supplied_kit_id can be found in the DB. """ with TRN: sql = """SELECT supplied_kit_id FROM ag.ag_kit WHERE ag.ag_kit.kit_verified = 'n' LIMIT 1""" TRN.add(sql, []) info = TRN.execute_fetchindex() if not info: raise ValueError('No unverified kits in DB') return info[0][0] def ut_get_ag_login_id_from_barcode(self, barcode): """ Returns ag_login_id for a given barcode. For unit testing only! Parameters ---------- barcode : str The barcode for which the ag_login_id should be retrieved. Returns ------- str: ag_login_id Example: 'd8592c74-9694-2135-e040-8a80115d6401' Raises ------ ValueError If the given barcode can not be found in the DB. 
""" with TRN: sql = """SELECT ag.ag_kit.ag_login_id FROM ag.ag_kit_barcodes JOIN ag.ag_kit USING (ag_kit_id) WHERE ag.ag_kit_barcodes.barcode = %s""" TRN.add(sql, [barcode]) info = TRN.execute_fetchindex() if not info: raise ValueError('Barcode "%s" not in DB' % barcode) return info[0][0]<|fim▁end|>
KIT_PASSWD = '1234567890' KIT_VERCODE = KIT_PASSWD KIT_PASSWD_NOZEROS = KIT_PASSWD[0:-1]
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#[allow(dead_code)] use std::rc::Rc; use std::cell::RefCell; use std::fmt; use std::ops::{Drop, Deref, DerefMut}; use std::convert::{AsRef, AsMut}; use std::cmp::{Ord, PartialOrd, PartialEq, Eq, Ordering}; use std::hash::{Hash, Hasher}; use std::borrow::Borrow; use std::collections::VecDeque; use std::mem::ManuallyDrop; use std::ptr; /// In order to be managed by a `Pool`, values must be of a type that /// implements the `Recycleable` trait. This allows the `Pool` to create /// new instances as well as reset existing instances to a like-new state. pub trait Recycleable { /// Allocates a new instance of the implementing type. fn new() -> Self; /// Sets the state of the modified instance to be that of a freshly /// allocated instance, thereby allowing it to be reused. fn reset(&mut self); } /// Informs how an already allocated value should be initialized /// when provided with a model value or other meaningful input. pub trait InitializeWith<T> { fn initialize_with(&mut self, source: T); } impl Recycleable for String { #[inline] fn new() -> String { String::new() } #[inline] fn reset(&mut self) { self.clear(); } } impl <T> Recycleable for Vec<T> { #[inline] fn new() -> Vec<T> { Vec::new() } #[inline] fn reset(&mut self) { self.clear(); } } impl <T> Recycleable for VecDeque<T> { #[inline] fn new() -> VecDeque<T> { VecDeque::new() } #[inline] fn reset(&mut self) { self.clear(); } } impl <A> InitializeWith<A> for String where A : AsRef<str> { #[inline] fn initialize_with(&mut self, source: A) { let s : &str = source.as_ref(); self.push_str(s); } } impl <I, T> InitializeWith<I> for Vec<T> where I: Iterator<Item=T>{ #[inline] fn initialize_with(&mut self, source: I) { self.extend(source); } } /// A smartpointer which uses a shared reference (`&`) to know /// when to move its wrapped value back to the `Pool` that /// issued it. 
pub struct Recycled<'a, T: 'a> where T: Recycleable { value: RecycledInner<&'a RefCell<CappedCollection<T>>, T> } /// A smartpointer which uses reference counting (`Rc`) to know /// when to move its wrapped value back to the `Pool` that /// issued it. pub struct RcRecycled<T> where T: Recycleable { value: RecycledInner<Rc<RefCell<CappedCollection<T>>>, T> } macro_rules! impl_recycled { ($name: ident, $typ: ty, $pool: ty) => { impl <'a, T> AsRef<T> for $typ where T : Recycleable { /// Gets a shared reference to the value wrapped by the smartpointer. fn as_ref(&self) -> &T { self.value.as_ref() } } impl <'a, T> AsMut<T> for $typ where T : Recycleable { /// Gets a mutable reference to the value wrapped by the smartpointer. fn as_mut(&mut self) -> &mut T { self.value.as_mut() } } impl <'a, T> fmt::Debug for $typ where T : fmt::Debug + Recycleable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.value.fmt(f) } } impl <'a, T> fmt::Display for $typ where T : fmt::Display + Recycleable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.value.fmt(f) } } //-------- Passthrough trait implementations ----------- impl <'a, T> PartialEq for $typ where T : PartialEq + Recycleable { fn eq(&self, other: &Self) -> bool { self.value.eq(&other.value) } } impl <'a, T> Eq for $typ where T: Eq + Recycleable {} impl <'a, T> PartialOrd for $typ where T: PartialOrd + Recycleable { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.value.partial_cmp(&other.value) } } impl <'a, T> Ord for $typ where T: Ord + Recycleable { fn cmp(&self, other: &Self) -> Ordering { self.value.cmp(&other.value) } } impl <'a, T> Hash for $typ where T: Hash + Recycleable { fn hash<H: Hasher>(&self, state: &mut H) { self.value.hash(state) } } //------------------------------------------------------ impl <'a, T> Deref for $typ where T : Recycleable { type Target = T; #[inline] fn deref(&self) -> &T { self.as_ref() } } impl <'a, T> DerefMut for $typ where T : Recycleable { 
#[inline] fn deref_mut(&mut self) -> &mut T { self.as_mut() } } impl <'a, T> $typ where T: Recycleable { fn new(pool: $pool, value: T) -> $typ { $name { value: RecycledInner::new(pool, value) } } #[inline] fn new_from<A>(pool: $pool, value: T, source: A) -> $typ where T : InitializeWith<A> { $name { value: RecycledInner::new_from(pool, value, source) } } #[inline] /// Disassociates the value from the `Pool` that issued it. This /// destroys the smartpointer and returns the previously wrapped value. pub fn detach(self) -> T { self.value.detach() } } } } impl_recycled!{ RcRecycled, RcRecycled<T>, Rc<RefCell<CappedCollection<T>>> } impl_recycled!{ Recycled, Recycled<'a, T>, &'a RefCell<CappedCollection<T>> } impl <T> Clone for RcRecycled<T> where T: Clone + Recycleable { fn clone(&self) -> Self { RcRecycled { value: self.value.clone() } } } impl <'a, T> Clone for Recycled<'a, T> where T: Clone + Recycleable { fn clone(&self) -> Self { Recycled { value: self.value.clone() } } } struct RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T : Recycleable { value: ManuallyDrop<T>, pool: P } // ---------- Passthrough Trait Implementations ------------ impl <P, T> PartialEq for RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T: PartialEq + Recycleable { fn eq(&self, other: &Self) -> bool { self.value.eq(&other.value) } } impl <P, T> Eq for RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T: Eq + Recycleable { } impl <P, T> PartialOrd for RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T: PartialOrd + Recycleable { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.value.partial_cmp(&other.value) } } impl <P, T> Ord for RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T: Ord + Recycleable { fn cmp(&self, other: &Self) -> Ordering { self.value.cmp(&other.value) } } impl <P, T> Hash for RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T: Hash + Recycleable { fn 
hash<H: Hasher>(&self, state: &mut H) { self.value.hash(state) } } // Implementing Clone requires duplicating our shared reference to the capped collection, so we have // to provide separate implementations for RecycledInners used in Recycled and RcRecycled values. impl <'a, T> Clone for RecycledInner<&'a RefCell<CappedCollection<T>>, T> where T: Clone + Recycleable { fn clone(&self) -> Self { let pool_ref = &*self.pool; let mut cloned_value = pool_ref.borrow_mut().remove_or_create(); cloned_value.clone_from(&self.value); RecycledInner { value: ManuallyDrop::new(cloned_value), pool: pool_ref } } } impl <T> Clone for RecycledInner<Rc<RefCell<CappedCollection<T>>>, T> where T: Clone + Recycleable {<|fim▁hole|> cloned_value.clone_from(&self.value); RecycledInner { value: ManuallyDrop::new(cloned_value), pool: pool_ref } } } // ------------------------------------------------------------- impl <P, T> Drop for RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T : Recycleable { #[inline] fn drop(&mut self) { // We need to rescue `self.value` from `self` and then allow `self` to drop normally. let value = unsafe { // Make a byte-for-byte copy of `self.value` ptr::read(&self.value as *const ManuallyDrop<T>) // Because its type is ManuallyDrop<T>, the original value inside of `self` will not be // dropped when `self` drops. }; // Convert our newly-rescued `ManuallyDrop<T>` into a `T` so it will eventually drop normally. let mut value = ManuallyDrop::into_inner(value); // If there's no room left in the pool, drop the value here. let pool_ref = self.pool.borrow(); if pool_ref.borrow().is_full() { drop(value); return; } // Otherwise, reset value and return it to the pool. 
value.reset(); pool_ref.borrow_mut().insert_prepared_value(value); } } impl <P, T> AsRef<T> for RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T : Recycleable { fn as_ref(&self) -> &T { &self.value } } impl <P, T> AsMut<T> for RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T : Recycleable { fn as_mut(&mut self) -> &mut T { &mut self.value } } impl <P, T> fmt::Debug for RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T : fmt::Debug + Recycleable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.value.fmt(f) } } impl <P, T> fmt::Display for RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T : fmt::Display + Recycleable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.value.fmt(f) } } impl <P, T> Deref for RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T : Recycleable { type Target = T; #[inline] fn deref(& self) -> &T { self.as_ref() } } impl <P, T> DerefMut for RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T : Recycleable { #[inline] fn deref_mut(&mut self) -> & mut T { self.as_mut() } } impl <P, T> RecycledInner<P, T> where P: Borrow<RefCell<CappedCollection<T>>>, T : Recycleable { #[inline] fn new(pool: P, value: T) -> RecycledInner<P, T> { RecycledInner { value: ManuallyDrop::new(value), pool } } #[inline] fn new_from<A>(pool: P, mut value: T, source: A) -> RecycledInner<P, T> where T : InitializeWith<A> { value.initialize_with(source); RecycledInner { value: ManuallyDrop::new(value), pool } } #[inline] fn detach(self) -> T { // We need to split `self` into its component `value` and `pool` fields, drop the pool, // return the value, and forget `self` since its fields are now unwanted clones. 
let value = unsafe { // Make a byte-for-byte copy of `self.value` ptr::read(&self.value as *const ManuallyDrop<T>) }; let pool = unsafe { // Make a byte-for-byte copy of `self.pool` ptr::read(&self.pool as *const P) }; // Forget `self` so it doesn't go through our custom `Drop` implementation mem::forget(self); // Allow `pool` to drop normally drop(pool); // Return the only surviving copy of `value` ManuallyDrop::into_inner(value) } } struct CappedCollection <T> where T: Recycleable { values: Vec<T>, cap: usize, supplier: Box<dyn Supply<Output=T>> } impl <T> CappedCollection <T> where T: Recycleable { #[inline] pub fn new(mut supplier: Box<dyn Supply<Output=T>>, starting_size: usize, max_size: usize) -> CappedCollection<T> { use std::cmp; let starting_size = cmp::min(starting_size, max_size); let values: Vec<T> = (0..starting_size) .map(|_| supplier.get() ) .collect(); CappedCollection { values: values, cap: max_size, supplier: supplier } } /// Note: This method does not perform a length check. /// The provided value must be reset() and there must be room in the pool before this is called. #[inline] pub fn insert_prepared_value(&mut self, value: T) { self.values.push(value) } #[inline] pub fn remove(&mut self) -> Option<T> { self.values.pop() } #[inline] pub fn remove_or_create(&mut self) -> T { match self.remove() { Some(value) => value, None => self.supplier.get() } } #[inline] pub fn is_full(&self) -> bool { self.values.len() >= self.cap } #[inline] pub fn len(&self) -> usize { self.values.len() } #[inline] pub fn cap(&self) -> usize { self.cap } } /// Provides a method which will produce new instances of a type pub trait Supply { type Output: Recycleable; fn get(&mut self) -> Self::Output; } impl <F, T> Supply for F where F: FnMut() -> T, T: Recycleable { type Output = T; fn get(&mut self) -> T { self() } } /// A collection of values that can be reused without requiring new allocations. /// /// `Pool` issues each value wrapped in a smartpointer. 
When the smartpointer goes out of /// scope, the wrapped value is automatically returned to the pool. pub struct Pool <T> where T : Recycleable { values: Rc<RefCell<CappedCollection<T>>>, } impl <T> Pool <T> where T: Recycleable { /// Creates a pool with `size` elements of type `T` allocated. #[inline] pub fn with_size(size: usize) -> Pool <T> { Pool::with_size_and_max(size, usize::MAX) } /// Creates a pool with `size` elements of type `T` allocated /// and sets a maximum pool size of `max_size`. Values being /// added to the pool via `Pool::attach` or being returned to /// the pool upon dropping will instead be discarded if the pool /// is full. #[inline] pub fn with_size_and_max(starting_size: usize, max_size: usize) -> Pool <T> { let supplier = Box::new(|| T::new()); let values: CappedCollection<T> = CappedCollection::new(supplier, starting_size, max_size); Pool { values: Rc::new(RefCell::new(values)) } } /// Returns the number of values remaining in the pool. #[inline] pub fn size(&self) -> usize { (*self.values).borrow().len() } /// Returns the maximum number of values the pool can hold. #[inline] pub fn max_size(&self) -> usize { (*self.values).borrow().cap() } /// Removes a value from the pool and returns it wrapped in /// a `Recycled smartpointer. If the pool is empty when the /// method is called, a new value will be allocated. #[inline] pub fn new(&self) -> Recycled<T> { let t = self.detached(); Recycled { value: RecycledInner::new(&*self.values, t) } } /// Removes a value from the pool, initializes it using the provided /// source value, and returns it wrapped in a `Recycled` smartpointer. /// If the pool is empty when the method is called, a new value will be /// allocated. 
#[inline(always)] pub fn new_from<A>(&self, source: A) -> Recycled<T> where T: InitializeWith<A> { let t = self.detached(); Recycled { value: RecycledInner::new_from(&*self.values, t, source) } } /// Associates the provided value with the pool by wrapping it in a /// `Recycled` smartpointer. #[inline] pub fn attach(&self, value: T) -> Recycled<T> { Recycled { value: RecycledInner::new(&*self.values, value) } } /// Removes a value from the pool and returns it without wrapping it in /// a smartpointer. When the value goes out of scope it will not be /// returned to the pool. #[inline] pub fn detached(&self) -> T { let mut collection = self.values.borrow_mut(); collection.remove_or_create() } /// Removes a value from the pool and returns it wrapped in /// an `RcRecycled` smartpointer. If the pool is empty when the /// method is called, a new value will be allocated. #[inline] pub fn new_rc(&self) -> RcRecycled<T> { let t = self.detached(); let pool_reference = self.values.clone(); RcRecycled { value: RecycledInner::new(pool_reference, t) } } /// Removes a value from the pool, initializes it using the provided /// source value, and returns it wrapped in an `RcRecycled` smartpointer. /// If the pool is empty when the method is called, a new value will be /// allocated. #[inline(always)] pub fn new_rc_from<A>(&self, source: A) -> RcRecycled<T> where T: InitializeWith<A> { let t = self.detached(); let pool_reference = self.values.clone(); RcRecycled { value: RecycledInner::new_from(pool_reference, t, source) } } /// Associates the provided value with the pool by wrapping it in an /// `RcRecycled` smartpointer. 
#[inline] pub fn attach_rc(&self, value: T) -> RcRecycled<T> { let pool_reference = self.values.clone(); RcRecycled { value: RecycledInner::new(pool_reference, value) } } } /// Produces a `PoolBuilder` instance /// /// # Example /// /// ``` /// extern crate lifeguard; /// use lifeguard::*; /// /// fn main() { /// let mut pool: Pool<String> = pool() /// .with(StartingSize(128)) /// .with(MaxSize(4096)) /// .with(Supplier(|| String::with_capacity(1024))) /// .build(); /// } /// ``` pub fn pool<T>() -> PoolBuilder<T> where T: Recycleable { PoolBuilder { starting_size: 16, max_size: usize::MAX, supplier: None } } /// Used to define settings for and ultimately create a `Pool`. pub struct PoolBuilder<T> where T: Recycleable { pub starting_size: usize, pub max_size: usize, pub supplier: Option<Box<dyn Supply<Output=T>>>, } impl <T> PoolBuilder<T> where T: Recycleable { pub fn with<U>(self, option_setter: U) -> PoolBuilder<T> where U: OptionSetter<PoolBuilder<T>> { option_setter.set_option(self) } pub fn build(self) -> Pool<T> where T: Recycleable { let supplier = self.supplier.unwrap_or(Box::new(|| T::new())); let values: CappedCollection<T> = CappedCollection::new(supplier, self.starting_size, self.max_size); Pool { values: Rc::new(RefCell::new(values)) } } } pub mod settings { use ::{PoolBuilder, Recycleable, Supply}; /// Implementing this trait allows a struct to act as a configuration /// parameter in the builder API. pub trait OptionSetter<T> { fn set_option(self, T) -> T; } /// Specifies how many values should be requested from the Supplier at /// initialization time. These values will be available for immediate use. pub struct StartingSize(pub usize); /// Specifies the largest number of values the `Pool` will hold before it /// will begin to drop values being returned to it. pub struct MaxSize(pub usize); /// Specifies a value implementing `Supply<Output=T>` that will be used to allocate /// new values. If unspecified, `T::new()` will be invoked. 
pub struct Supplier<S>(pub S) where S: Supply; impl <T> OptionSetter<PoolBuilder<T>> for StartingSize where T: Recycleable { fn set_option(self, mut builder: PoolBuilder<T>) -> PoolBuilder<T> { let StartingSize(size) = self; builder.starting_size = size; builder } } impl <T> OptionSetter<PoolBuilder<T>> for MaxSize where T: Recycleable { fn set_option(self, mut builder: PoolBuilder<T>) -> PoolBuilder<T> { let MaxSize(size) = self; builder.max_size = size; builder } } impl <T, S> OptionSetter<PoolBuilder<T>> for Supplier<S> where S: Supply<Output=T> + 'static, T: Recycleable { fn set_option(self, mut builder: PoolBuilder<T>) -> PoolBuilder<T> { let Supplier(supplier) = self; builder.supplier = Some(Box::new(supplier) as Box<dyn Supply<Output=T>>); builder } } } pub use settings::{OptionSetter, StartingSize, MaxSize, Supplier}; use std::mem;<|fim▁end|>
fn clone(&self) -> Self { let pool_ref = self.pool.clone(); let mut cloned_value = pool_ref.borrow_mut().remove_or_create();
<|file_name|>OrganizationNavigationMeta.tsx<|end_file_name|><|fim▁begin|>/* * SonarQube * Copyright (C) 2009-2019 SonarSource SA * mailto:info AT sonarsource DOT com * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 3 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ import * as React from 'react'; import HomePageSelect from '../../../components/controls/HomePageSelect'; import DocTooltip from '../../../components/docs/DocTooltip'; import { translate } from '../../../helpers/l10n'; import { isSonarCloud } from '../../../helpers/system'; import { hasPrivateAccess, isPaidOrganization } from '../../../helpers/organizations'; interface Props { currentUser: T.CurrentUser; organization: T.Organization; userOrganizations: T.Organization[]; } export default function OrganizationNavigationMeta({ currentUser, organization, userOrganizations }: Props) { const onSonarCloud = isSonarCloud(); return ( <div className="navbar-context-meta"> {organization.url != null && ( <a className="spacer-right text-limited" href={organization.url} rel="nofollow"<|fim▁hole|> title={organization.url}> {organization.url} </a> )} {onSonarCloud && isPaidOrganization(organization) && hasPrivateAccess(currentUser, organization, userOrganizations) && ( <DocTooltip className="spacer-right" doc={import(/* webpackMode: "eager" */ 'Docs/tooltips/organizations/subscription-paid-plan.md')}> 
<div className="outline-badge">{translate('organization.paid_plan.badge')}</div> </DocTooltip> )} <div className="text-muted"> <strong>{translate('organization.key')}:</strong> {organization.key} </div> {onSonarCloud && ( <div className="navbar-context-meta-secondary"> <HomePageSelect currentPage={{ type: 'ORGANIZATION', organization: organization.key }} /> </div> )} </div> ); }<|fim▁end|>
<|file_name|>router_spec.ts<|end_file_name|><|fim▁begin|>import { AsyncTestCompleter, describe, proxy, it, iit, ddescribe, expect, inject, beforeEach, beforeEachBindings } from 'angular2/test_lib'; import {SpyRouterOutlet} from './spies'; import {Type} from 'angular2/src/core/facade/lang'; import {Promise, PromiseWrapper, ObservableWrapper} from 'angular2/src/core/facade/async'; import {ListWrapper} from 'angular2/src/core/facade/collection'; import {Router, RootRouter} from 'angular2/src/router/router'; import {Pipeline} from 'angular2/src/router/pipeline'; import {SpyLocation} from 'angular2/src/mock/location_mock'; import {Location} from 'angular2/src/router/location'; import {stringifyInstruction} from 'angular2/src/router/instruction'; import {RouteRegistry} from 'angular2/src/router/route_registry'; import {RouteConfig, AsyncRoute, Route} from 'angular2/src/router/route_config_decorator'; import {DirectiveResolver} from 'angular2/src/core/compiler/directive_resolver'; import {bind} from 'angular2/di'; export function main() { describe('Router', () => { var router, location; beforeEachBindings(() => [ Pipeline, RouteRegistry, DirectiveResolver, bind(Location).toClass(SpyLocation), bind(Router) .toFactory((registry, pipeline, location) => { return new RootRouter(registry, pipeline, location, AppCmp); }, [RouteRegistry, Pipeline, Location]) ]); beforeEach(inject([Router, Location], (rtr, loc) => { router = rtr; location = loc; })); it('should navigate based on the initial URL state', inject([AsyncTestCompleter], (async) => { var outlet = makeDummyOutlet(); router.config([new Route({path: '/', component: DummyComponent})]) .then((_) => router.registerPrimaryOutlet(outlet)) .then((_) => { expect(outlet.spy('activate')).toHaveBeenCalled(); expect(location.urlChanges).toEqual([]); async.done(); }); })); it('should activate viewports and update URL on navigate', inject([AsyncTestCompleter], (async) => { var outlet = makeDummyOutlet(); 
router.registerPrimaryOutlet(outlet) .then((_) => router.config([new Route({path: '/a', component: DummyComponent})])) .then((_) => router.navigate('/a')) .then((_) => { expect(outlet.spy('activate')).toHaveBeenCalled(); expect(location.urlChanges).toEqual(['/a']); async.done(); }); })); it('should not push a history change on when navigate is called with skipUrlChange', inject([AsyncTestCompleter], (async) => { var outlet = makeDummyOutlet(); router.registerPrimaryOutlet(outlet) .then((_) => router.config([new Route({path: '/b', component: DummyComponent})])) .then((_) => router.navigate('/b', true)) .then((_) => { expect(outlet.spy('activate')).toHaveBeenCalled();<|fim▁hole|> async.done(); }); })); it('should navigate after being configured', inject([AsyncTestCompleter], (async) => { var outlet = makeDummyOutlet(); router.registerPrimaryOutlet(outlet) .then((_) => router.navigate('/a')) .then((_) => { expect(outlet.spy('activate')).not.toHaveBeenCalled(); return router.config([new Route({path: '/a', component: DummyComponent})]); }) .then((_) => { expect(outlet.spy('activate')).toHaveBeenCalled(); async.done(); }); })); it('should throw when linkParams does not start with a "/" or "./"', () => { expect(() => router.generate(['firstCmp', 'secondCmp'])) .toThrowError( `Link "${ListWrapper.toJSON(['firstCmp', 'secondCmp'])}" must start with "/", "./", or "../"`); }); it('should throw when linkParams does not include a route name', () => { expect(() => router.generate(['./'])) .toThrowError(`Link "${ListWrapper.toJSON(['./'])}" must include a route name.`); expect(() => router.generate(['/'])) .toThrowError(`Link "${ListWrapper.toJSON(['/'])}" must include a route name.`); }); it('should, when subscribed to, return a disposable subscription', () => { expect(() => { var subscription = router.subscribe((_) => {}); ObservableWrapper.dispose(subscription); }).not.toThrow(); }); it('should generate URLs from the root component when the path starts with /', () => { 
router.config([new Route({path: '/first/...', component: DummyParentComp, as: 'firstCmp'})]); var instruction = router.generate(['/firstCmp', 'secondCmp']); expect(stringifyInstruction(instruction)).toEqual('first/second'); instruction = router.generate(['/firstCmp/secondCmp']); expect(stringifyInstruction(instruction)).toEqual('first/second'); }); it('should generate an instruction with terminal async routes', inject([AsyncTestCompleter], (async) => { var outlet = makeDummyOutlet(); router.registerPrimaryOutlet(outlet); router.config([new AsyncRoute({path: '/first', loader: loader, as: 'FirstCmp'})]); var instruction = router.generate(['/FirstCmp']); router.navigateInstruction(instruction) .then((_) => { expect(outlet.spy('activate')).toHaveBeenCalled(); async.done(); }); })); it('should return whether a given instruction is active with isRouteActive', inject([AsyncTestCompleter], (async) => { var outlet = makeDummyOutlet(); router.registerPrimaryOutlet(outlet) .then((_) => router.config([ new Route({path: '/a', component: DummyComponent, as: 'A'}), new Route({path: '/b', component: DummyComponent, as: 'B'}) ])) .then((_) => router.navigate('/a')) .then((_) => { var instruction = router.generate(['/A']); var otherInstruction = router.generate(['/B']); expect(router.isRouteActive(instruction)).toEqual(true); expect(router.isRouteActive(otherInstruction)).toEqual(false); async.done(); }); })); describe('query string params', () => { it('should use query string params for the root route', () => { router.config( [new Route({path: '/hi/how/are/you', component: DummyComponent, as: 'greeting-url'})]); var instruction = router.generate(['/greeting-url', {'name': 'brad'}]); var path = stringifyInstruction(instruction); expect(path).toEqual('hi/how/are/you?name=brad'); }); it('should serialize parameters that are not part of the route definition as query string params', () => { router.config( [new Route({path: '/one/two/:three', component: DummyComponent, as: 
'number-url'})]); var instruction = router.generate(['/number-url', {'three': 'three', 'four': 'four'}]); var path = stringifyInstruction(instruction); expect(path).toEqual('one/two/three?four=four'); }); }); describe('matrix params', () => { it('should generate matrix params for each non-root component', () => { router.config( [new Route({path: '/first/...', component: DummyParentComp, as: 'firstCmp'})]); var instruction = router.generate(['/firstCmp', {'key': 'value'}, 'secondCmp', {'project': 'angular'}]); var path = stringifyInstruction(instruction); expect(path).toEqual('first/second;project=angular?key=value'); }); it('should work with named params', () => { router.config( [new Route({path: '/first/:token/...', component: DummyParentComp, as: 'firstCmp'})]); var instruction = router.generate(['/firstCmp', {'token': 'min'}, 'secondCmp', {'author': 'max'}]); var path = stringifyInstruction(instruction); expect(path).toEqual('first/min/second;author=max'); }); }); }); } function loader(): Promise<Type> { return PromiseWrapper.resolve(DummyComponent); } class DummyComponent {} @RouteConfig([new Route({path: '/second', component: DummyComponent, as: 'secondCmp'})]) class DummyParentComp { } function makeDummyOutlet() { var ref = new SpyRouterOutlet(); ref.spy('canActivate').andCallFake((_) => PromiseWrapper.resolve(true)); ref.spy('canReuse').andCallFake((_) => PromiseWrapper.resolve(false)); ref.spy('canDeactivate').andCallFake((_) => PromiseWrapper.resolve(true)); ref.spy('activate').andCallFake((_) => PromiseWrapper.resolve(true)); return ref; } class AppCmp {}<|fim▁end|>
expect(location.urlChanges).toEqual([]);
<|file_name|>application.js<|end_file_name|><|fim▁begin|>// This is a manifest file that'll be compiled into application.js, which will include all the files<|fim▁hole|>// // Any JavaScript/Coffee file within this directory, lib/assets/javascripts, vendor/assets/javascripts, // or vendor/assets/javascripts of plugins, if any, can be referenced here using a relative path. // // It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the // compiled file. // // Read Sprockets README (https://github.com/sstephenson/sprockets#sprockets-directives) for details // about supported directives. // //= require jquery //= require jquery_ujs //= require_tree .<|fim▁end|>
// listed below.
<|file_name|>AbsorbersPlugin.d.ts<|end_file_name|><|fim▁begin|>import type { IPlugin } from "../../Core/Interfaces/IPlugin"; import type { Container } from "../../Core/Container"; import { Absorbers } from "./Absorbers"; import { RecursivePartial } from "../../Types/RecursivePartial"; import type { IAbsorberOptions } from "./Options/Interfaces/IAbsorberOptions"; import type { IOptions } from "../../Options/Interfaces/IOptions"; import { Options } from "../../Options/Classes/Options"; declare class AbsorbersPlugin implements IPlugin {<|fim▁hole|> constructor(); getPlugin(container: Container): Absorbers; needsPlugin(options?: RecursivePartial<IOptions & IAbsorberOptions>): boolean; loadOptions(options: Options, source?: RecursivePartial<IOptions & IAbsorberOptions>): void; } declare const plugin: AbsorbersPlugin; export { IAbsorberOptions, plugin as AbsorbersPlugin }; export * from "./Enums";<|fim▁end|>
readonly id: string;
<|file_name|>tf_train_exponential_decay.py<|end_file_name|><|fim▁begin|>"""自适应学习率衰减 tf.train.exponential_decay(learning_rate, global_step, decay_steps, decay_rate, staircase=False, name=None) 退化学习率,衰减学习率,将指数衰减应用于学习速率。 计算公式:decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps) """ # 初始的学习速率是0.1,总的迭代次数是1000次,如果staircase=True,那就表明每decay_steps次计算学习速率变化,更新原始学习速率, # 如果是False,那就是每一步都更新学习速率。红色表示False,蓝色表示True。<|fim▁hole|>import numpy as np import matplotlib.pyplot as plt learning_rate = 0.1 # 初始学习速率时0.1 decay_rate = 0.96 # 衰减率 global_steps = 1000 # 总的迭代次数 decay_steps = 100 # 衰减次数 global_ = tf.Variable(tf.constant(0)) c = tf.train.exponential_decay(learning_rate, global_, decay_steps, decay_rate, staircase=True) d = tf.train.exponential_decay(learning_rate, global_, decay_steps, decay_rate, staircase=False) T_C = [] F_D = [] with tf.Session() as sess: for i in range(global_steps): T_c = sess.run(c, feed_dict={global_: i}) T_C.append(T_c) F_d = sess.run(d, feed_dict={global_: i}) F_D.append(F_d) plt.figure(1) plt.plot(range(global_steps), F_D, 'r-')# "-"表示折线图,r表示红色,b表示蓝色 plt.plot(range(global_steps), T_C, 'b-') # 关于函数的值的计算0.96^(3/1000)=0.998 plt.show()<|fim▁end|>
import tensorflow as tf
<|file_name|>matrix_inverse_op_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for tensorflow.ops.math_ops.matrix_inverse.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.client import session from tensorflow.python.framework import constant_op from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import linalg_ops from tensorflow.python.ops import random_ops from tensorflow.python.ops import variables from tensorflow.python.platform import benchmark from tensorflow.python.platform import test class InverseOpTest(test.TestCase): def _verifyInverse(self, x, np_type): for adjoint in False, True: y = x.astype(np_type) with self.cached_session(use_gpu=True): # Verify that x^{-1} * x == Identity matrix. 
inv = linalg_ops.matrix_inverse(y, adjoint=adjoint) tf_ans = test_util.matmul_without_tf32(inv, y, adjoint_b=adjoint) np_ans = np.identity(y.shape[-1]) if x.ndim > 2: tiling = list(y.shape) tiling[-2:] = [1, 1] np_ans = np.tile(np_ans, tiling) out = self.evaluate(tf_ans) self.assertAllClose(np_ans, out, rtol=1e-4, atol=1e-3) self.assertShapeEqual(y, tf_ans) def _verifyInverseReal(self, x): for np_type in [np.float32, np.float64]: self._verifyInverse(x, np_type) def _verifyInverseComplex(self, x): for np_type in [np.complex64, np.complex128]: self._verifyInverse(x, np_type) def _makeBatch(self, matrix1, matrix2): matrix_batch = np.concatenate( [np.expand_dims(matrix1, 0), np.expand_dims(matrix2, 0)]) matrix_batch = np.tile(matrix_batch, [2, 3, 1, 1]) return matrix_batch def testNonsymmetric(self): # 2x2 matrices matrix1 = np.array([[1., 2.], [3., 4.]]) matrix2 = np.array([[1., 3.], [3., 5.]]) self._verifyInverseReal(matrix1) self._verifyInverseReal(matrix2) # A multidimensional batch of 2x2 matrices self._verifyInverseReal(self._makeBatch(matrix1, matrix2)) matrix1 = matrix1.astype(np.complex64) matrix1 += 1j * matrix1 matrix2 = matrix2.astype(np.complex64) matrix2 += 1j * matrix2 self._verifyInverseComplex(matrix1) self._verifyInverseComplex(matrix2) # Complex batch self._verifyInverseComplex(self._makeBatch(matrix1, matrix2)) def testSymmetricPositiveDefinite(self): # 2x2 matrices matrix1 = np.array([[2., 1.], [1., 2.]]) matrix2 = np.array([[3., -1.], [-1., 3.]]) self._verifyInverseReal(matrix1) self._verifyInverseReal(matrix2) # A multidimensional batch of 2x2 matrices self._verifyInverseReal(self._makeBatch(matrix1, matrix2)) matrix1 = matrix1.astype(np.complex64) matrix1 += 1j * matrix1 matrix2 = matrix2.astype(np.complex64) matrix2 += 1j * matrix2 self._verifyInverseComplex(matrix1) self._verifyInverseComplex(matrix2) # Complex batch self._verifyInverseComplex(self._makeBatch(matrix1, matrix2)) @test_util.deprecated_graph_mode_only def 
testNonSquareMatrix(self): # When the inverse of a non-square matrix is attempted we should return # an error with self.assertRaises(ValueError): linalg_ops.matrix_inverse(np.array([[1., 2., 3.], [3., 4., 5.]])) @test_util.deprecated_graph_mode_only def testWrongDimensions(self): # The input to the inverse should be at least a 2-dimensional tensor. tensor3 = constant_op.constant([1., 2.]) with self.assertRaises(ValueError): linalg_ops.matrix_inverse(tensor3) def testNotInvertible(self): # The input should be invertible. with self.cached_session(): with self.assertRaisesOpError("Input is not invertible."): # All rows of the matrix below add to zero. tensor3 = constant_op.constant([[1., 0., -1.], [-1., 1., 0.], [0., -1., 1.]]) linalg_ops.matrix_inverse(tensor3).eval() def testEmpty(self): self._verifyInverseReal(np.empty([0, 2, 2])) self._verifyInverseReal(np.empty([2, 0, 0])) def testRandomSmallAndLarge(self): np.random.seed(42) for dtype in np.float32, np.float64, np.complex64, np.complex128: for batch_dims in [(), (1,), (3,), (2, 2)]: for size in 8, 31, 32: shape = batch_dims + (size, size) matrix = np.random.uniform( low=-1.0, high=1.0,<|fim▁hole|> def testConcurrentExecutesWithoutError(self): with self.session(use_gpu=True) as sess: all_ops = [] for adjoint_ in True, False: matrix1 = random_ops.random_normal([5, 5], seed=42) matrix2 = random_ops.random_normal([5, 5], seed=42) inv1 = linalg_ops.matrix_inverse(matrix1, adjoint=adjoint_) inv2 = linalg_ops.matrix_inverse(matrix2, adjoint=adjoint_) all_ops += [inv1, inv2] inv = self.evaluate(all_ops) self.assertAllEqual(inv[0], inv[1]) self.assertAllEqual(inv[2], inv[3]) class MatrixInverseBenchmark(test.Benchmark): shapes = [ (4, 4), (10, 10), (16, 16), (101, 101), (256, 256), (1000, 1000), (1024, 1024), (2048, 2048), (513, 4, 4), (513, 16, 16), (513, 256, 256), ] def _GenerateMatrix(self, shape): batch_shape = shape[:-2] shape = shape[-2:] assert shape[0] == shape[1] n = shape[0] matrix = 
np.ones(shape).astype(np.float32) / ( 2.0 * n) + np.diag(np.ones(n).astype(np.float32)) return variables.Variable(np.tile(matrix, batch_shape + (1, 1))) def benchmarkMatrixInverseOp(self): for adjoint in False, True: for shape in self.shapes: with ops.Graph().as_default(), \ session.Session(config=benchmark.benchmark_config()) as sess, \ ops.device("/cpu:0"): matrix = self._GenerateMatrix(shape) inv = linalg_ops.matrix_inverse(matrix, adjoint=adjoint) self.evaluate(variables.global_variables_initializer()) self.run_op_benchmark( sess, control_flow_ops.group(inv), min_iters=25, name="matrix_inverse_cpu_{shape}_adjoint_{adjoint}".format( shape=shape, adjoint=adjoint)) if test.is_gpu_available(True): with ops.Graph().as_default(), \ session.Session(config=benchmark.benchmark_config()) as sess, \ ops.device("/gpu:0"): matrix = self._GenerateMatrix(shape) inv = linalg_ops.matrix_inverse(matrix, adjoint=adjoint) self.evaluate(variables.global_variables_initializer()) self.run_op_benchmark( sess, control_flow_ops.group(inv), min_iters=25, name="matrix_inverse_gpu_{shape}_adjoint_{adjoint}".format( shape=shape, adjoint=adjoint)) if __name__ == "__main__": test.main()<|fim▁end|>
size=np.prod(shape)).reshape(shape).astype(dtype) self._verifyInverseReal(matrix) @test_util.deprecated_graph_mode_only
<|file_name|>fd_unix.go<|end_file_name|><|fim▁begin|>// Copyright 2009 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build darwin dragonfly freebsd linux nacl netbsd openbsd solaris package net import ( "io" "os" "runtime" "sync/atomic" "syscall" "time" ) // Network file descriptor. type netFD struct { // locking/lifetime of sysfd + serialize access to Read and Write methods fdmu fdMutex // immutable until Close sysfd int family int sotype int isConnected bool net string laddr Addr raddr Addr // wait server pd pollDesc } func sysInit() { } func dial(network string, ra Addr, dialer func(time.Time) (Conn, error), deadline time.Time) (Conn, error) { return dialer(deadline) } func newFD(sysfd, family, sotype int, net string) (*netFD, error) { return &netFD{sysfd: sysfd, family: family, sotype: sotype, net: net}, nil } func (fd *netFD) init() error { if err := fd.pd.Init(fd); err != nil { return err } return nil } func (fd *netFD) setAddr(laddr, raddr Addr) { fd.laddr = laddr fd.raddr = raddr runtime.SetFinalizer(fd, (*netFD).Close) } func (fd *netFD) name() string { var ls, rs string if fd.laddr != nil { ls = fd.laddr.String() } if fd.raddr != nil { rs = fd.raddr.String() } return fd.net + ":" + ls + "->" + rs } func (fd *netFD) connect(la, ra syscall.Sockaddr) error { // Do not need to call fd.writeLock here, // because fd is not yet accessible to user, // so no concurrent operations are possible. if err := fd.pd.PrepareWrite(); err != nil { return err } switch err := syscall.Connect(fd.sysfd, ra); err { case syscall.EINPROGRESS, syscall.EALREADY, syscall.EINTR: case nil, syscall.EISCONN: return nil case syscall.EINVAL: // On Solaris we can see EINVAL if the socket has // already been accepted and closed by the server. // Treat this as a successful connection--writes to // the socket will see EOF. For details and a test // case in C see http://golang.org/issue/6828. 
if runtime.GOOS == "solaris" { return nil } fallthrough default: return err } for { // Performing multiple connect system calls on a // non-blocking socket under Unix variants does not // necessarily result in earlier errors being // returned. Instead, once runtime-integrated network // poller tells us that the socket is ready, get the // SO_ERROR socket option to see if the connection // succeeded or failed. See issue 7474 for further // details. if err := fd.pd.WaitWrite(); err != nil { return err } nerr, err := syscall.GetsockoptInt(fd.sysfd, syscall.SOL_SOCKET, syscall.SO_ERROR) if err != nil { return err } switch err := syscall.Errno(nerr); err { case syscall.EINPROGRESS, syscall.EALREADY, syscall.EINTR: case syscall.Errno(0), syscall.EISCONN: return nil default: return err } } } func (fd *netFD) destroy() { // Poller may want to unregister fd in readiness notification mechanism, // so this must be executed before closesocket. fd.pd.Close() closesocket(fd.sysfd) fd.sysfd = -1 runtime.SetFinalizer(fd, nil) } // Add a reference to this fd. // Returns an error if the fd cannot be used. func (fd *netFD) incref() error { if !fd.fdmu.Incref() { return errClosing } return nil } // Remove a reference to this FD and close if we've been asked to do so // (and there are no references left). func (fd *netFD) decref() { if fd.fdmu.Decref() { fd.destroy() } } // Add a reference to this fd and lock for reading. // Returns an error if the fd cannot be used. func (fd *netFD) readLock() error { if !fd.fdmu.RWLock(true) { return errClosing } return nil } // Unlock for reading and remove a reference to this FD. func (fd *netFD) readUnlock() { if fd.fdmu.RWUnlock(true) { fd.destroy() } } // Add a reference to this fd and lock for writing. // Returns an error if the fd cannot be used. func (fd *netFD) writeLock() error { if !fd.fdmu.RWLock(false) { return errClosing } return nil } // Unlock for writing and remove a reference to this FD. 
func (fd *netFD) writeUnlock() { if fd.fdmu.RWUnlock(false) { fd.destroy() } } func (fd *netFD) Close() error { fd.pd.Lock() // needed for both fd.incref(true) and pollDesc.Evict if !fd.fdmu.IncrefAndClose() { fd.pd.Unlock() return errClosing } // Unblock any I/O. Once it all unblocks and returns, // so that it cannot be referring to fd.sysfd anymore, // the final decref will close fd.sysfd. This should happen // fairly quickly, since all the I/O is non-blocking, and any // attempts to block in the pollDesc will return errClosing. doWakeup := fd.pd.Evict() fd.pd.Unlock() fd.decref() if doWakeup { fd.pd.Wakeup() } return nil } func (fd *netFD) shutdown(how int) error { if err := fd.incref(); err != nil { return err } defer fd.decref() err := syscall.Shutdown(fd.sysfd, how) if err != nil { return &OpError{"shutdown", fd.net, fd.laddr, err} } return nil } func (fd *netFD) closeRead() error { return fd.shutdown(syscall.SHUT_RD) } func (fd *netFD) closeWrite() error { return fd.shutdown(syscall.SHUT_WR) } func (fd *netFD) Read(p []byte) (n int, err error) { if err := fd.readLock(); err != nil { return 0, err } defer fd.readUnlock() if err := fd.pd.PrepareRead(); err != nil { return 0, &OpError{"read", fd.net, fd.raddr, err} } for { n, err = syscall.Read(int(fd.sysfd), p) if err != nil { n = 0 if err == syscall.EAGAIN { if err = fd.pd.WaitRead(); err == nil { continue } } } err = chkReadErr(n, err, fd) break } if err != nil && err != io.EOF { err = &OpError{"read", fd.net, fd.raddr, err} } return } func (fd *netFD) readFrom(p []byte) (n int, sa syscall.Sockaddr, err error) { if err := fd.readLock(); err != nil { return 0, nil, err } defer fd.readUnlock() if err := fd.pd.PrepareRead(); err != nil { return 0, nil, &OpError{"read", fd.net, fd.laddr, err} } for { n, sa, err = syscall.Recvfrom(fd.sysfd, p, 0) if err != nil { n = 0 if err == syscall.EAGAIN { if err = fd.pd.WaitRead(); err == nil { continue } } } err = chkReadErr(n, err, fd) break } if err != nil && err != 
io.EOF { err = &OpError{"read", fd.net, fd.laddr, err} } return } func (fd *netFD) readMsg(p []byte, oob []byte) (n, oobn, flags int, sa syscall.Sockaddr, err error) { if err := fd.readLock(); err != nil { return 0, 0, 0, nil, err } defer fd.readUnlock() if err := fd.pd.PrepareRead(); err != nil { return 0, 0, 0, nil, &OpError{"read", fd.net, fd.laddr, err} } for { n, oobn, flags, sa, err = syscall.Recvmsg(fd.sysfd, p, oob, 0) if err != nil { // TODO(dfc) should n and oobn be set to 0 if err == syscall.EAGAIN { if err = fd.pd.WaitRead(); err == nil { continue } } } err = chkReadErr(n, err, fd) break } if err != nil && err != io.EOF { err = &OpError{"read", fd.net, fd.laddr, err} } return } func chkReadErr(n int, err error, fd *netFD) error { if n == 0 && err == nil && fd.sotype != syscall.SOCK_DGRAM && fd.sotype != syscall.SOCK_RAW { return io.EOF } return err } func (fd *netFD) Write(p []byte) (nn int, err error) { if err := fd.writeLock(); err != nil { return 0, err } defer fd.writeUnlock() if err := fd.pd.PrepareWrite(); err != nil { return 0, &OpError{"write", fd.net, fd.raddr, err} } for { var n int n, err = syscall.Write(int(fd.sysfd), p[nn:]) if n > 0 { nn += n } if nn == len(p) { break } if err == syscall.EAGAIN { if err = fd.pd.WaitWrite(); err == nil { continue } } if err != nil { n = 0 break } if n == 0 { err = io.ErrUnexpectedEOF break } } if err != nil { err = &OpError{"write", fd.net, fd.raddr, err} } return nn, err } func (fd *netFD) writeTo(p []byte, sa syscall.Sockaddr) (n int, err error) { if err := fd.writeLock(); err != nil { return 0, err } defer fd.writeUnlock() if err := fd.pd.PrepareWrite(); err != nil { return 0, &OpError{"write", fd.net, fd.raddr, err} } for { err = syscall.Sendto(fd.sysfd, p, 0, sa) if err == syscall.EAGAIN { if err = fd.pd.WaitWrite(); err == nil { continue } } break } if err == nil { n = len(p) } else { err = &OpError{"write", fd.net, fd.raddr, err} } return } func (fd *netFD) writeMsg(p []byte, oob []byte, sa 
syscall.Sockaddr) (n int, oobn int, err error) { if err := fd.writeLock(); err != nil { return 0, 0, err } defer fd.writeUnlock() if err := fd.pd.PrepareWrite(); err != nil { return 0, 0, &OpError{"write", fd.net, fd.raddr, err} } for { n, err = syscall.SendmsgN(fd.sysfd, p, oob, sa, 0) if err == syscall.EAGAIN { if err = fd.pd.WaitWrite(); err == nil { continue } } break } if err == nil { oobn = len(oob) } else { err = &OpError{"write", fd.net, fd.raddr, err} } return } func (fd *netFD) accept(toAddr func(syscall.Sockaddr) Addr) (netfd *netFD, err error) { if err := fd.readLock(); err != nil { return nil, err } defer fd.readUnlock() var s int var rsa syscall.Sockaddr if err = fd.pd.PrepareRead(); err != nil { return nil, &OpError{"accept", fd.net, fd.laddr, err} } for { s, rsa, err = accept(fd.sysfd)<|fim▁hole|> if err == syscall.EAGAIN { if err = fd.pd.WaitRead(); err == nil { continue } } else if err == syscall.ECONNABORTED { // This means that a socket on the listen queue was closed // before we Accept()ed it; it's a silly error, so try again. continue } return nil, &OpError{"accept", fd.net, fd.laddr, err} } break } if netfd, err = newFD(s, fd.family, fd.sotype, fd.net); err != nil { closesocket(s) return nil, err } if err = netfd.init(); err != nil { fd.Close() return nil, err } lsa, _ := syscall.Getsockname(netfd.sysfd) netfd.setAddr(toAddr(lsa), toAddr(rsa)) return netfd, nil } // tryDupCloexec indicates whether F_DUPFD_CLOEXEC should be used. // If the kernel doesn't support it, this is set to 0. 
var tryDupCloexec = int32(1) func dupCloseOnExec(fd int) (newfd int, err error) { if atomic.LoadInt32(&tryDupCloexec) == 1 { r0, _, e1 := syscall.Syscall(syscall.SYS_FCNTL, uintptr(fd), syscall.F_DUPFD_CLOEXEC, 0) if runtime.GOOS == "darwin" && e1 == syscall.EBADF { // On OS X 10.6 and below (but we only support // >= 10.6), F_DUPFD_CLOEXEC is unsupported // and fcntl there falls back (undocumented) // to doing an ioctl instead, returning EBADF // in this case because fd is not of the // expected device fd type. Treat it as // EINVAL instead, so we fall back to the // normal dup path. // TODO: only do this on 10.6 if we can detect 10.6 // cheaply. e1 = syscall.EINVAL } switch e1 { case 0: return int(r0), nil case syscall.EINVAL: // Old kernel. Fall back to the portable way // from now on. atomic.StoreInt32(&tryDupCloexec, 0) default: return -1, e1 } } return dupCloseOnExecOld(fd) } // dupCloseOnExecUnixOld is the traditional way to dup an fd and // set its O_CLOEXEC bit, using two system calls. func dupCloseOnExecOld(fd int) (newfd int, err error) { syscall.ForkLock.RLock() defer syscall.ForkLock.RUnlock() newfd, err = syscall.Dup(fd) if err != nil { return -1, err } syscall.CloseOnExec(newfd) return } func (fd *netFD) dup() (f *os.File, err error) { ns, err := dupCloseOnExec(fd.sysfd) if err != nil { return nil, &OpError{"dup", fd.net, fd.laddr, err} } // We want blocking mode for the new fd, hence the double negative. // This also puts the old fd into blocking mode, meaning that // I/O will block the thread instead of letting us use the epoll server. // Everything will still work, just with more threads. 
if err = syscall.SetNonblock(ns, false); err != nil { return nil, &OpError{"setnonblock", fd.net, fd.laddr, err} } return os.NewFile(uintptr(ns), fd.name()), nil } func closesocket(s int) error { return syscall.Close(s) } func skipRawSocketTests() (skip bool, skipmsg string, err error) { if os.Getuid() != 0 { return true, "skipping test; must be root", nil } return false, "", nil }<|fim▁end|>
if err != nil {
<|file_name|>chunking.rs<|end_file_name|><|fim▁begin|>use serde::Deserialize; use std::{ fmt, fs::File, io::{ BufReader, BufWriter, Read, Write, }, ops::Range, path::{ Path, PathBuf, }, }; use anyhow::{ Context, Result, }; use serde::{ de::DeserializeOwned, ser::Serialize, }; use csv::Reader; use itertools::Itertools; use crate::psql::PsqlJsonIterator; pub struct SingleChunk { endpoints: Range<usize>, filename: String, } #[derive(Debug, Deserialize)] pub struct RawChunk { filename: String, start: usize, stop: usize, } pub struct ChunkSpec { ranges: Vec<SingleChunk>, } impl From<RawChunk> for SingleChunk { fn from(raw: RawChunk) -> SingleChunk { return Self { endpoints: Range { start: raw.start, end: raw.stop + 1, }, filename: raw.filename, }; } } impl SingleChunk { pub fn contains(&self, index: &usize) -> bool { self.endpoints.contains(index) } } impl ChunkSpec { pub fn filename_of(&self, index: usize) -> Option<PathBuf> { self.ranges .iter() .find(|r| r.contains(&index)) .map(|c: &SingleChunk| PathBuf::from(&c.filename)) } } pub fn load(filename: &Path) -> Result<ChunkSpec> { let file = File::open(filename) .with_context(|| format!("Could not open chunk file {:?}", &filename))?; let reader = BufReader::new(file); let mut records = Reader::from_reader(reader); let mut ranges = Vec::new(); for result in records.deserialize() { let entry: RawChunk = result?; ranges.push(SingleChunk::from(entry)); } Ok(ChunkSpec { ranges, }) } pub fn write_splits<R: Read, T: DeserializeOwned + fmt::Debug + Serialize>( iterator: PsqlJsonIterator<R, T>, id_getter: fn(&T) -> usize, chunks: &Path, output: &Path, ) -> Result<()> { let chunks = load(chunks)?; let grouped = iterator.group_by(|entry: &T| { chunks .filename_of(id_getter(entry)) .ok_or(format!("Could not find filename for {:?}", &entry)) .unwrap() }); for (filename, items) in &grouped { let mut path = PathBuf::from(output); path.push(filename); let file = File::create(path)?; let mut writer = BufWriter::new(file); for item in 
items {<|fim▁hole|> } Ok(()) }<|fim▁end|>
serde_json::to_writer(&mut writer, &item)?; writeln!(&mut writer)?; }
<|file_name|>containerRegistry.test.ts<|end_file_name|><|fim▁begin|>// ------------------------------------------------------------------------------ // Copyright (c) 2017-present, RobotlegsJS. All Rights Reserved. // // NOTICE: You are permitted to use, modify, and distribute this file // in accordance with the terms of the license agreement accompanying it. // ------------------------------------------------------------------------------ import "../../../../../entry"; import "../../../../../../src/robotlegs/bender/extensions/contextView/pixiPatch/contains-patch"; import { assert } from "chai"; import { Sprite } from "pixi.js"; import { IViewHandler } from "../../../../../../src/robotlegs/bender/extensions/viewManager/api/IViewHandler"; import { ContainerBinding } from "../../../../../../src/robotlegs/bender/extensions/viewManager/impl/ContainerBinding"; import { ContainerRegistry } from "../../../../../../src/robotlegs/bender/extensions/viewManager/impl/ContainerRegistry"; import { ContainerRegistryEvent } from "../../../../../../src/robotlegs/bender/extensions/viewManager/impl/ContainerRegistryEvent"; import { CallbackViewHandler } from "../support/CallbackViewHandler"; import { TreeContainer } from "../support/TreeContainer"; describe("ContainerRegistry", () => { let registry: ContainerRegistry = null; beforeEach(() => { registry = new ContainerRegistry(); }); afterEach(() => { registry = null; }); it("add_container", () => { let container: Sprite = new Sprite(); let containerBinding: ContainerBinding = registry.addContainer(container); assert.equal(containerBinding.container, container); }); it("add_twice_same_container", () => { let container: Sprite = new Sprite(); let containerBinding1: ContainerBinding = registry.addContainer(container); let containerBinding2: ContainerBinding = registry.addContainer(container); assert.equal(containerBinding1.container, container); assert.equal(containerBinding2.container, container); assert.equal(containerBinding1, 
containerBinding2); }); it("get_bindings", () => { let container1: Sprite = new Sprite(); let container2: Sprite = new Sprite(); let container3: Sprite = new Sprite(); let containerBinding1: ContainerBinding = registry.addContainer(container1); let containerBinding2: ContainerBinding = registry.addContainer(container2); let containerBinding3: ContainerBinding = registry.addContainer(container3); let expectedBindings: ContainerBinding[] = [containerBinding1, containerBinding2, containerBinding3]; assert.deepEqual(expectedBindings, registry.bindings); }); it("finds_correct_nearest_interested_container_view_and_returns_its_binding", () => { let searchTrees: TreeContainer[] = createTrees(3, 3); for (let searchTree of searchTrees) { registry.addContainer(searchTree); } let correctTree: TreeContainer; let result: ContainerBinding; for (correctTree of searchTrees) { for (let treeChild of correctTree.treeChildren) { result = registry.findParentBinding(treeChild); assert.equal(result.container, correctTree); for (let treeGrandchild of treeChild.treeChildren) { result = registry.findParentBinding(treeGrandchild); assert.equal(result.container, correctTree); for (let treeGreatGrandchild of treeGrandchild.treeChildren) { result = registry.findParentBinding(treeGreatGrandchild); assert.equal(result.container, correctTree); } } } } }); it("binding_returns_with_correct_interested_parent_chain", () => { let searchTrees: TreeContainer[] = createTrees(5, 4); registry.addContainer(searchTrees[0]); registry.addContainer(searchTrees[1]); registry.addContainer(searchTrees[1].treeChildren[3]); let searchItem: Sprite = searchTrees[1].treeChildren[3].treeChildren[3].treeChildren[3].treeChildren[3]; let result: ContainerBinding = registry.findParentBinding(searchItem); assert.equal(searchTrees[1].treeChildren[3], result.container, "Binding returns with correct container view"); assert.equal(searchTrees[1], result.parent.container, "Binding returns with correct container parent view"); 
assert.equal(null, result.parent.parent, "Further parents are null"); });<|fim▁hole|> it("binding_returns_with_correct_interested_parent_chain_if_interested_views_added_in_wrong_order", () => { let searchTrees: TreeContainer[] = createTrees(5, 4); registry.addContainer(searchTrees[0]); registry.addContainer(searchTrees[1].treeChildren[3]); registry.addContainer(searchTrees[1]); let searchItem: Sprite = searchTrees[1].treeChildren[3].treeChildren[3].treeChildren[3].treeChildren[3]; let result: ContainerBinding = registry.findParentBinding(searchItem); assert.equal(searchTrees[1].treeChildren[3], result.container, "Binding returns with correct container view"); assert.equal(searchTrees[1], result.parent.container, "Binding returns with correct container parent view"); assert.equal(null, result.parent.parent, "Further parents are null"); }); it("binding_returns_with_correct_interested_parent_chain_if_interested_views_added_in_wrong_order_with_gaps", () => { let searchTrees: TreeContainer[] = createTrees(5, 4); registry.addContainer(searchTrees[0]); registry.addContainer(searchTrees[1].treeChildren[3].treeChildren[2]); registry.addContainer(searchTrees[1]); let searchItem: Sprite = searchTrees[1].treeChildren[3].treeChildren[2].treeChildren[3].treeChildren[3]; let result: ContainerBinding = registry.findParentBinding(searchItem); assert.equal(searchTrees[1].treeChildren[3].treeChildren[2], result.container, "Binding returns with correct container view"); assert.equal(searchTrees[1], result.parent.container, "Binding returns with correct container parent view"); assert.equal(null, result.parent.parent, "Further parents are null"); }); it("binding_returns_with_correct_interested_parent_chain_after_removal", () => { let searchTrees: TreeContainer[] = createTrees(5, 4); registry.addContainer(searchTrees[0]); registry.addContainer(searchTrees[1]); registry.addContainer(searchTrees[1].treeChildren[3].treeChildren[2].treeChildren[3]); 
registry.addContainer(searchTrees[1].treeChildren[3].treeChildren[2]); registry.addContainer(searchTrees[1].treeChildren[3]); registry.removeContainer(searchTrees[1].treeChildren[3].treeChildren[2]); let searchItem: Sprite = searchTrees[1].treeChildren[3].treeChildren[2].treeChildren[3].treeChildren[3]; let result: ContainerBinding = registry.findParentBinding(searchItem); assert.equal( searchTrees[1].treeChildren[3].treeChildren[2].treeChildren[3], result.container, "Binding returns with correct container view" ); assert.equal(searchTrees[1].treeChildren[3], result.parent.container, "Binding returns with correct container parent view"); assert.equal(searchTrees[1], result.parent.parent.container, "Binding returns with correct container parent parent view"); assert.equal(null, result.parent.parent.parent, "Further parents are null"); }); it("returns_null_if_search_item_is_not_inside_an_included_view", () => { let searchTrees: TreeContainer[] = createTrees(5, 4); registry.addContainer(searchTrees[0]); registry.addContainer(searchTrees[1]); registry.addContainer(searchTrees[1].treeChildren[3].treeChildren[2].treeChildren[3]); registry.addContainer(searchTrees[1].treeChildren[3].treeChildren[2]); registry.addContainer(searchTrees[1].treeChildren[3]); registry.removeContainer(searchTrees[1].treeChildren[3].treeChildren[2]); let searchItem: Sprite = searchTrees[2].treeChildren[3].treeChildren[2].treeChildren[3].treeChildren[3]; let result: ContainerBinding = registry.findParentBinding(searchItem); assert.equal(null, result, "Returns null if not inside an included view"); }); it("returns_root_container_view_bindings_one_item", () => { let searchTrees: TreeContainer[] = createTrees(1, 1); let expectedBinding: ContainerBinding = registry.addContainer(searchTrees[0]); let expectedRootBindings: ContainerBinding[] = [expectedBinding]; assert.deepEqual(expectedRootBindings, registry.rootBindings, "Returns root container view bindings one item"); }); 
it("returns_root_container_view_bindings_many_items", () => { let searchTrees: TreeContainer[] = createTrees(5, 4); let firstExpectedBinding: ContainerBinding = registry.addContainer(searchTrees[0]); registry.addContainer(searchTrees[1].treeChildren[3].treeChildren[2].treeChildren[3]); registry.addContainer(searchTrees[1].treeChildren[3].treeChildren[2]); let secondExpectedBinding: ContainerBinding = registry.addContainer(searchTrees[1]); registry.addContainer(searchTrees[1].treeChildren[3]); let expectedRootBindings: ContainerBinding[] = [firstExpectedBinding, secondExpectedBinding]; assert.deepEqual(expectedRootBindings, registry.rootBindings, "Returns root container view bindings many items"); }); it("returns_root_container_view_bindings_many_items_after_removals", () => { let searchTrees: TreeContainer[] = createTrees(5, 4); let firstExpectedBinding: ContainerBinding = registry.addContainer(searchTrees[0]); registry.addContainer(searchTrees[1].treeChildren[3].treeChildren[2].treeChildren[3]); registry.addContainer(searchTrees[1].treeChildren[3].treeChildren[2]); registry.addContainer(searchTrees[1]); let secondExpectedBinding: ContainerBinding = registry.addContainer(searchTrees[1].treeChildren[3]); registry.removeContainer(searchTrees[1]); let expectedRootBindings: ContainerBinding[] = [firstExpectedBinding, secondExpectedBinding]; assert.deepEqual(expectedRootBindings, registry.rootBindings, "Returns root container view bindings many items after removals"); }); it("adding_container_dispatches_event", () => { let container: Sprite = new Sprite(); let callCount: number = 0; registry.addEventListener(ContainerRegistryEvent.CONTAINER_ADD, function onContainerAdd(event: ContainerRegistryEvent): void { callCount++; }); registry.addContainer(container); registry.addContainer(container); assert.equal(callCount, 1); }); it("removing_container_dispatches_event", () => { let container: Sprite = new Sprite(); let callCount: number = 0; 
registry.addEventListener(ContainerRegistryEvent.CONTAINER_REMOVE, function onContainerRemove(event: ContainerRegistryEvent): void { callCount++; }); registry.addContainer(container); registry.removeContainer(container); registry.removeContainer(container); assert.equal(callCount, 1); }); it("adding_root_container_dispatches_event", () => { let container: Sprite = new Sprite(); let callCount: number = 0; registry.addEventListener(ContainerRegistryEvent.ROOT_CONTAINER_ADD, function onRootContainerAdd( event: ContainerRegistryEvent ): void { callCount++; }); registry.addContainer(container); assert.equal(callCount, 1); }); it("removing_root_container_dispatches_event", () => { let container: Sprite = new Sprite(); let callCount: number = 0; registry.addEventListener(ContainerRegistryEvent.ROOT_CONTAINER_REMOVE, function onRootContainerRemove( event: ContainerRegistryEvent ): void { callCount++; }); registry.addContainer(container); registry.removeContainer(container); assert.equal(callCount, 1); }); it("empty_binding_is_removed", () => { let container: Sprite = new Sprite(); let handler: IViewHandler = new CallbackViewHandler(); registry.addContainer(container).addHandler(handler); registry.getBinding(container).removeHandler(handler); assert.isUndefined(registry.getBinding(container)); }); function createTrees(treeDepth: number, treeWidth: number): TreeContainer[] { const trees: TreeContainer[] = []; for (let i: number = 0; i < treeWidth; i++) { let treeContainer: TreeContainer = new TreeContainer(treeDepth, treeWidth); trees.push(treeContainer); } return trees; } });<|fim▁end|>
<|file_name|>config.orchestrator.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
export { ListToolbarConfigOrchestrator } from '@skyux/list-builder/modules/list-toolbar/state/config/config.orchestrator';
<|file_name|>scalarfloat.py<|end_file_name|><|fim▁begin|># coding: utf-8 from __future__ import print_function, absolute_import, division, unicode_literals import sys from .compat import no_limit_int # NOQA if False: # MYPY from typing import Text, Any, Dict, List # NOQA __all__ = ["ScalarFloat", "ExponentialFloat", "ExponentialCapsFloat"] class ScalarFloat(float): def __new__(cls, *args, **kw):<|fim▁hole|> m_lead0 = kw.pop('m_lead0', 0) # type: ignore exp = kw.pop('exp', None) # type: ignore e_width = kw.pop('e_width', None) # type: ignore e_sign = kw.pop('e_sign', None) # type: ignore underscore = kw.pop('underscore', None) # type: ignore v = float.__new__(cls, *args, **kw) # type: ignore v._width = width v._prec = prec v._m_sign = m_sign v._m_lead0 = m_lead0 v._exp = exp v._e_width = e_width v._e_sign = e_sign v._underscore = underscore return v def __iadd__(self, a): # type: ignore # type: (Any) -> Any x = type(self)(self + a) x._width = self._width # type: ignore x._underscore = self._underscore[:] if self._underscore is not None else None # type: ignore # NOQA return x def __ifloordiv__(self, a): # type: ignore # type: (Any) -> Any x = type(self)(self // a) x._width = self._width # type: ignore x._underscore = self._underscore[:] if self._underscore is not None else None # type: ignore # NOQA return x def __imul__(self, a): # type: ignore # type: (Any) -> Any x = type(self)(self * a) x._width = self._width # type: ignore x._underscore = self._underscore[:] if self._underscore is not None else None # type: ignore # NOQA return x def __ipow__(self, a): # type: ignore # type: (Any) -> Any x = type(self)(self ** a) x._width = self._width # type: ignore x._underscore = self._underscore[:] if self._underscore is not None else None # type: ignore # NOQA return x def __isub__(self, a): # type: ignore # type: (Any) -> Any x = type(self)(self - a) x._width = self._width # type: ignore x._underscore = self._underscore[:] if self._underscore is not None else None # type: 
ignore # NOQA return x def dump(self, out=sys.stdout): # type: (Any) -> Any print('ScalarFloat({}| w:{}, p:{}, s:{}, lz:{}|{}, w:{}, s:{})'.format( self, self._width, self._prec, self._m_sign, self._m_lead0, # type: ignore self._exp, self._e_width, self._e_sign), file=out) # type: ignore class ExponentialFloat(ScalarFloat): def __new__(cls, value, width=None, underscore=None): # type: (Any, Any, Any) -> Any return ScalarFloat.__new__(cls, value, width=width, underscore=underscore) class ExponentialCapsFloat(ScalarFloat): def __new__(cls, value, width=None, underscore=None): # type: (Any, Any, Any) -> Any return ScalarFloat.__new__(cls, value, width=width, underscore=underscore)<|fim▁end|>
# type: (Any, Any, Any) -> Any width = kw.pop('width', None) # type: ignore prec = kw.pop('prec', None) # type: ignore m_sign = kw.pop('m_sign', None) # type: ignore
<|file_name|>UtilTest.java<|end_file_name|><|fim▁begin|>package de.riedquat.java.io; import de.riedquat.java.util.Arrays; import org.junit.Test; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import static de.riedquat.java.io.Util.copy; import static de.riedquat.java.util.Arrays.EMPTY_BYTE_ARRAY; import static org.junit.Assert.assertArrayEquals; public class UtilTest { @Test public void emptyStream_copiesNothing() throws IOException { assertCopies(EMPTY_BYTE_ARRAY);<|fim▁hole|> @Test public void copiesData() throws IOException { assertCopies(Arrays.createRandomByteArray(10001)); } private void assertCopies(final byte[] testData) throws IOException { final ByteArrayInputStream in = new ByteArrayInputStream(testData); final ByteArrayOutputStream out = new ByteArrayOutputStream(); copy(out, in); assertArrayEquals(testData, out.toByteArray()); } }<|fim▁end|>
}
<|file_name|>MasterFaultDetection.java<|end_file_name|><|fim▁begin|>/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.discovery.zen.fd; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ProcessedClusterStateNonMasterUpdateTask; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.zen.NotMasterException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; import java.io.IOException; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicBoolean; <|fim▁hole|>import static org.elasticsearch.transport.TransportRequestOptions.options; /** * A fault detection that pings the master periodically to see if its alive. 
*/ public class MasterFaultDetection extends FaultDetection { public static final String MASTER_PING_ACTION_NAME = "internal:discovery/zen/fd/master_ping"; public static interface Listener { /** called when pinging the master failed, like a timeout, transport disconnects etc */ void onMasterFailure(DiscoveryNode masterNode, String reason); } private final ClusterService clusterService; private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<>(); private volatile MasterPinger masterPinger; private final Object masterNodeMutex = new Object(); private volatile DiscoveryNode masterNode; private volatile int retryCount; private final AtomicBoolean notifiedMasterFailure = new AtomicBoolean(); public MasterFaultDetection(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterName clusterName, ClusterService clusterService) { super(settings, threadPool, transportService, clusterName); this.clusterService = clusterService; logger.debug("[master] uses ping_interval [{}], ping_timeout [{}], ping_retries [{}]", pingInterval, pingRetryTimeout, pingRetryCount); transportService.registerRequestHandler(MASTER_PING_ACTION_NAME, MasterPingRequest::new, ThreadPool.Names.SAME, new MasterPingRequestHandler()); } public DiscoveryNode masterNode() { return this.masterNode; } public void addListener(Listener listener) { listeners.add(listener); } public void removeListener(Listener listener) { listeners.remove(listener); } public void restart(DiscoveryNode masterNode, String reason) { synchronized (masterNodeMutex) { if (logger.isDebugEnabled()) { logger.debug("[master] restarting fault detection against master [{}], reason [{}]", masterNode, reason); } innerStop(); innerStart(masterNode); } } public void start(final DiscoveryNode masterNode, String reason) { synchronized (masterNodeMutex) { if (logger.isDebugEnabled()) { logger.debug("[master] starting fault detection against master [{}], reason [{}]", masterNode, reason); } 
innerStart(masterNode); } } private void innerStart(final DiscoveryNode masterNode) { this.masterNode = masterNode; this.retryCount = 0; this.notifiedMasterFailure.set(false); // try and connect to make sure we are connected try { transportService.connectToNode(masterNode); } catch (final Exception e) { // notify master failure (which stops also) and bail.. notifyMasterFailure(masterNode, "failed to perform initial connect [" + e.getMessage() + "]"); return; } if (masterPinger != null) { masterPinger.stop(); } this.masterPinger = new MasterPinger(); // we start pinging slightly later to allow the chosen master to complete it's own master election threadPool.schedule(pingInterval, ThreadPool.Names.SAME, masterPinger); } public void stop(String reason) { synchronized (masterNodeMutex) { if (masterNode != null) { if (logger.isDebugEnabled()) { logger.debug("[master] stopping fault detection against master [{}], reason [{}]", masterNode, reason); } } innerStop(); } } private void innerStop() { // also will stop the next ping schedule this.retryCount = 0; if (masterPinger != null) { masterPinger.stop(); masterPinger = null; } this.masterNode = null; } @Override public void close() { super.close(); stop("closing"); this.listeners.clear(); transportService.removeHandler(MASTER_PING_ACTION_NAME); } @Override protected void handleTransportDisconnect(DiscoveryNode node) { synchronized (masterNodeMutex) { if (!node.equals(this.masterNode)) { return; } if (connectOnNetworkDisconnect) { try { transportService.connectToNode(node); // if all is well, make sure we restart the pinger if (masterPinger != null) { masterPinger.stop(); } this.masterPinger = new MasterPinger(); // we use schedule with a 0 time value to run the pinger on the pool as it will run on later threadPool.schedule(TimeValue.timeValueMillis(0), ThreadPool.Names.SAME, masterPinger); } catch (Exception e) { logger.trace("[master] [{}] transport disconnected (with verified connect)", masterNode); 
notifyMasterFailure(masterNode, "transport disconnected (with verified connect)"); } } else { logger.trace("[master] [{}] transport disconnected", node); notifyMasterFailure(node, "transport disconnected"); } } } private void notifyMasterFailure(final DiscoveryNode masterNode, final String reason) { if (notifiedMasterFailure.compareAndSet(false, true)) { threadPool.generic().execute(new Runnable() { @Override public void run() { for (Listener listener : listeners) { listener.onMasterFailure(masterNode, reason); } } }); stop("master failure, " + reason); } } private class MasterPinger implements Runnable { private volatile boolean running = true; public void stop() { this.running = false; } @Override public void run() { if (!running) { // return and don't spawn... return; } final DiscoveryNode masterToPing = masterNode; if (masterToPing == null) { // master is null, should not happen, but we are still running, so reschedule threadPool.schedule(pingInterval, ThreadPool.Names.SAME, MasterPinger.this); return; } final MasterPingRequest request = new MasterPingRequest(clusterService.localNode().id(), masterToPing.id(), clusterName); final TransportRequestOptions options = options().withType(TransportRequestOptions.Type.PING).withTimeout(pingRetryTimeout); transportService.sendRequest(masterToPing, MASTER_PING_ACTION_NAME, request, options, new BaseTransportResponseHandler<MasterPingResponseResponse>() { @Override public MasterPingResponseResponse newInstance() { return new MasterPingResponseResponse(); } @Override public void handleResponse(MasterPingResponseResponse response) { if (!running) { return; } // reset the counter, we got a good result MasterFaultDetection.this.retryCount = 0; // check if the master node did not get switched on us..., if it did, we simply return with no reschedule if (masterToPing.equals(MasterFaultDetection.this.masterNode())) { // we don't stop on disconnection from master, we keep pinging it threadPool.schedule(pingInterval, 
ThreadPool.Names.SAME, MasterPinger.this); } } @Override public void handleException(TransportException exp) { if (!running) { return; } synchronized (masterNodeMutex) { // check if the master node did not get switched on us... if (masterToPing.equals(MasterFaultDetection.this.masterNode())) { if (exp instanceof ConnectTransportException || exp.getCause() instanceof ConnectTransportException) { handleTransportDisconnect(masterToPing); return; } else if (exp.getCause() instanceof NotMasterException) { logger.debug("[master] pinging a master {} that is no longer a master", masterNode); notifyMasterFailure(masterToPing, "no longer master"); return; } else if (exp.getCause() instanceof ThisIsNotTheMasterYouAreLookingForException) { logger.debug("[master] pinging a master {} that is not the master", masterNode); notifyMasterFailure(masterToPing, "not master"); return; } else if (exp.getCause() instanceof NodeDoesNotExistOnMasterException) { logger.debug("[master] pinging a master {} but we do not exists on it, act as if its master failure", masterNode); notifyMasterFailure(masterToPing, "do not exists on master, act as master failure"); return; } int retryCount = ++MasterFaultDetection.this.retryCount; logger.trace("[master] failed to ping [{}], retry [{}] out of [{}]", exp, masterNode, retryCount, pingRetryCount); if (retryCount >= pingRetryCount) { logger.debug("[master] failed to ping [{}], tried [{}] times, each with maximum [{}] timeout", masterNode, pingRetryCount, pingRetryTimeout); // not good, failure notifyMasterFailure(masterToPing, "failed to ping, tried [" + pingRetryCount + "] times, each with maximum [" + pingRetryTimeout + "] timeout"); } else { // resend the request, not reschedule, rely on send timeout transportService.sendRequest(masterToPing, MASTER_PING_ACTION_NAME, request, options, this); } } } } @Override public String executor() { return ThreadPool.Names.SAME; } } ); } } /** Thrown when a ping reaches the wrong node */ static class 
ThisIsNotTheMasterYouAreLookingForException extends IllegalStateException { ThisIsNotTheMasterYouAreLookingForException(String msg) { super(msg); } ThisIsNotTheMasterYouAreLookingForException() { } @Override public Throwable fillInStackTrace() { return null; } } static class NodeDoesNotExistOnMasterException extends IllegalStateException { @Override public Throwable fillInStackTrace() { return null; } } private class MasterPingRequestHandler implements TransportRequestHandler<MasterPingRequest> { @Override public void messageReceived(final MasterPingRequest request, final TransportChannel channel) throws Exception { final DiscoveryNodes nodes = clusterService.state().nodes(); // check if we are really the same master as the one we seemed to be think we are // this can happen if the master got "kill -9" and then another node started using the same port if (!request.masterNodeId.equals(nodes.localNodeId())) { throw new ThisIsNotTheMasterYouAreLookingForException(); } // ping from nodes of version < 1.4.0 will have the clustername set to null if (request.clusterName != null && !request.clusterName.equals(clusterName)) { logger.trace("master fault detection ping request is targeted for a different [{}] cluster then us [{}]", request.clusterName, clusterName); throw new ThisIsNotTheMasterYouAreLookingForException("master fault detection ping request is targeted for a different [" + request.clusterName + "] cluster then us [" + clusterName + "]"); } // when we are elected as master or when a node joins, we use a cluster state update thread // to incorporate that information in the cluster state. That cluster state is published // before we make it available locally. This means that a master ping can come from a node // that has already processed the new CS but it is not known locally. // Therefore, if we fail we have to check again under a cluster state thread to make sure // all processing is finished. 
// if (!nodes.localNodeMaster() || !nodes.nodeExists(request.nodeId)) { logger.trace("checking ping from [{}] under a cluster state thread", request.nodeId); clusterService.submitStateUpdateTask("master ping (from: [" + request.nodeId + "])", new ProcessedClusterStateNonMasterUpdateTask() { @Override public ClusterState execute(ClusterState currentState) throws Exception { // if we are no longer master, fail... DiscoveryNodes nodes = currentState.nodes(); if (!nodes.localNodeMaster()) { throw new NotMasterException("local node is not master"); } if (!nodes.nodeExists(request.nodeId)) { throw new NodeDoesNotExistOnMasterException(); } return currentState; } @Override public void onFailure(String source, @Nullable Throwable t) { if (t == null) { t = new ElasticsearchException("unknown error while processing ping"); } try { channel.sendResponse(t); } catch (IOException e) { logger.warn("error while sending ping response", e); } } @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { try { channel.sendResponse(new MasterPingResponseResponse()); } catch (IOException e) { logger.warn("error while sending ping response", e); } } }); } else { // send a response, and note if we are connected to the master or not channel.sendResponse(new MasterPingResponseResponse()); } } } public static class MasterPingRequest extends TransportRequest { private String nodeId; private String masterNodeId; private ClusterName clusterName; public MasterPingRequest() { } private MasterPingRequest(String nodeId, String masterNodeId, ClusterName clusterName) { this.nodeId = nodeId; this.masterNodeId = masterNodeId; this.clusterName = clusterName; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); nodeId = in.readString(); masterNodeId = in.readString(); clusterName = ClusterName.readClusterName(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); 
out.writeString(nodeId); out.writeString(masterNodeId); clusterName.writeTo(out); } } private static class MasterPingResponseResponse extends TransportResponse { private MasterPingResponseResponse() { } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); } } }<|fim▁end|>
<|file_name|>GnssLogTime.cpp<|end_file_name|><|fim▁begin|>/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ /* * File: GnssLogTime.cpp * Author: markov */ #include <boost/date_time/posix_time/posix_time.hpp> #include "gnss_functions.hpp" #include "GnssLogTime.hpp" #include "common_functions.hpp" GnssLogTime::GnssLogTime() { week_ = 0; milliseconds_ = 0; } <|fim▁hole|> week_ = week; milliseconds_ = milliseconds; utcTime_ = getLogTime(week_, milliseconds_); } GnssLogTime::GnssLogTime(const GnssLogTime& gnssLogTime) : stringUtcTime_(gnssLogTime.stringUtcTime_), stringIsoTime_(gnssLogTime.stringIsoTime_) { week_ = gnssLogTime.week_; milliseconds_ = gnssLogTime.milliseconds_; utcTime_ = gnssLogTime.utcTime_; } GnssLogTime::~GnssLogTime() { } GnssLogTime& GnssLogTime::operator=(const GnssLogTime& b) { week_ = b.week_; milliseconds_ = b.milliseconds_; utcTime_ = b.utcTime_; stringUtcTime_ = b.stringUtcTime_; stringIsoTime_ = b.stringIsoTime_; return *this; } bool GnssLogTime::operator==(const GnssLogTime& b) const { return ((week_ == b.week_) && (milliseconds_ == b.milliseconds_)); } bool GnssLogTime::operator!=(const GnssLogTime& b) const { return ((week_ != b.week_) || (milliseconds_ != b.milliseconds_)); } bool GnssLogTime::operator<(const GnssLogTime& b) const { return ((week_ < b.week_) || (milliseconds_ < b.milliseconds_)); } bool GnssLogTime::operator>(const GnssLogTime& b) const { return ((week_ >= b.week_) && (milliseconds_ > b.milliseconds_)); } bool GnssLogTime::operator<=(const GnssLogTime& b) const { return ((*this < b) || (*this == b)); } bool GnssLogTime::operator>=(const GnssLogTime& b) const { return ((*this > b) || (*this == b)); } PTime GnssLogTime::getUtcTime() { //return UTC time in ptime variable return utcTime_; } std::string GnssLogTime::getStringUtcTime() { //return UTC time in string format if (stringUtcTime_.empty()) 
{ stringUtcTime_ = pTimeToSimpleString(utcTime_); } return stringUtcTime_; } std::string GnssLogTime::getStringIsoTime() { //return UTS time in ISO extended string if (stringIsoTime_.empty()) { stringIsoTime_ = pTimeToIsoString(utcTime_); } return stringIsoTime_; } unsigned short GnssLogTime::getWeek() { return week_; } unsigned int GnssLogTime::getMilliseconds() { return milliseconds_; }<|fim▁end|>
GnssLogTime::GnssLogTime(unsigned short week, unsigned int milliseconds) {
<|file_name|>googleAnalyticsAdapter_spec.js<|end_file_name|><|fim▁begin|>import ga from 'modules/googleAnalyticsAdapter.js'; var assert = require('assert'); describe('Ga', function () {<|fim▁hole|> return cpm <= 1 ? '<= 1$' : '> 1$'; } var config = { options: { trackerName: 'foo', enableDistribution: true, cpmDistribution: cpmDistribution } }; // enableAnalytics can only be called once ga.enableAnalytics(config); it('should accept a tracker name option and output prefixed send string', function () { var output = ga.getTrackerSend(); assert.equal(output, 'foo.send'); }); it('should use the custom cpm distribution', function() { assert.equal(ga.getCpmDistribution(0.5), '<= 1$'); assert.equal(ga.getCpmDistribution(1), '<= 1$'); assert.equal(ga.getCpmDistribution(2), '> 1$'); assert.equal(ga.getCpmDistribution(5.23), '> 1$'); }); }); });<|fim▁end|>
describe('enableAnalytics', function () { var cpmDistribution = function(cpm) {
<|file_name|>window.rs<|end_file_name|><|fim▁begin|>use error::Result; use glium_sdl2::{DisplayBuild, SDL2Facade}; use glium::{Frame, Surface}; use platform; use sdl2; use sdl2::Sdl; use sdl2::video::GLProfile; const WINDOW_TITLE: &'static str = "Rusty Doom v0.0.7"; const OPENGL_DEPTH_SIZE: u8 = 24; pub struct Window { facade: SDL2Facade, width: u32, height: u32, } impl Window { pub fn new(sdl: &Sdl, width: u32, height: u32) -> Result<Window> { let video = try!(sdl.video()); let gl_attr = video.gl_attr(); gl_attr.set_context_profile(GLProfile::Core); gl_attr.set_context_major_version(platform::GL_MAJOR_VERSION); gl_attr.set_context_minor_version(platform::GL_MINOR_VERSION); gl_attr.set_depth_size(OPENGL_DEPTH_SIZE); gl_attr.set_double_buffer(true); let facade = try!(video.window(WINDOW_TITLE, width as u32, height as u32) .position_centered() .opengl() .build_glium()); sdl2::clear_error(); Ok(Window { facade: facade, width: width, height: height, }) } pub fn width(&self) -> u32 { self.width } pub fn height(&self) -> u32 { self.height } pub fn aspect_ratio(&self) -> f32 { self.width as f32 / self.height as f32 } pub fn draw(&self) -> Frame { let mut frame = self.facade.draw(); frame.clear_all_srgb((0.06, 0.07, 0.09, 0.0), 1.0, 0); frame<|fim▁hole|> } pub fn facade(&self) -> &SDL2Facade { &self.facade } }<|fim▁end|>
<|file_name|>blog.py<|end_file_name|><|fim▁begin|>import os import datetime import logging ORDER = 999 POSTS_PATH = 'posts/' POSTS = [] from django.template import Context from django.template.loader import get_template from django.template.loader_tags import BlockNode, ExtendsNode def getNode(template, context=Context(), name='subject'): """ Get django block contents from a template. http://stackoverflow.com/questions/2687173/ django-how-can-i-get-a-block-from-a-template """ for node in template: if isinstance(node, BlockNode) and node.name == name: return node.render(context) elif isinstance(node, ExtendsNode): return getNode(node.nodelist, context, name) raise Exception("Node '%s' could not be found in template." % name) def preBuild(site): global POSTS<|fim▁hole|> # Build all the posts for page in site.pages(): if page.path.startswith(POSTS_PATH): # Skip non html posts for obious reasons if not page.path.endswith('.html'): continue # Find a specific defined variable in the page context, # and throw a warning if we're missing it. 
def find(name): c = page.context() if not name in c: logging.info("Missing info '%s' for post %s" % (name, page.path)) return '' return c.get(name, '') # Build a context for each post postContext = {} postContext['title'] = find('title') postContext['author'] = find('author') postContext['date'] = find('date') postContext['path'] = page.path postContext['body'] = getNode(get_template(page.path), name="body") # Parse the date into a date object try: postContext['date'] = datetime.datetime.strptime(postContext['date'], '%d-%m-%Y') except Exception, e: logging.warning("Date format not correct for page %s, should be dd-mm-yy\n%s" % (page.path, e)) continue POSTS.append(postContext) # Sort the posts by date POSTS = sorted(POSTS, key=lambda x: x['date']) POSTS.reverse() indexes = xrange(0, len(POSTS)) for i in indexes: if i+1 in indexes: POSTS[i]['prevPost'] = POSTS[i+1] if i-1 in indexes: POSTS[i]['nextPost'] = POSTS[i-1] def preBuildPage(site, page, context, data): """ Add the list of posts to every page context so we can access them from wherever on the site. """ context['posts'] = POSTS for post in POSTS: if post['path'] == page.path: context.update(post) return context, data<|fim▁end|>
<|file_name|>html.min.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1 oid sha256:54bbb07ae1a9f895764c118915dd17af16bf383083de7e4f867c71d1da32c925<|fim▁hole|><|fim▁end|>
size 972
<|file_name|>redir_assignment_parser.py<|end_file_name|><|fim▁begin|>from vint.ast.traversing import traverse, register_traverser_extension from vint.ast.parsing import Parser from vint.ast.node_type import NodeType REDIR_CONTENT = 'VINT:redir_content' class RedirAssignmentParser(object): """ A class to make redir assignment parseable. """ def process(self, ast):<|fim▁hole|> is_redir_command = node['ea']['cmd'].get('name') == 'redir' if not is_redir_command: return redir_cmd_str = node['str'] is_redir_assignment = '=>' in redir_cmd_str if not is_redir_assignment: return parser = Parser() redir_content_node = parser.parse_redir(node) node[REDIR_CONTENT] = redir_content_node traverse(ast, on_enter=enter_handler) return ast def get_redir_content(node): return node.get(REDIR_CONTENT) @register_traverser_extension def traverse_redir_content(node, on_enter=None, on_leave=None): if REDIR_CONTENT not in node: return traverse(node[REDIR_CONTENT], on_enter=on_enter, on_leave=on_leave)<|fim▁end|>
def enter_handler(node): node_type = NodeType(node['type']) if node_type is not NodeType.EXCMD: return
<|file_name|>public_api.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # SPDX-FileCopyrightText: 2013-2021 Agora Voting SL <[email protected]> # # SPDX-License-Identifier: AGPL-3.0-only # import pickle import base64 import json import re from datetime import datetime from flask import Blueprint, request, make_response, abort from frestq.utils import loads, dumps from frestq.tasks import SimpleTask, TaskError from frestq.app import app, db from models import Election, Authority, QueryQueue from create_election.performer_jobs import check_election_data from taskqueue import queue_task, apply_task, dequeue_task public_api = Blueprint('public_api', __name__) def error(status, message=""): if message: data = json.dumps(dict(message=message)) else: data="" return make_response(data, status) @public_api.route('/dequeue', methods=['GET']) def dequeue(): try: dequeue_task() except Exception as e: return make_response(dumps(dict(status=e.message)), 202) return make_response(dumps(dict(status="ok")), 202) @public_api.route('/election', methods=['POST']) def post_election(): ''' POST /election Creates an election, with the given input data. This involves communicating with the different election authorities to generate the joint public key. 
Example request: POST /election { "id": 1110, "title": "Votación de candidatos", "description": "Selecciona los documentos polí­tico, ético y organizativo con los que Podemos", "director": "wadobo-auth1", "authorities": "openkratio-authority", "layout": "pcandidates-election", "presentation": { "share_text": "lo que sea", "theme": "foo", "urls": [ { "title": "", "url": "" } ], "theme_css": "whatever" }, "end_date": "2013-12-09T18:17:14.457000", "start_date": "2013-12-06T18:17:14.457000", "questions": [ { "description": "", "layout": "pcandidates-election", "max": 1, "min": 0, "num_winners": 1, "title": "Secretarí­a General", "randomize_answer_order": true, "tally_type": "plurality-at-large", "answer_total_votes_percentage": "over-total-valid-votes", "answers": [ { "id": 0, "category": "Equipo de Enfermeras", "details": "", "sort_order": 1, "urls": [ { "title": "", "url": "" } ], "text": "Fulanita de tal", } ] } ], "authorities": [ { "name": "Asociación Sugus GNU/Linux", "orchestra_url": "https://sugus.eii.us.es/orchestra", "ssl_cert": "-----BEGIN CERTIFICATE-----\nMIIFATCCA+mgAwIBAgIQAOli4NZQEWpKZeYX25jjwDANBgkqhkiG9w0BAQUFADBz\n8YOltJ6QfO7jNHU9jh/AxeiRf6MibZn6fvBHvFCrVBvDD43M0gdhMkVEDVNkPaak\nC7AHA/waXZ2EwW57Chr2hlZWAkwkFvsWxNt9BgJAJJt4CIVhN/iau/SaXD0l0t1N\nT0ye54QPYl38Eumvc439Yd1CeVS/HYbP0ISIfpNkkFA5TiQdoA==\n-----END CERTIFICATE-----" }, { "name": "Agora Ciudadana", "orchestra_url": "https://agoravoting.com:6874/orchestra", "ssl_cert": "-----BEGIN CERTIFICATE-----\nMIIFATCCA+mgAwIBAgIQAOli4NZQEWpKZeYX25jjwDANBgkqhkiG9w0BAQUFADBz\n8YOltJ6QfO7jNHU9jh/AxeiRf6MibZn6fvBHvFCrVBvDD43M0gdhMkVEDVNkPaak\nC7AHA/waXZ2EwW57Chr2hlZWAkwkFvsWxNt9BgJAJJt4CIVhN/iau/SaXD0l0t1N\nT0ye54QPYl38Eumvc439Yd1CeVS/HYbP0ISIfpNkkFA5TiQdoA==\n-----END CERTIFICATE-----" }, { "name": "Wadobo Labs", "orchestra_url": "https://wadobo.com:6874/orchestra", "ssl_cert": "-----BEGIN 
CERTIFICATE-----\nMIIFATCCA+mgAwIBAgIQAOli4NZQEWpKZeYX25jjwDANBgkqhkiG9w0BAQUFADBz\n8YOltJ6QfO7jNHU9jh/AxeiRf6MibZn6fvBHvFCrVBvDD43M0gdhMkVEDVNkPaak\nC7AHA/waXZ2EwW57Chr2hlZWAkwkFvsWxNt9BgJAJJt4CIVhN/iau/SaXD0l0t1N\nT0ye54QPYl38Eumvc439Yd1CeVS/HYbP0ISIfpNkkFA5TiQdoA==\n-----END CERTIFICATE-----" } ] } On success, response is empty with status 202 Accepted and returns something like: { "task_id": "ba83ee09-aa83-1901-bb11-e645b52fc558", } When the election finally gets processed, the callback_url is called with a POST containing the protInfo.xml file generated jointly by each authority, following this example response: { "status": "finished", "reference": { "election_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5", "action": "POST /election" }, "session_data": [{ "session_id": "deadbeef-03fa-4890-aa83-2fc558e645b5", "publickey": ["<pubkey codified in hexadecimal>"] }] } Note that this protInfo.xml will contain the election public key, but also some other information. In particular, it's worth noting that the http and hint servers' urls for each authority could change later, if election-orchestra needs it. If there was an error, then the callback will be called following this example format: { "status": "error", "reference": { "session_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5", "action": "POST /election" }, "data": { "message": "error message" } } ''' data = request.get_json(force=True, silent=True) d = base64.b64encode(pickle.dumps(data)).decode('utf-8') queueid = queue_task(task='election', data=d) return make_response(dumps(dict(queue_id=queueid)), 202) @public_api.route('/tally', methods=['POST']) def post_tally(): ''' POST /tally Tallies an election, with the given input data. This involves communicating with the different election authorities to do the tally. 
Example request: POST /tally { "election_id": 111, "callback_url": "https://127.0.0.1:5000/public_api/receive_tally", "votes_url": "https://127.0.0.1:5000/public_data/vota4/encrypted_ciphertexts", "votes_hash": "ni:///sha-256;f4OxZX_x_FO5LcGBSKHWXfwtSx-j1ncoSt3SABJtkGk" } On success, response is empty with status 202 Accepted and returns something like: { "task_id": "ba83ee09-aa83-1901-bb11-e645b52fc558", } When the election finally gets processed, the callback_url is called with POST similar to the following example: { "status": "finished", "reference": { "election_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5", "action": "POST /tally" }, "data": { "votes_url": "https://127.0.0.1:5000/public_data/vota4/tally.tar.bz2", "votes_hash": "ni:///sha-256;f4OxZX_x_FO5LcGBSKHWXfwtSx-j1ncoSt3SABJtkGk" } } If there was an error, then the callback will be called following this example format: { "status": "error", "reference": { "election_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5", "action": "POST /tally" }, "data": { "message": "error message" } } ''' # first of all, parse input data data = request.get_json(force=True, silent=True) d = base64.b64encode(pickle.dumps(data)).decode('utf-8') queueid = queue_task(task='tally', data=d) return make_response(dumps(dict(queue_id=queueid)), 202) <|fim▁hole|>@public_api.route('/receive_election', methods=['POST']) def receive_election(): ''' This is a test route to be able to test that callbacks are correctly sent ''' print("ATTENTION received election callback: ") print(request.get_json(force=True, silent=True)) return make_response("", 202) @public_api.route('/receive_tally', methods=['POST']) def receive_tally(): ''' This is a test route to be able to test that callbacks are correctly sent ''' print("ATTENTION received tally callback: ") print(request.get_json(force=True, silent=True)) return make_response("", 202)<|fim▁end|>
<|file_name|>TileIdsWithMatches.java<|end_file_name|><|fim▁begin|>package org.janelia.alignment.match; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; /** * List of {@link CanvasMatches} with associated tileIds mapped for fast lookup. * * @author Eric Trautman */ public class TileIdsWithMatches { private final Set<String> tileIds; private final List<CanvasMatches> canvasMatchesList; public TileIdsWithMatches() { this.canvasMatchesList = new ArrayList<>(); this.tileIds = new HashSet<>(); } /** * * @param canvasMatchesList list of matches for section (could include tiles not in stack). * @param stackTileIds set of tile ids in stack. * To be kept, match pair must have both tiles in stack. */ public void addMatches(final List<CanvasMatches> canvasMatchesList,<|fim▁hole|> for (final CanvasMatches canvasMatches : canvasMatchesList) { final String pId = canvasMatches.getpId(); final String qId = canvasMatches.getqId(); if (stackTileIds.contains(pId) && stackTileIds.contains(qId)) { this.canvasMatchesList.add(canvasMatches); this.tileIds.add(pId); this.tileIds.add(qId); } } } public boolean contains(final String tileId) { return tileIds.contains(tileId); } public List<CanvasMatches> getCanvasMatchesList() { return canvasMatchesList; } }<|fim▁end|>
final Set<String> stackTileIds) {
<|file_name|>plot_morph_surface_stc.py<|end_file_name|><|fim▁begin|>""" .. _ex-morph-surface: ============================= Morph surface source estimate ============================= This example demonstrates how to morph an individual subject's :class:`mne.SourceEstimate` to a common reference space. We achieve this using :class:`mne.SourceMorph`. Pre-computed data will be morphed based on a spherical representation of the cortex computed using the spherical registration of :ref:`FreeSurfer <tut-freesurfer>` (https://surfer.nmr.mgh.harvard.edu/fswiki/SurfaceRegAndTemplates) [1]_. This transform will be used to morph the surface vertices of the subject towards the reference vertices. Here we will use 'fsaverage' as a reference space (see https://surfer.nmr.mgh.harvard.edu/fswiki/FsAverage). The transformation will be applied to the surface source estimate. A plot depicting the successful morph will be created for the spherical and inflated surface representation of ``'fsaverage'``, overlaid with the morphed surface source estimate. References ---------- .. [1] Greve D. N., Van der Haegen L., Cai Q., Stufflebeam S., Sabuncu M. R., Fischl B., Brysbaert M. A Surface-based Analysis of Language Lateralization and Cortical Asymmetry. Journal of Cognitive Neuroscience 25(9), 1477-1492, 2013. .. note:: For background information about morphing see :ref:`ch_morph`. 
""" # Author: Tommy Clausner <[email protected]> # # License: BSD (3-clause) import os import mne from mne.datasets import sample print(__doc__) ############################################################################### # Setup paths sample_dir_raw = sample.data_path() sample_dir = os.path.join(sample_dir_raw, 'MEG', 'sample') subjects_dir = os.path.join(sample_dir_raw, 'subjects') fname_stc = os.path.join(sample_dir, 'sample_audvis-meg') ############################################################################### # Load example data # Read stc from file stc = mne.read_source_estimate(fname_stc, subject='sample') ############################################################################### # Setting up SourceMorph for SourceEstimate # ----------------------------------------- # # In MNE surface source estimates represent the source space simply as # lists of vertices (see # :ref:`tut-source-estimate-class`). # This list can either be obtained from # :class:`mne.SourceSpaces` (src) or from the ``stc`` itself. # # Since the default ``spacing`` (resolution of surface mesh) is ``5`` and # ``subject_to`` is set to 'fsaverage', :class:`mne.SourceMorph` will use # default ico-5 ``fsaverage`` vertices to morph, which are the special # values ``[np.arange(10242)] * 2``. # # .. note:: This is not generally true for other subjects! The set of vertices # used for ``fsaverage`` with ico-5 spacing was designed to be # special. ico-5 spacings for other subjects (or other spacings # for fsaverage) must be calculated and will not be consecutive # integers. # # If src was not defined, the morph will actually not be precomputed, because # we lack the vertices *from* that we want to compute. Instead the morph will # be set up and when applying it, the actual transformation will be computed on # the fly. 
# # Initialize SourceMorph for SourceEstimate morph = mne.compute_source_morph(stc, subject_from='sample', subject_to='fsaverage', subjects_dir=subjects_dir) ############################################################################### # Apply morph to (Vector) SourceEstimate # -------------------------------------- #<|fim▁hole|> stc_fsaverage = morph.apply(stc) ############################################################################### # Plot results # ------------ # Define plotting parameters surfer_kwargs = dict( hemi='lh', subjects_dir=subjects_dir, clim=dict(kind='value', lims=[8, 12, 15]), views='lateral', initial_time=0.09, time_unit='s', size=(800, 800), smoothing_steps=5) # As spherical surface brain = stc_fsaverage.plot(surface='sphere', **surfer_kwargs) # Add title brain.add_text(0.1, 0.9, 'Morphed to fsaverage (spherical)', 'title', font_size=16) ############################################################################### # As inflated surface brain_inf = stc_fsaverage.plot(surface='inflated', **surfer_kwargs) # Add title brain_inf.add_text(0.1, 0.9, 'Morphed to fsaverage (inflated)', 'title', font_size=16) ############################################################################### # Reading and writing SourceMorph from and to disk # ------------------------------------------------ # # An instance of SourceMorph can be saved, by calling # :meth:`morph.save <mne.SourceMorph.save>`. # # This method allows for specification of a filename under which the ``morph`` # will be save in ".h5" format. 
If no file extension is provided, "-morph.h5" # will be appended to the respective defined filename:: # # >>> morph.save('my-file-name') # # Reading a saved source morph can be achieved by using # :func:`mne.read_source_morph`:: # # >>> morph = mne.read_source_morph('my-file-name-morph.h5') # # Once the environment is set up correctly, no information such as # ``subject_from`` or ``subjects_dir`` must be provided, since it can be # inferred from the data and use morph to 'fsaverage' by default. SourceMorph # can further be used without creating an instance and assigning it to a # variable. Instead :func:`mne.compute_source_morph` and # :meth:`mne.SourceMorph.apply` can be # easily chained into a handy one-liner. Taking this together the shortest # possible way to morph data directly would be: stc_fsaverage = mne.compute_source_morph(stc, subjects_dir=subjects_dir).apply(stc)<|fim▁end|>
# The morph will be applied to the source estimate data, by giving it as the # first argument to the morph we computed above.
<|file_name|>gitRepository.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # This file should be compatible with both Python 2 and 3. # If it is not, please file a bug report. """ This is a Class which allows one to manipulate a git repository. """ #external imports import os import tempfile #internal imports import subuserlib.subprocessExtras as subprocessExtras from subuserlib.classes.fileStructure import FileStructure class GitRepository(): def __init__(self,path): self.__path = path def getPath(self): return self.__path def run(self,args): """ Run git with the given command line arguments. """ return subprocessExtras.call(["git"]+args,cwd=self.getPath()) def runCollectOutput(self,args): """ Run git with the given command line arguments and return a tuple with (returncode,output). """ return subprocessExtras.callCollectOutput(["git"]+args,cwd=self.getPath()) def getFileStructureAtCommit(self,commit): """ Get a ``FileStructure`` object which relates to the given git commit. """ return GitFileStructure(self,commit) def commit(self,message): """ Run git commit with the given commit message. """ try: tempFile = tempfile.NamedTemporaryFile("w",encoding="utf-8") except TypeError: # Older versions of python have broken tempfile implementation for which you cannot set the encoding. tempFile = tempfile.NamedTemporaryFile("w")<|fim▁hole|> with tempFile as tempFile: tempFile.write(message) tempFile.flush() return self.run(["commit","--file",tempFile.name]) def checkout(self,commit,files=[]): """ Run git checkout """ self.run(["checkout",commit]+files) class GitFileStructure(FileStructure): def __init__(self,gitRepository,commit): """ Initialize the file structure. 
Here we setup test stuff: >>> import subuserlib.subprocessExtras >>> subuserlib.subprocessExtras.call(["git","init"],cwd="/home/travis/hashtest") 0 >>> subuserlib.subprocessExtras.call(["git","add","."],cwd="/home/travis/hashtest") 0 >>> subuserlib.subprocessExtras.call(["git","commit","-m","Initial commit"],cwd="/home/travis/hashtest") 0 """ self.__gitRepository = gitRepository self.__commit = commit def getCommit(self): return self.__commit def getRepository(self): return self.__gitRepository def lsTree(self, subfolder, extraArgs=[]): """ Returns a list of tuples of the form: (mode,type,hash,path) Coresponding to the items found in the subfolder. """ if not subfolder.endswith("/"): subfolder += "/" if subfolder == "/": subfolder = "./" (returncode,output) = self.getRepository().runCollectOutput(["ls-tree"]+extraArgs+[self.getCommit(),subfolder]) if returncode != 0: return [] # This commenting out is intentional. It is simpler to just return [] here than to check if the repository is properly initialized everywhere else. lines = output.splitlines() items = [] for line in lines: mode,objectType,rest = line.split(" ",2) objectHash,path = rest.split("\t",1) items.append((mode,objectType,objectHash,path)) return items def ls(self, subfolder, extraArgs=[]): """ Returns a list of file and folder paths. Paths are relative to the repository as a whole. >>> from subuserlib.classes.gitRepository import GitRepository >>> gitRepository = GitRepository("/home/travis/hashtest") >>> fileStructure = gitRepository.getFileStructureAtCommit("master") >>> print(",".join(fileStructure.ls("./"))) bar,blah """ items = self.lsTree(subfolder,extraArgs) paths = [] for item in items: paths.append(item[3]) return paths def lsFiles(self,subfolder): """ Returns a list of paths to files in the subfolder. Paths are relative to the repository as a whole. 
>>> from subuserlib.classes.gitRepository import GitRepository >>> gitRepository = GitRepository("/home/travis/hashtest") >>> fileStructure = gitRepository.getFileStructureAtCommit("master") >>> print(",".join(fileStructure.lsFiles("./"))) blah """ return list(set(self.ls(subfolder)) - set(self.lsFolders(subfolder))) def lsFolders(self,subfolder): """ Returns a list of paths to folders in the subfolder. Paths are relative to the repository as a whole. >>> from subuserlib.classes.gitRepository import GitRepository >>> gitRepository = GitRepository("/home/travis/hashtest") >>> fileStructure = gitRepository.getFileStructureAtCommit("master") >>> print(",".join(fileStructure.lsFolders("./"))) bar """ return self.ls(subfolder,extraArgs=["-d"]) def exists(self,path): """ >>> from subuserlib.classes.gitRepository import GitRepository >>> gitRepository = GitRepository("/home/travis/hashtest") >>> fileStructure = gitRepository.getFileStructureAtCommit("master") >>> fileStructure.exists("./blah") True >>> fileStructure.exists("./non-existant") False """ try: self.read(path) return True except OSError: return False def read(self,path): """ Returns the contents of the given file at the given commit. >>> from subuserlib.classes.gitRepository import GitRepository >>> gitRepository = GitRepository("/home/travis/hashtest") >>> fileStructure = gitRepository.getFileStructureAtCommit("master") >>> print(fileStructure.read("./blah")) blahblah <BLANKLINE> """ (errorcode,content) = self.getRepository().runCollectOutput(["show",self.getCommit()+":"+path]) if errorcode != 0: raise OSError("Git show exited with error "+str(errorcode)+". 
File does not exist.") return content def getMode(self,path): """ >>> from subuserlib.classes.gitRepository import GitRepository >>> gitRepository = GitRepository("/home/travis/hashtest") >>> fileStructure = gitRepository.getFileStructureAtCommit("master") >>> print(fileStructure.getModeString("./blah")) 100644 """ allObjects = self.lsTree("./",extraArgs=["-r"]) for treeObject in allObjects: if os.path.normpath(treeObject[3]) == os.path.normpath(path): return int(treeObject[0],8)<|fim▁end|>
message = message.encode('ascii', 'ignore').decode('ascii')
<|file_name|>AbstractField.java<|end_file_name|><|fim▁begin|>package net.thevpc.upa.impl; import net.thevpc.upa.*; import net.thevpc.upa.impl.transform.IdentityDataTypeTransform; import net.thevpc.upa.impl.util.NamingStrategy; import net.thevpc.upa.impl.util.NamingStrategyHelper; import net.thevpc.upa.impl.util.PlatformUtils; import net.thevpc.upa.types.*; import net.thevpc.upa.exceptions.UPAException; import net.thevpc.upa.filters.FieldFilter; import net.thevpc.upa.types.*; import java.util.ArrayList; import java.util.HashMap; import java.util.List; public abstract class AbstractField extends AbstractUPAObject implements Field, Comparable<Object> { protected Entity entity; protected EntityItem parent; protected DataType dataType; protected Formula persistFormula; protected int persistFormulaOrder; protected Formula updateFormula; protected int updateFormulaOrder; protected Formula queryFormula; protected Object defaultObject; protected SearchOperator searchOperator = SearchOperator.DEFAULT; protected DataTypeTransform typeTransform; protected HashMap<String, Object> properties; protected FlagSet<UserFieldModifier> userModifiers = FlagSets.noneOf(UserFieldModifier.class); protected FlagSet<UserFieldModifier> userExcludeModifiers = FlagSets.noneOf(UserFieldModifier.class); protected FlagSet<FieldModifier> effectiveModifiers = FlagSets.noneOf(FieldModifier.class); protected boolean closed; protected Object unspecifiedValue = UnspecifiedValue.DEFAULT; private AccessLevel persistAccessLevel = AccessLevel.READ_WRITE; private AccessLevel updateAccessLevel = AccessLevel.READ_WRITE; private AccessLevel readAccessLevel = AccessLevel.READ_WRITE; private ProtectionLevel persistProtectionLevel = ProtectionLevel.PUBLIC; private ProtectionLevel updateProtectionLevel = ProtectionLevel.PUBLIC; private ProtectionLevel readProtectionLevel = ProtectionLevel.PUBLIC; private FieldPersister fieldPersister; private PropertyAccessType accessType; private List<Relationship> 
manyToOneRelationships; private List<Relationship> oneToOneRelationships; private boolean _customDefaultObject = false; private Object _typeDefaultObject = false; protected AbstractField() { } @Override public String getAbsoluteName() { return (getEntity() == null ? "?" : getEntity().getName()) + "." + getName(); } public EntityItem getParent() { return parent; } public void setParent(EntityItem item) { EntityItem old = this.parent; EntityItem recent = item; beforePropertyChangeSupport.firePropertyChange("parent", old, recent); this.parent = item; afterPropertyChangeSupport.firePropertyChange("parent", old, recent); } @Override public void commitModelChanges() { manyToOneRelationships = getManyToOneRelationshipsImpl(); oneToOneRelationships = getOneToOneRelationshipsImpl(); } public boolean is(FieldFilter filter) throws UPAException { return filter.accept(this); } public boolean isId() throws UPAException { return getModifiers().contains(FieldModifier.ID); } @Override public boolean isGeneratedId() throws UPAException { if (!isId()) { return false; } Formula persistFormula = getPersistFormula(); return (persistFormula != null); } public boolean isMain() throws UPAException { return getModifiers().contains(FieldModifier.MAIN); } @Override public boolean isSystem() { return getModifiers().contains(FieldModifier.SYSTEM); } public boolean isSummary() throws UPAException { return getModifiers().contains(FieldModifier.SUMMARY); } public List<Relationship> getManyToOneRelationships() { return manyToOneRelationships; } public List<Relationship> getOneToOneRelationships() { return oneToOneRelationships; } protected List<Relationship> getManyToOneRelationshipsImpl() { List<Relationship> relations = new ArrayList<Relationship>(); for (Relationship r : getPersistenceUnit().getRelationshipsBySource(getEntity())) { Field entityField = r.getSourceRole().getEntityField(); if (entityField != null && entityField.equals(this)) { relations.add(r); } else { List<Field> fields = 
r.getSourceRole().getFields(); for (Field field : fields) { if (field.equals(this)) { relations.add(r); } } } } return PlatformUtils.trimToSize(relations); } protected List<Relationship> getOneToOneRelationshipsImpl() { List<Relationship> relations = new ArrayList<Relationship>(); for (Relationship r : getPersistenceUnit().getRelationshipsBySource(getEntity())) { Field entityField = r.getSourceRole().getEntityField(); if (entityField != null && entityField.equals(this)) { relations.add(r); } else { List<Field> fields = r.getSourceRole().getFields(); for (Field field : fields) { if (field.equals(this)) { relations.add(r); } } } } return PlatformUtils.trimToSize(relations); } public void setFormula(Formula formula) { setPersistFormula(formula); setUpdateFormula(formula); } @Override public void setFormula(String formula) { setFormula(formula == null ? null : new ExpressionFormula(formula)); } public void setPersistFormula(Formula formula) { this.persistFormula = formula; } public void setUpdateFormula(Formula formula) { this.updateFormula = formula; } @Override public void setFormulaOrder(int order) { setPersistFormulaOrder(order); setUpdateFormulaOrder(order); } public int getUpdateFormulaOrder() { return updateFormulaOrder; } @Override public void setUpdateFormulaOrder(int order) { this.updateFormulaOrder = order; } public int getPersistFormulaOrder() { return persistFormulaOrder; } @Override public void setPersistFormulaOrder(int order) { this.persistFormulaOrder = order; } public Formula getUpdateFormula() { return updateFormula; } @Override public void setUpdateFormula(String formula) { setUpdateFormula(formula == null ? null : new ExpressionFormula(formula)); } public Formula getSelectFormula() { return queryFormula; } @Override public void setSelectFormula(String formula) { setSelectFormula(formula == null ? 
null : new ExpressionFormula(formula)); } public void setSelectFormula(Formula queryFormula) { this.queryFormula = queryFormula; } // public boolean isRequired() throws UPAException { // return (!isReadOnlyOnPersist() || !isReadOnlyOnUpdate()) && !getDataType().isNullable(); // } public String getPath() { EntityItem parent = getParent(); return parent == null ? ("/" + getName()) : (parent.getPath() + "/" + getName()); } @Override public PersistenceUnit getPersistenceUnit() { return entity.getPersistenceUnit(); } public Formula getPersistFormula() { return persistFormula; } @Override public void setPersistFormula(String formula) { setPersistFormula(formula == null ? null : new ExpressionFormula(formula)); } public Entity getEntity() { return entity; } public void setEntity(Entity entity) { this.entity = entity; } public DataType getDataType() { return dataType; } /** * called by PersistenceUnitFilter / Table You should not use it * * @param datatype datatype */ @Override public void setDataType(DataType datatype) { this.dataType = datatype; if (!getDataType().isNullable()) { _typeDefaultObject = getDataType().getDefaultValue(); } else { _typeDefaultObject = null; } } public Object getDefaultValue() { if (_customDefaultObject) { Object o = ((CustomDefaultObject) defaultObject).getObject(); if (o == null) { o = _typeDefaultObject; } return o; } else { Object o = defaultObject; if (o == null) { o = _typeDefaultObject; } return o; } } public Object getDefaultObject() { return defaultObject; } /** * called by PersistenceUnitFilter / Table You should not use it * * @param o default value witch may be san ObjectHandler */ public void setDefaultObject(Object o) { defaultObject = o; if (o instanceof CustomDefaultObject) { _customDefaultObject = true; } } public FlagSet<FieldModifier> getModifiers() { return effectiveModifiers; } public void setEffectiveModifiers(FlagSet<FieldModifier> effectiveModifiers) { this.effectiveModifiers = effectiveModifiers; } // public void 
addModifiers(long modifiers) { // setModifiers(getModifiers() | modifiers); // } // // public void removeModifiers(long modifiers) { // setModifiers(getModifiers() & ~modifiers); // } // public Expression getExpression() { // return formula == null ? null : formula.getExpression(); // } @Override public boolean equals(Object other) { return !(other == null || !(other instanceof Field)) && compareTo(other) == 0; } public int compareTo(Object other) { if (other == this) { return 0; } if (other == null) { return 1; } Field f = (Field) other; NamingStrategy comp = NamingStrategyHelper.getNamingStrategy(getEntity().getPersistenceUnit().isCaseSensitiveIdentifiers()); String s1 = entity != null ? comp.getUniformValue(entity.getName()) : ""; String s2 = f.getName() != null ? comp.getUniformValue(f.getEntity().getName()) : ""; int i = s1.compareTo(s2); if (i != 0) { return i; } else { String s3 = getName() != null ? comp.getUniformValue(getName()) : ""; String s4 = f.getName() != null ? comp.getUniformValue(f.getName()) : ""; i = s3.compareTo(s4); return i; } } @Override public FlagSet<UserFieldModifier> getUserModifiers() { return userModifiers; } // public void resetModifiers() { // modifiers = 0; // } public void setUserModifiers(FlagSet<UserFieldModifier> modifiers) { this.userModifiers = modifiers == null ? FlagSets.noneOf(UserFieldModifier.class) : modifiers; } @Override public FlagSet<UserFieldModifier> getUserExcludeModifiers() { return userExcludeModifiers; } public void setUserExcludeModifiers(FlagSet<UserFieldModifier> modifiers) { this.userExcludeModifiers = modifiers == null ? 
FlagSets.noneOf(UserFieldModifier.class) : modifiers; } @Override public String toString() { return getAbsoluteName(); } @Override<|fim▁hole|> this.closed = true; } public boolean isClosed() { return closed; } @Override public Object getUnspecifiedValue() { return unspecifiedValue; } @Override public void setUnspecifiedValue(Object o) { this.unspecifiedValue = o; } public Object getUnspecifiedValueDecoded() { final Object fuv = getUnspecifiedValue(); if (UnspecifiedValue.DEFAULT.equals(fuv)) { return getDataType().getDefaultUnspecifiedValue(); } else { return fuv; } } public boolean isUnspecifiedValue(Object value) { Object v = getUnspecifiedValueDecoded(); return (v == value || (v != null && v.equals(value))); } public AccessLevel getPersistAccessLevel() { return persistAccessLevel; } public void setPersistAccessLevel(AccessLevel persistAccessLevel) { if (PlatformUtils.isUndefinedEnumValue(AccessLevel.class, persistAccessLevel)) { persistAccessLevel = AccessLevel.READ_WRITE; } this.persistAccessLevel = persistAccessLevel; } public AccessLevel getUpdateAccessLevel() { return updateAccessLevel; } public void setUpdateAccessLevel(AccessLevel updateAccessLevel) { if (PlatformUtils.isUndefinedEnumValue(AccessLevel.class, updateAccessLevel)) { updateAccessLevel = AccessLevel.READ_WRITE; } this.updateAccessLevel = updateAccessLevel; } public AccessLevel getReadAccessLevel() { return readAccessLevel; } public void setReadAccessLevel(AccessLevel readAccessLevel) { if (PlatformUtils.isUndefinedEnumValue(AccessLevel.class, readAccessLevel)) { readAccessLevel = AccessLevel.READ_ONLY; } if (readAccessLevel == AccessLevel.READ_WRITE) { readAccessLevel = AccessLevel.READ_ONLY; } this.readAccessLevel = readAccessLevel; } public void setAccessLevel(AccessLevel accessLevel) { setPersistAccessLevel(accessLevel); setUpdateAccessLevel(accessLevel); setReadAccessLevel(accessLevel); } public ProtectionLevel getPersistProtectionLevel() { return persistProtectionLevel; } public void 
setPersistProtectionLevel(ProtectionLevel persistProtectionLevel) { if (PlatformUtils.isUndefinedEnumValue(ProtectionLevel.class, persistProtectionLevel)) { persistProtectionLevel = ProtectionLevel.PUBLIC; } this.persistProtectionLevel = persistProtectionLevel; } public ProtectionLevel getUpdateProtectionLevel() { return updateProtectionLevel; } public void setUpdateProtectionLevel(ProtectionLevel updateProtectionLevel) { if (PlatformUtils.isUndefinedEnumValue(ProtectionLevel.class, updateProtectionLevel)) { updateProtectionLevel = ProtectionLevel.PUBLIC; } this.updateProtectionLevel = updateProtectionLevel; } public ProtectionLevel getReadProtectionLevel() { return readProtectionLevel; } public void setReadProtectionLevel(ProtectionLevel readProtectionLevel) { if (PlatformUtils.isUndefinedEnumValue(ProtectionLevel.class, readProtectionLevel)) { readProtectionLevel = ProtectionLevel.PUBLIC; } this.readProtectionLevel = readProtectionLevel; } public void setProtectionLevel(ProtectionLevel persistLevel) { setPersistProtectionLevel(persistLevel); setUpdateProtectionLevel(persistLevel); setReadProtectionLevel(persistLevel); } public SearchOperator getSearchOperator() { return searchOperator; } public void setSearchOperator(SearchOperator searchOperator) { this.searchOperator = searchOperator; } public FieldPersister getFieldPersister() { return fieldPersister; } public void setFieldPersister(FieldPersister fieldPersister) { this.fieldPersister = fieldPersister; } public DataTypeTransform getTypeTransform() { return typeTransform; } @Override public DataTypeTransform getEffectiveTypeTransform() { DataTypeTransform t = getTypeTransform(); if (t == null) { DataType d = getDataType(); if (d != null) { t = new IdentityDataTypeTransform(d); } } return t; } public void setTypeTransform(DataTypeTransform transform) { this.typeTransform = transform; } public PropertyAccessType getPropertyAccessType() { return accessType; } public void setPropertyAccessType(PropertyAccessType 
accessType) { this.accessType = accessType; } @Override public Object getMainValue(Object instance) { Object v = getValue(instance); if (v != null) { Relationship manyToOneRelationship = getManyToOneRelationship(); if (manyToOneRelationship != null) { v = manyToOneRelationship.getTargetEntity().getBuilder().getMainValue(v); } } return v; } @Override public Object getValue(Object instance) { if (instance instanceof Document) { return ((Document) instance).getObject(getName()); } return getEntity().getBuilder().getProperty(instance, getName()); } @Override public void setValue(Object instance, Object value) { getEntity().getBuilder().setProperty(instance, getName(), value); } @Override public void check(Object value) { getDataType().check(value, getName(), null); } @Override public boolean isManyToOne() { return getDataType() instanceof ManyToOneType; } @Override public boolean isOneToOne() { return getDataType() instanceof OneToOneType; } @Override public ManyToOneRelationship getManyToOneRelationship() { DataType dataType = getDataType(); if (dataType instanceof ManyToOneType) { return (ManyToOneRelationship) ((ManyToOneType) dataType).getRelationship(); } return null; } @Override public OneToOneRelationship getOneToOneRelationship() { DataType dataType = getDataType(); if (dataType instanceof OneToOneType) { return (OneToOneRelationship) ((OneToOneType) dataType).getRelationship(); } return null; } protected void fillFieldInfo(FieldInfo i) { Field f = this; fillObjectInfo(i); DataTypeInfo dataType = f.getDataType() == null ? 
null : f.getDataType().getInfo(); if (dataType != null) { UPAI18n d = getPersistenceGroup().getI18nOrDefault(); if (f.getDataType() instanceof EnumType) { List<Object> values = ((EnumType) f.getDataType()).getValues(); StringBuilder v = new StringBuilder(); for (Object o : values) { if (v.length() > 0) { v.append(","); } v.append(d.getEnum(o)); } dataType.getProperties().put("titles", String.valueOf(v)); } } i.setDataType(dataType); i.setId(f.isId()); i.setGeneratedId(f.isGeneratedId()); i.setModifiers(f.getModifiers().toArray()); i.setPersistAccessLevel(f.getPersistAccessLevel()); i.setUpdateAccessLevel(f.getUpdateAccessLevel()); i.setReadAccessLevel(f.getReadAccessLevel()); i.setPersistProtectionLevel(f.getPersistProtectionLevel()); i.setUpdateProtectionLevel(f.getUpdateProtectionLevel()); i.setReadProtectionLevel(f.getReadProtectionLevel()); i.setEffectivePersistAccessLevel(f.getEffectivePersistAccessLevel()); i.setEffectiveUpdateAccessLevel(f.getEffectiveUpdateAccessLevel()); i.setEffectiveReadAccessLevel(f.getEffectiveReadAccessLevel()); i.setMain(f.isMain()); i.setSystem(f.getModifiers().contains(FieldModifier.SYSTEM)); i.setSummary(f.isSummary()); i.setManyToOne(f.isManyToOne()); i.setPropertyAccessType(f.getPropertyAccessType()); Relationship r = f.getManyToOneRelationship(); i.setManyToOneRelationship(r == null ? 
null : r.getName()); } @Override public AccessLevel getEffectiveAccessLevel(AccessMode mode) { if (!PlatformUtils.isUndefinedEnumValue(AccessMode.class, mode)) { switch (mode) { case READ: return getEffectiveReadAccessLevel(); case PERSIST: return getEffectivePersistAccessLevel(); case UPDATE: return getEffectiveUpdateAccessLevel(); } } return AccessLevel.INACCESSIBLE; } @Override public AccessLevel getAccessLevel(AccessMode mode) { if (!PlatformUtils.isUndefinedEnumValue(AccessMode.class, mode)) { switch (mode) { case READ: return getReadAccessLevel(); case PERSIST: return getPersistAccessLevel(); case UPDATE: return getUpdateAccessLevel(); } } return AccessLevel.INACCESSIBLE; } @Override public ProtectionLevel getProtectionLevel(AccessMode mode) { if (!PlatformUtils.isUndefinedEnumValue(AccessMode.class, mode)) { switch (mode) { case READ: return getReadProtectionLevel(); case PERSIST: return getPersistProtectionLevel(); case UPDATE: return getUpdateProtectionLevel(); } } return ProtectionLevel.PRIVATE; } public AccessLevel getEffectivePersistAccessLevel() { if (isSystem()) { return AccessLevel.INACCESSIBLE; } AccessLevel al = getPersistAccessLevel(); ProtectionLevel pl = getPersistProtectionLevel(); if (PlatformUtils.isUndefinedEnumValue(AccessLevel.class, al)) { al = AccessLevel.READ_WRITE; } if (PlatformUtils.isUndefinedEnumValue(ProtectionLevel.class, pl)) { pl = ProtectionLevel.PUBLIC; } boolean hasFormula = getPersistFormula() != null; if (al == AccessLevel.READ_WRITE && hasFormula) { al = AccessLevel.READ_ONLY; } switch (al) { case INACCESSIBLE: { break; } case READ_ONLY: { switch (pl) { case PRIVATE: { al = AccessLevel.INACCESSIBLE; break; } case PROTECTED: { break; } case PUBLIC: { break; } } break; } case READ_WRITE: { switch (pl) { case PRIVATE: { al = AccessLevel.READ_ONLY; break; } case PROTECTED: { if (!getPersistenceUnit().getSecurityManager().isAllowedWrite(this)) { al = AccessLevel.READ_ONLY; } break; } case PUBLIC: { break; } } break; } } if (al 
!= AccessLevel.INACCESSIBLE) { if (isGeneratedId()) { al = AccessLevel.INACCESSIBLE; } if (!getModifiers().contains(FieldModifier.PERSIST_DEFAULT)) { al = AccessLevel.INACCESSIBLE; } } return al; } public AccessLevel getEffectiveUpdateAccessLevel() { if (isSystem()) { return AccessLevel.INACCESSIBLE; } AccessLevel al = getUpdateAccessLevel(); ProtectionLevel pl = getUpdateProtectionLevel(); if (PlatformUtils.isUndefinedEnumValue(AccessLevel.class, al)) { al = AccessLevel.READ_WRITE; } if (PlatformUtils.isUndefinedEnumValue(ProtectionLevel.class, pl)) { pl = ProtectionLevel.PUBLIC; } boolean hasFormula = getUpdateFormula() != null; if (al == AccessLevel.READ_WRITE && hasFormula) { al = AccessLevel.READ_ONLY; } switch (al) { case INACCESSIBLE: { break; } case READ_ONLY: { switch (pl) { case PRIVATE: { al = AccessLevel.INACCESSIBLE; break; } case PROTECTED: { if (!getPersistenceUnit().getSecurityManager().isAllowedRead(this)) { al = AccessLevel.INACCESSIBLE; } break; } case PUBLIC: { break; } } break; } case READ_WRITE: { switch (pl) { case PRIVATE: { al = AccessLevel.READ_ONLY; break; } case PROTECTED: { if (!getPersistenceUnit().getSecurityManager().isAllowedWrite(this)) { al = AccessLevel.READ_ONLY; } if (!getPersistenceUnit().getSecurityManager().isAllowedRead(this)) { al = AccessLevel.INACCESSIBLE; } break; } case PUBLIC: { break; } } break; } } if (isId() && al == AccessLevel.READ_WRITE) { al = AccessLevel.READ_ONLY; } if (getModifiers().contains(FieldModifier.UPDATE_DEFAULT)) { // } else if (getModifiers().contains(FieldModifier.PERSIST_FORMULA) || getModifiers().contains(FieldModifier.PERSIST_SEQUENCE)) { if (al == AccessLevel.READ_WRITE) { al = AccessLevel.READ_ONLY; } } else if (getModifiers().contains(FieldModifier.PERSIST_FORMULA) || getModifiers().contains(FieldModifier.PERSIST_SEQUENCE)) { if (al == AccessLevel.READ_WRITE) { al = AccessLevel.READ_ONLY; } } return al; } public AccessLevel getEffectiveReadAccessLevel() { if (isSystem()) { return 
AccessLevel.INACCESSIBLE; } AccessLevel al = getReadAccessLevel(); ProtectionLevel pl = getReadProtectionLevel(); if (PlatformUtils.isUndefinedEnumValue(AccessLevel.class, al)) { al = AccessLevel.READ_WRITE; } if (PlatformUtils.isUndefinedEnumValue(ProtectionLevel.class, pl)) { pl = ProtectionLevel.PUBLIC; } if (al == AccessLevel.READ_WRITE) { al = AccessLevel.READ_ONLY; } if (al == AccessLevel.READ_ONLY) { if (!getModifiers().contains(FieldModifier.SELECT)) { al = AccessLevel.INACCESSIBLE; } } switch (al) { case INACCESSIBLE: { break; } case READ_ONLY: { switch (pl) { case PRIVATE: { al = AccessLevel.INACCESSIBLE; break; } case PROTECTED: { if (!getPersistenceUnit().getSecurityManager().isAllowedRead(this)) { al = AccessLevel.INACCESSIBLE; } break; } case PUBLIC: { break; } } break; } } return al; } }<|fim▁end|>
public void close() throws UPAException {
<|file_name|>bebee.py<|end_file_name|><|fim▁begin|># !/usr/bin/python # -*- coding: cp1252 -*- # ################################################################################## # # Copyright 2016-2017 Félix Brezo and Yaiza Rubio (i3visio, [email protected]) # # This program is part of OSRFramework. You can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ################################################################################## __author__ = "John Doe <[email protected]>" __version__ = "1.0" import argparse import json import re import sys import urllib2 import osrframework.utils.browser as browser from osrframework.utils.platforms import Platform class Bebee(Platform): """ A <Platform> object for Bebee. """ def __init__(self): """ Constructor... 
""" self.platformName = "Bebee" self.tags = ["jobs"] ######################## # Defining valid modes # ######################## self.isValidMode = {} self.isValidMode["phonefy"] = False self.isValidMode["usufy"] = True self.isValidMode["searchfy"] = False ###################################### # Search URL for the different modes # ###################################### # Strings with the URL for each and every mode self.url = {} #self.url["phonefy"] = "http://anyurl.com//phone/" + "<phonefy>" self.url["usufy"] = "https://bebee.com/bee/" + "<usufy>" #self.url["searchfy"] = "http://anyurl.com/search/" + "<searchfy>" ###################################### # Whether the user needs credentials # ###################################### self.needsCredentials = {} #self.needsCredentials["phonefy"] = False<|fim▁hole|> self.needsCredentials["usufy"] = False #self.needsCredentials["searchfy"] = False ################# # Valid queries # ################# # Strings that will imply that the query number is not appearing self.validQuery = {} # The regular expression '.+' will match any query #self.validQuery["phonefy"] = ".*" self.validQuery["usufy"] = ".+" #self.validQuery["searchfy"] = ".*" ################### # Not_found clues # ################### # Strings that will imply that the query number is not appearing self.notFoundText = {} #self.notFoundText["phonefy"] = [] self.notFoundText["usufy"] = ['<link rel="canonical" href="https://.bebee.com/bees/search">'] #self.notFoundText["searchfy"] = [] ######################### # Fields to be searched # ######################### self.fieldsRegExp = {} # Definition of regular expressions to be searched in phonefy mode #self.fieldsRegExp["phonefy"] = {} # Example of fields: #self.fieldsRegExp["phonefy"]["i3visio.location"] = "" # Definition of regular expressions to be searched in usufy mode self.fieldsRegExp["usufy"] = {} # Example of fields: self.fieldsRegExp["usufy"]["i3visio.fullname"] = {"start": '<title>', "end": '- 
beBee</title>'} self.fieldsRegExp["usufy"]["i3visio.location"] = {"start": '<span itemprop="addressRegion">', "end": '</span>'} self.fieldsRegExp["usufy"]["i3visio.alias.googleplus"] = {"start": '<div><a rel="nofollow" class="color_corp_three" href="https://plus.google.com/u/0/', "end": '"'} self.fieldsRegExp["usufy"]["i3visio.alias.linkedin"] = {"start": '<div><a rel="nofollow" class="color_corp_three" href="http://br.linkedin.com/in/', "end": '"'} # Definition of regular expressions to be searched in searchfy mode #self.fieldsRegExp["searchfy"] = {} # Example of fields: #self.fieldsRegExp["searchfy"]["i3visio.location"] = "" ################ # Fields found # ################ # This attribute will be feeded when running the program. self.foundFields = {}<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models from fms import constants as FC from django.utils import timezone from django.core.files import File from django.core.exceptions import ObjectDoesNotExist class DocumentCategory(models.Model): name = models.CharField(max_length=30, null=False, blank=False) code = models.CharField(max_length=5, null=False, blank=False) def __unicode__(self): return self.code+": "+self.name class DocumentSubCategory1(models.Model): cat = models.ForeignKey('DocumentCategory') name = models.CharField(max_length=30, null=False, blank=False) code = models.CharField(max_length=5, null=False, blank=False) def __unicode__(self): return self.cat.code+" / "+self.code class DocumentSubCategory2(models.Model): cat = models.ForeignKey('DocumentCategory') subcat1 = models.ForeignKey('DocumentSubCategory1') name = models.CharField(max_length=30, null=False, blank=False) code = models.CharField(max_length=5, null=False, blank=False) def __unicode__(self): return self.cat.code+" / "+self.subcat1.code+" / "+self.code # Document class Document(models.Model): name = models.CharField(max_length=120, null=False, blank=False) cat = models.ForeignKey('DocumentCategory', null=True, blank=True) # Optional subcat1 = models.ForeignKey('DocumentSubCategory1', null=True, blank=True) # Optional subcat2 = models.ForeignKey('DocumentSubCategory2', null=True, blank=True) # Optional document_number = models.CharField(max_length=15, null=True, blank=True) address = models.CharField(max_length=30, null=False, blank=False, editable=False) rack = models.ForeignKey('DocumentRack', null=True, blank=True) # CR9 - C2 avilability_status = models.BooleanField(default=True, null=False, blank=False) added_on = models.DateTimeField(null=False, editable=False) last_updated = models.DateTimeField(null=False, editable=False) def __unicode__(self):<|fim▁hole|> if not self.id: self.added_on = timezone.now() """ if self.address is not None: _ad = 
self.address tmp = _ad.split('/') self.cat = DocumentCategory.objects.get(code = tmp[0]) self.subcat1 = DocumentSubCategory1.objects.get(code = tmp[1]) if len(tmp) == 4: self.subcat2 = DocumentSubCategory1.objects.get(code = tmp[2]) self.document_nucodember = tmp[len(tmp)] """ _ad = '' if self.cat is not None: _ad += self.cat.code + "/" if self.subcat1 is not None: _ad += self.subcat1.code + "/" if self.subcat2 is not None: _ad += self.subcat2.code + "/" if self.document_number is not None: _ad += self.document_number self.address = _ad self.last_updated = timezone.now() return super(Document, self).save(*args, **kwargs) class DocumentRack(models.Model): rack_name = models.CharField(max_length=20, null=False, blank=False) # CR9 - C2`` document_type = models.CharField(max_length=30, null=True, blank=True) type = models.CharField(max_length=10, null=True, blank=True) image = models.ForeignKey('DocumentRackImage', null=True, blank=True) def __unicode__(self): return self.rack_name class DocumentRackImage(models.Model): image = models.ImageField(upload_to = 'media/racks/', default = 'media/PLAN.png') def __unicode__(self): return self.image.url # Book Category class Category(models.Model): name = models.CharField(max_length=30, null=False, blank=False) def __unicode__(self): return self.name # Book class Book(models.Model): name = models.CharField(max_length=100, null=False, blank=False) code = models.CharField(max_length=15, null=True, blank=True) categories = models.ManyToManyField('Category', related_name='category_books') author = models.CharField(max_length=60, null=True, blank=True) publisher = models.CharField(max_length=60, null=True, blank=True) isbn_number = models.CharField(max_length=25, null=True, blank=True) address = models.ForeignKey('BookAddress') avilability_status = models.BooleanField(default=True, null=False, blank=False) added_on = models.DateTimeField(null=False, editable=False) last_updated = models.DateTimeField(null=False, editable=False) 
image = models.ImageField(upload_to = 'media/books/', default = 'media/thumbnail.png') #models.ForeignKey('Image', related_name='images', null=True) def __unicode__(self): return self.name def save(self, *args, **kwargs): if not self.id: self.added_on = timezone.now() """" if not self.image: try: print "try" default_image = Image.objects.get(img = File(open('media/books/thumbnail.png'))) except ObjectDoesNotExist: print "except" default_image = Image(img = File(open('media/thumbnail.png'))) default_image.save() self.image = default_image print self.image """ self.last_updated = timezone.now() return super(Book, self).save(*args, **kwargs) class BookAddress(models.Model): rack = models.CharField(max_length=15, null=True, blank=True) row = models.CharField(max_length=10, null=True, blank=True) shelf_set = models.CharField(max_length=10, null=True, blank=True) def __unicode__(self): return self.rack """ class Image(models.Model): #name = models.CharField(max_length=10, null=False, blank=False) img = models.ImageField(upload_to = 'media/books/', default = 'media/books/thumbnail.png') def __unicode__(self): return self.img.url """ class IssueBooks(models.Model): person_name = models.CharField(max_length=60, null=False, blank=False) person_email = models.EmailField(null=True, blank=True) person_mobile_no = models.CharField(max_length=15, null=True, blank=True) person_group = models.CharField(max_length=10, choices=FC.PERSON_GROUPS, null=False, blank=False) issue_date = models.DateField(null=False, blank=False, default=timezone.now()) return_date = models.DateField(null=True, blank=True, default=timezone.now()+timezone.timedelta(7)) issued_books = models.ManyToManyField('Book', null=True) is_submitted_all_books = models.BooleanField(default=False) def __unicode__(self): return self.person_group+": "+self.person_name<|fim▁end|>
return self.address +" : "+ self.name def save(self, *args, **kwargs):
<|file_name|>make_tuple.hpp<|end_file_name|><|fim▁begin|>/*============================================================================= Copyright (c) 2001-2011 Joel de Guzman Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) ==============================================================================*/ #ifndef BOOST_PP_IS_ITERATING #if !defined(FUSION_MAKE_TUPLE_10032005_0843) #define FUSION_MAKE_TUPLE_10032005_0843 #include <boost/preprocessor/iterate.hpp> #include <boost/preprocessor/repetition/enum_params.hpp> #include <boost/preprocessor/repetition/enum_binary_params.hpp> #include <boost/fusion/tuple/detail/tuple.hpp> #include <boost/fusion/support/detail/as_fusion_element.hpp> namespace boost { namespace fusion { BOOST_FUSION_GPU_ENABLED inline tuple<> make_tuple() { return tuple<>(); } }} #if !defined(BOOST_FUSION_DONT_USE_PREPROCESSED_FILES) #include <boost/fusion/tuple/detail/preprocessed/make_tuple.hpp> #else #if defined(__WAVE__) && defined(BOOST_FUSION_CREATE_PREPROCESSED_FILES) #pragma wave option(preserve: 2, line: 0, output: "preprocessed/make_tuple" FUSION_MAX_VECTOR_SIZE_STR ".hpp") #endif /*============================================================================= Copyright (c) 2001-2011 Joel de Guzman Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) This is an auto-generated file. Do not edit! 
==============================================================================*/ #if defined(__WAVE__) && defined(BOOST_FUSION_CREATE_PREPROCESSED_FILES) #pragma wave option(preserve: 1) #endif namespace boost { namespace fusion { #define BOOST_FUSION_AS_FUSION_ELEMENT(z, n, data) \ typename detail::as_fusion_element<BOOST_PP_CAT(T, n)>::type #define BOOST_PP_FILENAME_1 <boost/fusion/tuple/detail/make_tuple.hpp> #define BOOST_PP_ITERATION_LIMITS (1, FUSION_MAX_VECTOR_SIZE) #include BOOST_PP_ITERATE() #undef BOOST_FUSION_AS_FUSION_ELEMENT }} #if defined(__WAVE__) && defined(BOOST_FUSION_CREATE_PREPROCESSED_FILES) #pragma wave option(output: null) #endif #endif // BOOST_FUSION_DONT_USE_PREPROCESSED_FILES #endif #else // defined(BOOST_PP_IS_ITERATING) /////////////////////////////////////////////////////////////////////////////// // // Preprocessor vertical repetition code // /////////////////////////////////////////////////////////////////////////////// #define N BOOST_PP_ITERATION() template <BOOST_PP_ENUM_PARAMS(N, typename T)> BOOST_FUSION_GPU_ENABLED inline tuple<BOOST_PP_ENUM(N, BOOST_FUSION_AS_FUSION_ELEMENT, _)> make_tuple(BOOST_PP_ENUM_BINARY_PARAMS(N, T, const& arg)) { <|fim▁hole|> } #undef N #endif // defined(BOOST_PP_IS_ITERATING)<|fim▁end|>
return tuple<BOOST_PP_ENUM(N, BOOST_FUSION_AS_FUSION_ELEMENT, _)>( BOOST_PP_ENUM_PARAMS(N, arg));
<|file_name|>4.9.py<|end_file_name|><|fim▁begin|># usr/bin/sh # -*- coding:utf8 -*- # @function listDeal # @parma {list} list def listDeal(list):<|fim▁hole|> listString = '' for i in range(0,len(list)): print(i) if i!=(len(list)-1): listString += list[i]+','; else: listString += 'and '+list[i] print(listString) spam = ['apples','bananas','tofu','cats'] listDeal(spam)<|fim▁end|>
<|file_name|>convert_to_tfrecords.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Copyright 2018 The Google AI Language Team Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Converts the serialized examples to TFRecords for putting into a model.""" # TODO(alanesuhr): Factor out what should be in a lib and what should be in a # binary. from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import json import os import random from absl import app from absl import flags import apache_beam as beam from language.xsp.data_preprocessing.nl_to_sql_example import NLToSQLExample from language.xsp.model.model_config import load_config import tensorflow.compat.v1 as tf FLAGS = flags.FLAGS flags.DEFINE_string('examples_dir', '', 'The directory containing the examples.') flags.DEFINE_list('filenames', None, 'The list of files to process containing NLToSQLExamples.') flags.DEFINE_string('config', '', 'The path to a model config file.') flags.DEFINE_string('tf_examples_dir', '', 'The location to put the Tensorflow examples.') flags.DEFINE_string('output_vocab', '', 'The location of the output vocabulary.') flags.DEFINE_bool('permute', False, 'Whether to permute the train schemas.') flags.DEFINE_bool('generate_output', False, 'Whether to generate output sequences.') flags.DEFINE_integer( 'num_spider_repeats', 7, 'The number of times to permute the Spider data tables (for train only).') BEG_TOK = '[CLS]' SEP_TOK = 
'[SEP]' TAB_TOK = '[TAB]' UNK_TOK = '[UNK]' GENERATE_TYPE = 1 COPY_TYPE = 2 COL_TYPE_TO_TOK = { 'text': '[STR_COL]', 'number': '[NUM_COL]', 'others': '[OTH_COL]', 'time': '[TIME_COL]', 'boolean': '[BOOL_COL]', } class InputToken( collections.namedtuple('InputToken', [ 'wordpiece', 'index', 'copy_mask', 'segment_id', 'indicates_foreign_key', 'aligned' ])): pass class OutputAction( collections.namedtuple('OutputAction', ['wordpiece', 'action_id', 'type'])): pass def add_context(key): """Adds context features required by the model.""" features = dict() features['language'] = tf.train.Feature( bytes_list=tf.train.BytesList(value=[b'en'])) features['region'] = tf.train.Feature( bytes_list=tf.train.BytesList(value=[b'US_eng'])) features['type'] = tf.train.Feature(int64_list=tf.train.Int64List(value=[1])) features['weight'] = tf.train.Feature( float_list=tf.train.FloatList(value=[1.0])) features['tag'] = tf.train.Feature( bytes_list=tf.train.BytesList(value=[b'all'])) features['key'] = tf.train.Feature( bytes_list=tf.train.BytesList(value=[key.encode('utf8')])) return features class ConvertToSequenceExampleDoFn(beam.DoFn): """DoFn for converting from NLToSQLExample to a TFRecord.""" def __init__(self, model_config, generate_output, permute, num_repeats, *unused_args, **unused_kwargs): self.model_config = model_config self.input_vocabulary = None self.output_vocabulary = None self.permute = permute self.num_repeats = num_repeats if not self.permute and self.num_repeats > 1: raise ValueError('Not permuting but num_repeats = ' + str(self.num_repeats)) # This cache maps from a proto representing a schema to its string # equivalent # (NOTE: this assumes there's no randomness in the order of the tables, # cols, etc.) 
self.table_cache = dict() self.generate_output = generate_output def non_parallel_process(self, example): # Load cache if not self.input_vocabulary: with tf.gfile.Open( self.model_config.data_options.bert_vocab_path) as infile: self.input_vocabulary = [ line.rstrip('\n') for line in infile.readlines() ] if not self.output_vocabulary: with tf.gfile.Open(FLAGS.output_vocab) as infile: self.output_vocabulary = [ line.replace('\n', '', 1) for line in infile.readlines() ] results = list() for _ in range(self.num_repeats): # Convert the input to an indexed sequence input_conversion = self._convert_input_to_indexed_sequence( example.model_input, random_permutation=self.permute) if input_conversion is None: return None # input_tokens stores the raw wordpieces, its index in the vocabulary, and # whether it is copiable # The maps store tuples of table or column entities paired with their head # index in input_tokens input_tokens, table_index_map, column_index_map, base_idx = input_conversion # Convert the output to an indexed sequence output_actions = list() if self.generate_output: output_actions = self._convert_output_to_indexed_sequence( example, table_index_map, column_index_map, base_idx) if output_actions is None: return None raw_input_wordpieces = [ input_token.wordpiece for input_token in input_tokens ] for action in output_actions: if action.type == COPY_TYPE: # Copy actions should only either # 1. Copy from the input (i.e., before SEP) # 2. 
Copy TAB or COL tokens assert input_tokens[ action.action_id].index == self.input_vocabulary.index( TAB_TOK) or input_tokens[action.action_id].index in [ self.input_vocabulary.index(col_tok) for col_tok in COL_TYPE_TO_TOK.values() ] or action.action_id < raw_input_wordpieces.index( SEP_TOK ), 'Unexpected copying action: %r with proto:\n%r' % ( input_tokens[action.action_id], example) assert input_tokens[action.action_id].copy_mask == 1, ( 'Copied, but copy mask is 0: %s at ' 'index %d; copied action was %s') % ( input_tokens[action.action_id], action.action_id, action)<|fim▁hole|> # Actually create the TF Example results.append( self._convert_to_sequence_example( input_tokens, output_actions, example.model_input.original_utterance).SerializeToString()) return results def process(self, example): results = self.non_parallel_process(example) if results is not None: for result in results: yield result def _convert_input_to_sequence_example(self, input_tokens, features): features['source_wordpieces'] = tf.train.FeatureList(feature=[ tf.train.Feature( int64_list=tf.train.Int64List(value=[input_token.index])) for input_token in input_tokens ]) features['copiable_input'] = tf.train.FeatureList(feature=[ tf.train.Feature( int64_list=tf.train.Int64List(value=[input_token.copy_mask])) for input_token in input_tokens ]) copy_features = list() foreign_key_features = list() for input_token in input_tokens: copy_features.append( tf.train.Feature( bytes_list=tf.train.BytesList( value=[input_token.wordpiece.encode('utf8')]))) foreign_key_features.append( tf.train.Feature( int64_list=tf.train.Int64List( value=[input_token.indicates_foreign_key]))) features['copy_strings'] = tf.train.FeatureList(feature=copy_features) features['segment_ids'] = tf.train.FeatureList(feature=[ tf.train.Feature( int64_list=tf.train.Int64List(value=[input_token.segment_id])) for input_token in input_tokens ]) features['indicates_foreign_key'] = tf.train.FeatureList( feature=foreign_key_features) 
features['utterance_schema_alignment'] = tf.train.FeatureList(feature=[ tf.train.Feature( int64_list=tf.train.Int64List(value=[input_token.aligned])) for input_token in input_tokens ]) def _convert_output_to_sequence_example(self, output_actions, features): features['target_action_ids'] = tf.train.FeatureList(feature=[ tf.train.Feature( int64_list=tf.train.Int64List(value=[action.action_id])) for action in output_actions ]) features['target_action_types'] = tf.train.FeatureList(feature=[ tf.train.Feature(int64_list=tf.train.Int64List(value=[action.type])) for action in output_actions ]) def _convert_to_sequence_example(self, input_tokens, output_actions, utterance): features = collections.OrderedDict() self._convert_input_to_sequence_example(input_tokens, features) self._convert_output_to_sequence_example(output_actions, features) context_features = add_context(utterance) return tf.train.SequenceExample( context=tf.train.Features(feature=context_features), feature_lists=tf.train.FeatureLists(feature_list=features)) def _get_vocab_index_or_unk(self, token, is_input=True): # Note that this will return a 'Unicode equals warning' if the token is a # unicode-only token if is_input: if token in self.input_vocabulary: return self.input_vocabulary.index(token) return self.input_vocabulary.index(UNK_TOK) if token in self.output_vocabulary: # Add 3 to this because there are 3 placeholder tokens in the output # vocabulary that will be used during train (PAD, BEG, and END). return self.output_vocabulary.index(token) + 3 print('Could not find token ' + token.encode('ascii', 'ignore') + ' in output vocabulary.') def _convert_input_to_indexed_sequence(self, model_input, random_permutation): # Everything is tokenized, but need to combine the utterance with the # schema. 
converted_wordpiece_tokens = list() for wordpiece in model_input.utterance_wordpieces: converted_wordpiece_tokens.append( InputToken(('##' if '##' in wordpiece.wordpiece else '') + model_input.original_utterance[ wordpiece.span_start_index:wordpiece.span_end_index], self._get_vocab_index_or_unk(wordpiece.wordpiece), 1, 0, 0, int(wordpiece.matches_to_schema))) tokens = [ InputToken(BEG_TOK, self.input_vocabulary.index(BEG_TOK), 0, 0, 0, 0) ] + converted_wordpiece_tokens + [ InputToken(SEP_TOK, self.input_vocabulary.index(SEP_TOK), 0, 0, 0, 0) ] table_index_map = list() column_index_map = list() # Add the table tokens # Look it up in the cache string_serial = ','.join([str(table) for table in model_input.tables]) if string_serial in self.table_cache and not random_permutation: tokens_suffix, table_index_map, column_index_map = self.table_cache[ string_serial] else: # The input tokens contain the string to copy, rather than the wordpiece # that's being embedded. tokens_suffix = list() order = list(range(len(model_input.tables))) if random_permutation: random.shuffle(order) for table_segment_idx, table_idx in enumerate(order): table = model_input.tables[table_idx] table_index_map.append((len(tokens_suffix), table)) table_wordpieces_tokens = list() for wordpiece in table.table_name_wordpieces: table_wordpieces_tokens.append( InputToken('', self._get_vocab_index_or_unk(wordpiece.wordpiece), 0, table_segment_idx + 1, 0, int(table.matches_to_utterance))) tokens_suffix.extend([ InputToken( table.original_table_name, self.input_vocabulary.index(TAB_TOK), 1, table_segment_idx + 1, 0, int(table.matches_to_utterance)) ] + table_wordpieces_tokens) col_order = list(range(len(table.table_columns))) if random_permutation: random.shuffle(col_order) # Add the column tokens for this table for col_idx in col_order: column = table.table_columns[col_idx] column_index_map.append((len(tokens_suffix), column)) column_wordpiece_tokens = list() for wordpiece in column.column_name_wordpieces: 
column_wordpiece_tokens.append( InputToken('', self._get_vocab_index_or_unk(wordpiece.wordpiece), 0, table_segment_idx + 1, int(column.is_foreign_key), int(column.matches_to_utterance))) tokens_suffix.extend([ InputToken( column.original_column_name, self.input_vocabulary.index(COL_TYPE_TO_TOK[ column.column_type]), 1, table_segment_idx + 1, int(column.is_foreign_key), int(column.matches_to_utterance)) ] + column_wordpiece_tokens) # Update cache if not random_permutation: self.table_cache[string_serial] = (tokens_suffix, table_index_map, column_index_map) base_idx = len(tokens) tokens.extend(tokens_suffix) # If there are too many tokens, return None. if len(tokens) > self.model_config.data_options.max_num_tokens: return None return tokens, table_index_map, column_index_map, base_idx def _convert_output_to_indexed_sequence(self, example, table_index_map, column_index_map, base_idx): action_sequence = list() gold_query = example.gold_sql_query if len( gold_query.actions) > self.model_config.data_options.max_decode_length: return None for action in gold_query.actions: if action.symbol: action_sequence.append( OutputAction(action.symbol, self._get_vocab_index_or_unk(action.symbol, False), GENERATE_TYPE)) elif action.entity_copy: found = False if action.entity_copy.copied_table: # Copied a table. table = action.entity_copy.copied_table for index, entity in table_index_map: if entity.original_table_name == table.original_table_name: action_sequence.append( OutputAction(table.original_table_name, index + base_idx, COPY_TYPE)) found = True break else: # Copied a column. 
column = action.entity_copy.copied_column for index, entity in column_index_map: if entity.original_column_name == column.original_column_name and entity.table_name == column.table_name: action_sequence.append( OutputAction(column.original_column_name, index + base_idx, COPY_TYPE)) found = True break if not found: return None elif action.utterance_copy: copy_wordpiece = action.utterance_copy action_sequence.append( OutputAction(copy_wordpiece.wordpiece, copy_wordpiece.tokenized_index + 1, COPY_TYPE)) if None in [action.action_id for action in action_sequence]: return None return action_sequence def creation_wrapper(process_dataset_fn): """Wrapper for creating the TFRecords files.""" # Create the tf examples directory. if not tf.gfile.IsDirectory(FLAGS.tf_examples_dir): print('Creating TFExamples directory at ' + FLAGS.tf_examples_dir) tf.gfile.MkDir(FLAGS.tf_examples_dir) # Get the model config. model_config = load_config(FLAGS.config) for filename in FLAGS.filenames: if not filename: continue input_path = os.path.join(FLAGS.examples_dir, filename) output_path = os.path.join( FLAGS.tf_examples_dir, filename.split('/')[-1].split('.')[0] + '.tfrecords') permute = 'spider_train' in output_path and FLAGS.permute num_repeats = FLAGS.num_spider_repeats if permute else 1 print('Processing %s. 
Permute: %r with %d repetitions' % (filename, permute, num_repeats)) print('Writing to ' + output_path) process_dataset_fn(input_path, model_config, permute, num_repeats, output_path) def process_dataset(input_path, model_config, permute, num_repeats, output_path): """Function that processes a dataset without multiprocessing.""" fn = ConvertToSequenceExampleDoFn( model_config, FLAGS.generate_output, permute=permute, num_repeats=num_repeats) with tf.gfile.Open(input_path) as infile: examples = [NLToSQLExample().from_json(json.loads(line)) for line in infile] with tf.python_io.TFRecordWriter(output_path) as writer: num_examples_written = 0 total_examples = 0 for example in examples: total_examples += 1 converteds = fn.non_parallel_process(example) if converteds: num_examples_written += 1 for converted in converteds: writer.write(converted) print('Wrote to %d / %d to %s' % (num_examples_written, total_examples, output_path)) def main(unused_argv): creation_wrapper(process_dataset) if __name__ == '__main__': app.run(main)<|fim▁end|>
<|file_name|>tabTitle.d.ts<|end_file_name|><|fim▁begin|>/// <reference types="react" /> import * as React from "react"; import { ITab2Props, TabId } from "./tab2"; export interface ITabTitleProps extends ITab2Props { /** Handler invoked when this tab is clicked. */ onClick: (id: TabId, event: React.MouseEvent<HTMLElement>) => void; /** ID of the parent `Tabs` to which this tab belongs. Used to generate ID for ARIA attributes. */<|fim▁hole|>} export declare class TabTitle extends React.Component<ITabTitleProps, {}> { static displayName: string; render(): JSX.Element; private handleClick; } export declare function generateTabPanelId(parentId: TabId, tabId: TabId): string; export declare function generateTabTitleId(parentId: TabId, tabId: TabId): string;<|fim▁end|>
parentId: TabId; /** Whether the tab is currently selected. */ selected: boolean;
<|file_name|>abbreviationAction.test.ts<|end_file_name|><|fim▁begin|>/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import 'mocha'; import * as assert from 'assert'; import { Selection, workspace, CompletionList, CancellationTokenSource, CompletionTriggerKind, ConfigurationTarget } from 'vscode'; import { withRandomFileEditor, closeAllEditors } from './testUtils'; import { expandEmmetAbbreviation } from '../abbreviationActions'; import { DefaultCompletionItemProvider } from '../defaultCompletionProvider'; const completionProvider = new DefaultCompletionItemProvider(); const htmlContents = ` <body class="header"> <ul class="nav main"> <li class="item1">img</li> <li class="item2">hithere</li> ul>li ul>li*2 ul>li.item$*2 ul>li.item$@44*2 <div i </ul> <style> .boo { display: dn; m10 } </style> <span></span> (ul>li.item$)*2 (ul>li.item$)*2+span (div>dl>(dt+dd)*2) <script type="text/html"> span.hello </script> <script type="text/javascript"> span.hello </script> </body> `; suite('Tests for Expand Abbreviations (HTML)', () => { const oldValueForExcludeLanguages = workspace.getConfiguration('emmet').inspect('excludeLanguages'); teardown(() => { // close all editors return closeAllEditors; }); test('Expand snippets (HTML)', () => { return testExpandAbbreviation('html', new Selection(3, 23, 3, 23), 'img', '<img src=\"\" alt=\"\">'); }); test('Expand snippets in completion list (HTML)', () => { return testHtmlCompletionProvider(new Selection(3, 23, 3, 23), 'img', '<img src=\"\" alt=\"\">'); }); test('Expand snippets when no parent node (HTML)', () => { return withRandomFileEditor('img', 'html', (editor, doc) => { editor.selection = new Selection(0, 3, 0, 3); return 
expandEmmetAbbreviation(null).then(() => { assert.equal(editor.document.getText(), '<img src=\"\" alt=\"\">'); return Promise.resolve(); }); }); }); test('Expand snippets when no parent node in completion list (HTML)', () => { return withRandomFileEditor('img', 'html', (editor, doc) => { editor.selection = new Selection(0, 3, 0, 3); const cancelSrc = new CancellationTokenSource(); const completionPromise = completionProvider.provideCompletionItems(editor.document, editor.selection.active, cancelSrc.token, { triggerKind: CompletionTriggerKind.Invoke }); if (!completionPromise) { assert.equal(!completionPromise, false, `Got unexpected undefined instead of a completion promise`); return Promise.resolve(); } return completionPromise.then(completionList => { assert.equal(completionList && completionList.items && completionList.items.length > 0, true); if (completionList) { assert.equal(completionList.items[0].label, 'img'); assert.equal((<string>completionList.items[0].documentation || '').replace(/\|/g, ''), '<img src=\"\" alt=\"\">'); } return Promise.resolve(); }); }); }); test('Expand abbreviation (HTML)', () => { return testExpandAbbreviation('html', new Selection(5, 25, 5, 25), 'ul>li', '<ul>\n\t\t\t<li></li>\n\t\t</ul>'); }); test('Expand abbreviation in completion list (HTML)', () => { return testHtmlCompletionProvider(new Selection(5, 25, 5, 25), 'ul>li', '<ul>\n\t<li></li>\n</ul>'); }); test('Expand text that is neither an abbreviation nor a snippet to tags (HTML)', () => { return testExpandAbbreviation('html', new Selection(4, 20, 4, 27), 'hithere', '<hithere></hithere>'); }); test('Do not Expand text that is neither an abbreviation nor a snippet to tags in completion list (HTML)', () => { return testHtmlCompletionProvider(new Selection(4, 20, 4, 27), 'hithere', '<hithere></hithere>', true); }); test('Expand abbreviation with repeaters (HTML)', () => { return testExpandAbbreviation('html', new Selection(6, 27, 6, 27), 'ul>li*2', 
'<ul>\n\t\t\t<li></li>\n\t\t\t<li></li>\n\t\t</ul>'); }); test('Expand abbreviation with repeaters in completion list (HTML)', () => { return testHtmlCompletionProvider(new Selection(6, 27, 6, 27), 'ul>li*2', '<ul>\n\t<li></li>\n\t<li></li>\n</ul>'); }); test('Expand abbreviation with numbered repeaters (HTML)', () => { return testExpandAbbreviation('html', new Selection(7, 33, 7, 33), 'ul>li.item$*2', '<ul>\n\t\t\t<li class="item1"></li>\n\t\t\t<li class="item2"></li>\n\t\t</ul>'); }); test('Expand abbreviation with numbered repeaters in completion list (HTML)', () => { return testHtmlCompletionProvider(new Selection(7, 33, 7, 33), 'ul>li.item$*2', '<ul>\n\t<li class="item1"></li>\n\t<li class="item2"></li>\n</ul>'); }); test('Expand abbreviation with numbered repeaters with offset (HTML)', () => { return testExpandAbbreviation('html', new Selection(8, 36, 8, 36), 'ul>li.item$@44*2', '<ul>\n\t\t\t<li class="item44"></li>\n\t\t\t<li class="item45"></li>\n\t\t</ul>'); }); test('Expand abbreviation with numbered repeaters with offset in completion list (HTML)', () => { return testHtmlCompletionProvider(new Selection(8, 36, 8, 36), 'ul>li.item$@44*2', '<ul>\n\t<li class="item44"></li>\n\t<li class="item45"></li>\n</ul>'); }); test('Expand abbreviation with numbered repeaters in groups (HTML)', () => { return testExpandAbbreviation('html', new Selection(17, 16, 17, 16), '(ul>li.item$)*2', '<ul>\n\t\t<li class="item1"></li>\n\t</ul>\n\t<ul>\n\t\t<li class="item2"></li>\n\t</ul>'); }); test('Expand abbreviation with numbered repeaters in groups in completion list (HTML)', () => { return testHtmlCompletionProvider(new Selection(17, 16, 17, 16), '(ul>li.item$)*2', '<ul>\n\t<li class="item1"></li>\n</ul>\n<ul>\n\t<li class="item2"></li>\n</ul>'); }); test('Expand abbreviation with numbered repeaters in groups with sibling in the end (HTML)', () => { return testExpandAbbreviation('html', new Selection(18, 21, 18, 21), '(ul>li.item$)*2+span', '<ul>\n\t\t<li 
class="item1"></li>\n\t</ul>\n\t<ul>\n\t\t<li class="item2"></li>\n\t</ul>\n\t<span></span>'); }); test('Expand abbreviation with numbered repeaters in groups with sibling in the end in completion list (HTML)', () => { return testHtmlCompletionProvider(new Selection(18, 21, 18, 21), '(ul>li.item$)*2+span', '<ul>\n\t<li class="item1"></li>\n</ul>\n<ul>\n\t<li class="item2"></li>\n</ul>\n<span></span>'); }); test('Expand abbreviation with nested groups (HTML)', () => { return testExpandAbbreviation('html', new Selection(19, 19, 19, 19), '(div>dl>(dt+dd)*2)', '<div>\n\t\t<dl>\n\t\t\t<dt></dt>\n\t\t\t<dd></dd>\n\t\t\t<dt></dt>\n\t\t\t<dd></dd>\n\t\t</dl>\n\t</div>'); }); test('Expand abbreviation with nested groups in completion list (HTML)', () => { return testHtmlCompletionProvider(new Selection(19, 19, 19, 19), '(div>dl>(dt+dd)*2)', '<div>\n\t<dl>\n\t\t<dt></dt>\n\t\t<dd></dd>\n\t\t<dt></dt>\n\t\t<dd></dd>\n\t</dl>\n</div>'); }); test('Expand tag that is opened, but not closed (HTML)', () => { return testExpandAbbreviation('html', new Selection(9, 6, 9, 6), '<div', '<div></div>'); }); test('Do not Expand tag that is opened, but not closed in completion list (HTML)', () => { return testHtmlCompletionProvider(new Selection(9, 6, 9, 6), '<div', '<div></div>', true); }); test('No expanding text inside open tag (HTML)', () => { return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = new Selection(2, 4, 2, 4); return expandEmmetAbbreviation(null).then(() => { assert.equal(editor.document.getText(), htmlContents); return Promise.resolve(); }); }); }); test('No expanding text inside open tag in completion list (HTML)', () => { return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = new Selection(2, 4, 2, 4); const cancelSrc = new CancellationTokenSource(); const completionPromise = completionProvider.provideCompletionItems(editor.document, editor.selection.active, cancelSrc.token, { triggerKind: 
CompletionTriggerKind.Invoke }); assert.equal(!completionPromise, true, `Got unexpected comapletion promise instead of undefined`); return Promise.resolve(); }); }); test('No expanding text inside open tag when there is no closing tag (HTML)', () => { return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = new Selection(9, 8, 9, 8); return expandEmmetAbbreviation(null).then(() => { assert.equal(editor.document.getText(), htmlContents); return Promise.resolve(); }); }); }); test('No expanding text inside open tag when there is no closing tag in completion list (HTML)', () => { return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = new Selection(9, 8, 9, 8); const cancelSrc = new CancellationTokenSource(); const completionPromise = completionProvider.provideCompletionItems(editor.document, editor.selection.active, cancelSrc.token, { triggerKind: CompletionTriggerKind.Invoke }); assert.equal(!completionPromise, true, `Got unexpected comapletion promise instead of undefined`); return Promise.resolve(); }); }); test('No expanding text inside open tag when there is no closing tag when there is no parent node (HTML)', () => { const fileContents = '<img s'; return withRandomFileEditor(fileContents, 'html', (editor, doc) => { editor.selection = new Selection(0, 6, 0, 6); return expandEmmetAbbreviation(null).then(() => { assert.equal(editor.document.getText(), fileContents); return Promise.resolve(); }); }); }); test('No expanding text in completion list inside open tag when there is no closing tag when there is no parent node (HTML)', () => { const fileContents = '<img s'; return withRandomFileEditor(fileContents, 'html', (editor, doc) => { editor.selection = new Selection(0, 6, 0, 6); const cancelSrc = new CancellationTokenSource(); const completionPromise = completionProvider.provideCompletionItems(editor.document, editor.selection.active, cancelSrc.token, { triggerKind: CompletionTriggerKind.Invoke }); 
assert.equal(!completionPromise, true, `Got unexpected comapletion promise instead of undefined`); return Promise.resolve(); }); }); test('Expand css when inside style tag (HTML)', () => { return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = new Selection(13, 16, 13, 19); let expandPromise = expandEmmetAbbreviation({ language: 'css' }); if (!expandPromise) { return Promise.resolve(); } return expandPromise.then(() => { assert.equal(editor.document.getText(), htmlContents.replace('m10', 'margin: 10px;')); return Promise.resolve(); }); }); }); test('Expand css when inside style tag in completion list (HTML)', () => { const abbreviation = 'm10'; const expandedText = 'margin: 10px;'; return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = new Selection(13, 16, 13, 19); const cancelSrc = new CancellationTokenSource(); const completionPromise = completionProvider.provideCompletionItems(editor.document, editor.selection.active, cancelSrc.token, { triggerKind: CompletionTriggerKind.Invoke }); if (!completionPromise) { assert.equal(1, 2, `Problem with expanding m10`); return Promise.resolve(); } return completionPromise.then((completionList: CompletionList) => { if (!completionList.items || !completionList.items.length) { assert.equal(1, 2, `Problem with expanding m10`); return Promise.resolve(); } const emmetCompletionItem = completionList.items[0]; assert.equal(emmetCompletionItem.label, expandedText, `Label of completion item doesnt match.`); assert.equal((<string>emmetCompletionItem.documentation || '').replace(/\|/g, ''), expandedText, `Docs of completion item doesnt match.`); assert.equal(emmetCompletionItem.filterText, abbreviation, `FilterText of completion item doesnt match.`); return Promise.resolve(); }); }); }); test('No expanding text inside style tag if position is not for property name (HTML)', () => { return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = new 
Selection(13, 14, 13, 14); return expandEmmetAbbreviation(null).then(() => { assert.equal(editor.document.getText(), htmlContents); return Promise.resolve(); }); }); }); test('No expanding text in completion list inside style tag if position is not for property name (HTML)', () => { return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = new Selection(13, 14, 13, 14); const cancelSrc = new CancellationTokenSource(); const completionPromise = completionProvider.provideCompletionItems(editor.document, editor.selection.active, cancelSrc.token, { triggerKind: CompletionTriggerKind.Invoke }); assert.equal(!completionPromise, true, `Got unexpected comapletion promise instead of undefined`); return Promise.resolve(); }); }); test('Expand css when inside style attribute (HTML)', () => { const styleAttributeContent = '<div style="m10" class="hello"></div>'; return withRandomFileEditor(styleAttributeContent, 'html', (editor, doc) => { editor.selection = new Selection(0, 15, 0, 15); let expandPromise = expandEmmetAbbreviation(null); if (!expandPromise) { return Promise.resolve(); } return expandPromise.then(() => { assert.equal(editor.document.getText(), styleAttributeContent.replace('m10', 'margin: 10px;')); return Promise.resolve(); }); }); }); test('Expand css when inside style attribute in completion list (HTML)', () => { const abbreviation = 'm10'; const expandedText = 'margin: 10px;'; return withRandomFileEditor('<div style="m10" class="hello"></div>', 'html', (editor, doc) => { editor.selection = new Selection(0, 15, 0, 15); const cancelSrc = new CancellationTokenSource(); const completionPromise = completionProvider.provideCompletionItems(editor.document, editor.selection.active, cancelSrc.token, { triggerKind: CompletionTriggerKind.Invoke }); if (!completionPromise) { assert.equal(1, 2, `Problem with expanding m10`); return Promise.resolve(); } return completionPromise.then((completionList: CompletionList) => { if 
(!completionList.items || !completionList.items.length) { assert.equal(1, 2, `Problem with expanding m10`); return Promise.resolve(); } const emmetCompletionItem = completionList.items[0]; assert.equal(emmetCompletionItem.label, expandedText, `Label of completion item doesnt match.`); assert.equal((<string>emmetCompletionItem.documentation || '').replace(/\|/g, ''), expandedText, `Docs of completion item doesnt match.`); assert.equal(emmetCompletionItem.filterText, abbreviation, `FilterText of completion item doesnt match.`); return Promise.resolve(); }); }); }); test('Expand html when inside script tag with html type (HTML)', () => { return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = new Selection(21, 12, 21, 12); let expandPromise = expandEmmetAbbreviation(null); if (!expandPromise) { return Promise.resolve(); } return expandPromise.then(() => { assert.equal(editor.document.getText(), htmlContents.replace('span.hello', '<span class="hello"></span>')); return Promise.resolve(); }); }); }); test('Expand html when inside script tag with html type (HTML)', () => { const abbreviation = 'span.hello'; const expandedText = '<span class="hello"></span>'; return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = new Selection(21, 12, 21, 12); const cancelSrc = new CancellationTokenSource(); const completionPromise = completionProvider.provideCompletionItems(editor.document, editor.selection.active, cancelSrc.token, { triggerKind: CompletionTriggerKind.Invoke }); if (!completionPromise) { assert.equal(1, 2, `Problem with expanding span.hello`); return Promise.resolve(); } return completionPromise.then((completionList: CompletionList) => { if (!completionList.items || !completionList.items.length) { assert.equal(1, 2, `Problem with expanding span.hello`); return Promise.resolve(); } const emmetCompletionItem = completionList.items[0]; assert.equal(emmetCompletionItem.label, abbreviation, `Label of completion 
item doesnt match.`); assert.equal((<string>emmetCompletionItem.documentation || '').replace(/\|/g, ''), expandedText, `Docs of completion item doesnt match.`); return Promise.resolve(); }); }); }); test('No expanding text inside script tag with javascript type (HTML)', () => { return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = new Selection(24, 12, 24, 12); return expandEmmetAbbreviation(null).then(() => { assert.equal(editor.document.getText(), htmlContents); return Promise.resolve(); }); }); }); test('No expanding text in completion list inside script tag with javascript type (HTML)', () => { return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = new Selection(24, 12, 24, 12); const cancelSrc = new CancellationTokenSource(); const completionPromise = completionProvider.provideCompletionItems(editor.document, editor.selection.active, cancelSrc.token, { triggerKind: CompletionTriggerKind.Invoke }); assert.equal(!completionPromise, true, `Got unexpected comapletion promise instead of undefined`); return Promise.resolve(); }); }); // test('No expanding when html is excluded in the settings', () => { // return workspace.getConfiguration('emmet').update('excludeLanguages', ['html'], ConfigurationTarget.Global).then(() => { // return testExpandAbbreviation('html', new Selection(9, 6, 9, 6), '', '', true).then(() => { // return workspace.getConfiguration('emmet').update('excludeLanguages', oldValueForExcludeLanguages ? 
oldValueForExcludeLanguages.globalValue : undefined, ConfigurationTarget.Global); // }); // }); // }); test('No expanding when html is excluded in the settings in completion list', () => { return workspace.getConfiguration('emmet').update('excludeLanguages', ['html'], ConfigurationTarget.Global).then(() => { return testHtmlCompletionProvider(new Selection(9, 6, 9, 6), '', '', true).then(() => { return workspace.getConfiguration('emmet').update('excludeLanguages', oldValueForExcludeLanguages ? oldValueForExcludeLanguages.globalValue : undefined, ConfigurationTarget.Global); }); }); }); // test('No expanding when php (mapped syntax) is excluded in the settings', () => { // return workspace.getConfiguration('emmet').update('excludeLanguages', ['php'], ConfigurationTarget.Global).then(() => { // return testExpandAbbreviation('php', new Selection(9, 6, 9, 6), '', '', true).then(() => { // return workspace.getConfiguration('emmet').update('excludeLanguages', oldValueForExcludeLanguages ? oldValueForExcludeLanguages.globalValue : undefined, ConfigurationTarget.Global); // }); // }); // }); }); suite('Tests for jsx, xml and xsl', () => { const oldValueForSyntaxProfiles = workspace.getConfiguration('emmet').inspect('syntaxProfiles'); teardown(closeAllEditors); test('Expand abbreviation with className instead of class in jsx', () => { return withRandomFileEditor('ul.nav', 'javascriptreact', (editor, doc) => { editor.selection = new Selection(0, 6, 0, 6); return expandEmmetAbbreviation({ language: 'javascriptreact' }).then(() => { assert.equal(editor.document.getText(), '<ul className="nav"></ul>'); return Promise.resolve(); }); }); }); test('Expand abbreviation with self closing tags for jsx', () => { return withRandomFileEditor('img', 'javascriptreact', (editor, doc) => { editor.selection = new Selection(0, 6, 0, 6); return expandEmmetAbbreviation({ language: 'javascriptreact' }).then(() => { assert.equal(editor.document.getText(), '<img src="" alt=""/>'); return 
Promise.resolve(); }); }); }); test('Expand abbreviation with single quotes for jsx', () => { return workspace.getConfiguration('emmet').update('syntaxProfiles', {jsx: {"attr_quotes": "single"}}, ConfigurationTarget.Global).then(() => { return withRandomFileEditor('img', 'javascriptreact', (editor, doc) => { editor.selection = new Selection(0, 6, 0, 6); return expandEmmetAbbreviation({ language: 'javascriptreact' }).then(() => { assert.equal(editor.document.getText(), '<img src=\'\' alt=\'\'/>'); return workspace.getConfiguration('emmet').update('syntaxProfiles', oldValueForSyntaxProfiles ? oldValueForSyntaxProfiles.globalValue : undefined, ConfigurationTarget.Global); }); }); }); }); test('Expand abbreviation with self closing tags for xml', () => { return withRandomFileEditor('img', 'xml', (editor, doc) => { editor.selection = new Selection(0, 6, 0, 6); return expandEmmetAbbreviation({ language: 'xml' }).then(() => { assert.equal(editor.document.getText(), '<img src="" alt=""/>'); return Promise.resolve(); }); }); }); test('Expand abbreviation with no self closing tags for html', () => { return withRandomFileEditor('img', 'html', (editor, doc) => { editor.selection = new Selection(0, 6, 0, 6); return expandEmmetAbbreviation({ language: 'html' }).then(() => { assert.equal(editor.document.getText(), '<img src="" alt="">'); return Promise.resolve(); }); }); }); test('No expanding text inside open tag in completion list (jsx)', () => { return testNoCompletion('jsx', htmlContents, new Selection(2, 4, 2, 4)); }); test('No expanding tag that is opened, but not closed in completion list (jsx)', () => { return testNoCompletion('jsx', htmlContents, new Selection(9, 6, 9, 6)); }); test('No expanding text inside open tag when there is no closing tag in completion list (jsx)', () => { return testNoCompletion('jsx', htmlContents, new Selection(9, 8, 9, 8)); }); test('No expanding text in completion list inside open tag when there is no closing tag when there is no parent node 
(jsx)', () => { return testNoCompletion('jsx', '<img s', new Selection(0, 6, 0, 6)); }); }); function testExpandAbbreviation(syntax: string, selection: Selection, abbreviation: string, expandedText: string, shouldFail?: boolean): Thenable<any> { return withRandomFileEditor(htmlContents, syntax, (editor, doc) => { editor.selection = selection; let expandPromise = expandEmmetAbbreviation(null); if (!expandPromise) { if (!shouldFail) { assert.equal(1, 2, `Problem with expanding ${abbreviation} to ${expandedText}`); } return Promise.resolve(); } return expandPromise.then(() => { assert.equal(editor.document.getText(), htmlContents.replace(abbreviation, expandedText)); return Promise.resolve(); }); }); } function testHtmlCompletionProvider(selection: Selection, abbreviation: string, expandedText: string, shouldFail?: boolean): Thenable<any> { return withRandomFileEditor(htmlContents, 'html', (editor, doc) => { editor.selection = selection; const cancelSrc = new CancellationTokenSource(); const completionPromise = completionProvider.provideCompletionItems(editor.document, editor.selection.active, cancelSrc.token, { triggerKind: CompletionTriggerKind.Invoke }); if (!completionPromise) { if (!shouldFail) { assert.equal(1, 2, `Problem with expanding ${abbreviation} to ${expandedText}`); } return Promise.resolve(); } return completionPromise.then((completionList: CompletionList) => { if (!completionList.items || !completionList.items.length) { if (!shouldFail) { assert.equal(1, 2, `Problem with expanding ${abbreviation} to ${expandedText}`); } return Promise.resolve(); } const emmetCompletionItem = completionList.items[0]; assert.equal(emmetCompletionItem.label, abbreviation, `Label of completion item doesnt match.`); assert.equal((<string>emmetCompletionItem.documentation || '').replace(/\|/g, ''), expandedText, `Docs of completion item doesnt match.`); return Promise.resolve(); }); }); } function testNoCompletion(syntax: string, fileContents: string, selection: Selection): 
Thenable<any> { return withRandomFileEditor(fileContents, syntax, (editor, doc) => {<|fim▁hole|> editor.selection = selection; const cancelSrc = new CancellationTokenSource(); const completionPromise = completionProvider.provideCompletionItems(editor.document, editor.selection.active, cancelSrc.token, { triggerKind: CompletionTriggerKind.Invoke }); assert.equal(!completionPromise, true, `Got unexpected comapletion promise instead of undefined`); return Promise.resolve(); }); }<|fim▁end|>
<|file_name|>types.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ twython.streaming.types ~~~~~~~~~~~~~~~~~~~~~~~ This module contains classes and methods for :class:`TwythonStreamer` to use. """ class TwythonStreamerTypes(object): """Class for different stream endpoints Not all streaming endpoints have nested endpoints. User Streams and Site Streams are single streams with no nested endpoints Status Streams include filter, sample and firehose endpoints """ def __init__(self, streamer): self.streamer = streamer self.statuses = TwythonStreamerTypesStatuses(streamer) def user(self, **params): """Stream user Accepted params found at: https://dev.twitter.com/docs/api/1.1/get/user """ url = 'https://userstream.twitter.com/%s/user.json' \ % self.streamer.api_version self.streamer._request(url, params=params) <|fim▁hole|> https://dev.twitter.com/docs/api/1.1/get/site """ url = 'https://sitestream.twitter.com/%s/site.json' \ % self.streamer.api_version self.streamer._request(url, params=params) class TwythonStreamerTypesStatuses(object): """Class for different statuses endpoints Available so TwythonStreamer.statuses.filter() is available. Just a bit cleaner than TwythonStreamer.statuses_filter(), statuses_sample(), etc. 
all being single methods in TwythonStreamer """ def __init__(self, streamer): self.streamer = streamer def filter(self, **params): """Stream statuses/filter :param \*\*params: Paramters to send with your stream request Accepted params found at: https://dev.twitter.com/docs/api/1.1/post/statuses/filter """ url = 'https://stream.twitter.com/%s/statuses/filter.json' \ % self.streamer.api_version self.streamer._request(url, 'POST', params=params) def sample(self, **params): """Stream statuses/sample :param \*\*params: Paramters to send with your stream request Accepted params found at: https://dev.twitter.com/docs/api/1.1/get/statuses/sample """ url = 'https://stream.twitter.com/%s/statuses/sample.json' \ % self.streamer.api_version self.streamer._request(url, params=params) def firehose(self, **params): """Stream statuses/firehose :param \*\*params: Paramters to send with your stream request Accepted params found at: https://dev.twitter.com/docs/api/1.1/get/statuses/firehose """ url = 'https://stream.twitter.com/%s/statuses/firehose.json' \ % self.streamer.api_version self.streamer._request(url, params=params)<|fim▁end|>
def site(self, **params): """Stream site Accepted params found at:
<|file_name|>config.example.js<|end_file_name|><|fim▁begin|>'use strict'; exports.port = process.env.PORT || 3000; exports.mongodb = { uri: process.env.MONGOLAB_URI || process.env.MONGOHQ_URL || 'localhost/lulucrawler' }; exports.getThisUrl = ''; exports.companyName = ''; exports.projectName = 'luluCrawler'; exports.systemEmail = '[email protected]'; exports.cryptoKey = 'k3yb0ardc4t'; exports.loginAttempts = { forIp: 50, forIpAndUser: 7, logExpiration: '20m' }; exports.smtp = { from: { name: process.env.SMTP_FROM_NAME || exports.projectName +' Website',<|fim▁hole|> credentials: { user: process.env.SMTP_USERNAME || '[email protected]', password: process.env.SMTP_PASSWORD || 'bl4rg!', host: process.env.SMTP_HOST || 'smtp.gmail.com', ssl: true } };<|fim▁end|>
address: process.env.SMTP_FROM_ADDRESS || '[email protected]' },
<|file_name|>test_transforms.py<|end_file_name|><|fim▁begin|>import os import os.path as op import pytest import numpy as np from numpy.testing import (assert_array_equal, assert_equal, assert_allclose, assert_array_less, assert_almost_equal) import itertools import mne from mne.datasets import testing from mne.fixes import _get_img_fdata from mne import read_trans, write_trans from mne.io import read_info from mne.transforms import (invert_transform, _get_trans, rotation, rotation3d, rotation_angles, _find_trans, combine_transforms, apply_trans, translation, get_ras_to_neuromag_trans, _pol_to_cart, quat_to_rot, rot_to_quat, _angle_between_quats, _find_vector_rotation, _sph_to_cart, _cart_to_sph, _topo_to_sph, _average_quats, _SphericalSurfaceWarp as SphericalSurfaceWarp, rotation3d_align_z_axis, _read_fs_xfm, _write_fs_xfm, _quat_real, _fit_matched_points, _quat_to_euler, _euler_to_quat, _quat_to_affine, _compute_r2, _validate_pipeline) from mne.utils import requires_nibabel, requires_dipy data_path = testing.data_path(download=False) fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-trans.fif') fname_eve = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw-eve.fif') subjects_dir = op.join(data_path, 'subjects') fname_t1 = op.join(subjects_dir, 'fsaverage', 'mri', 'T1.mgz') base_dir = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data') fname_trans = op.join(base_dir, 'sample-audvis-raw-trans.txt') test_fif_fname = op.join(base_dir, 'test_raw.fif') ctf_fname = op.join(base_dir, 'test_ctf_raw.fif') hp_fif_fname = op.join(base_dir, 'test_chpi_raw_sss.fif') def test_tps(): """Test TPS warping.""" az = np.linspace(0., 2 * np.pi, 20, endpoint=False) pol = np.linspace(0, np.pi, 12)[1:-1] sph = np.array(np.meshgrid(1, az, pol, indexing='ij')) sph.shape = (3, -1) assert_equal(sph.shape[1], 200) source = _sph_to_cart(sph.T) destination = source.copy() destination *= 2 destination[:, 0] += 1 # fit with 100 points warp = SphericalSurfaceWarp() 
assert 'no ' in repr(warp) warp.fit(source[::3], destination[::2]) assert 'oct5' in repr(warp) destination_est = warp.transform(source) assert_allclose(destination_est, destination, atol=1e-3) @testing.requires_testing_data def test_get_trans(): """Test converting '-trans.txt' to '-trans.fif'.""" trans = read_trans(fname) trans = invert_transform(trans) # starts out as head->MRI, so invert trans_2 = _get_trans(fname_trans)[0] assert trans.__eq__(trans_2, atol=1e-5) @testing.requires_testing_data def test_io_trans(tmpdir): """Test reading and writing of trans files.""" tempdir = str(tmpdir) os.mkdir(op.join(tempdir, 'sample')) pytest.raises(RuntimeError, _find_trans, 'sample', subjects_dir=tempdir) trans0 = read_trans(fname) fname1 = op.join(tempdir, 'sample', 'test-trans.fif') trans0.save(fname1) assert fname1 == _find_trans('sample', subjects_dir=tempdir) trans1 = read_trans(fname1) # check all properties assert trans0 == trans1 # check reading non -trans.fif files pytest.raises(IOError, read_trans, fname_eve) # check warning on bad filenames fname2 = op.join(tempdir, 'trans-test-bad-name.fif') with pytest.warns(RuntimeWarning, match='-trans.fif'): write_trans(fname2, trans0) def test_get_ras_to_neuromag_trans(): """Test the coordinate transformation from ras to neuromag.""" # create model points in neuromag-like space rng = np.random.RandomState(0) anterior = [0, 1, 0] left = [-1, 0, 0] right = [.8, 0, 0] up = [0, 0, 1] rand_pts = rng.uniform(-1, 1, (3, 3)) pts = np.vstack((anterior, left, right, up, rand_pts)) # change coord system rx, ry, rz, tx, ty, tz = rng.uniform(-2 * np.pi, 2 * np.pi, 6) trans = np.dot(translation(tx, ty, tz), rotation(rx, ry, rz)) pts_changed = apply_trans(trans, pts) # transform back into original space nas, lpa, rpa = pts_changed[:3] hsp_trans = get_ras_to_neuromag_trans(nas, lpa, rpa) pts_restored = apply_trans(hsp_trans, pts_changed) err = "Neuromag transformation failed" assert_allclose(pts_restored, pts, atol=1e-6, err_msg=err) def 
_cartesian_to_sphere(x, y, z): """Convert using old function.""" hypotxy = np.hypot(x, y) r = np.hypot(hypotxy, z) elev = np.arctan2(z, hypotxy) az = np.arctan2(y, x) return az, elev, r def _sphere_to_cartesian(theta, phi, r): """Convert using old function.""" z = r * np.sin(phi) rcos_phi = r * np.cos(phi) x = rcos_phi * np.cos(theta) y = rcos_phi * np.sin(theta) return x, y, z def test_sph_to_cart(): """Test conversion between sphere and cartesian.""" # Simple test, expected value (11, 0, 0) r, theta, phi = 11., 0., np.pi / 2. z = r * np.cos(phi) rsin_phi = r * np.sin(phi) x = rsin_phi * np.cos(theta) y = rsin_phi * np.sin(theta) coord = _sph_to_cart(np.array([[r, theta, phi]]))[0] assert_allclose(coord, (x, y, z), atol=1e-7) assert_allclose(coord, (r, 0, 0), atol=1e-7) rng = np.random.RandomState(0) # round-trip test coords = rng.randn(10, 3) assert_allclose(_sph_to_cart(_cart_to_sph(coords)), coords, atol=1e-5) # equivalence tests to old versions for coord in coords: sph = _cart_to_sph(coord[np.newaxis]) cart = _sph_to_cart(sph) sph_old = np.array(_cartesian_to_sphere(*coord)) cart_old = _sphere_to_cartesian(*sph_old) sph_old[1] = np.pi / 2. 
- sph_old[1] # new convention assert_allclose(sph[0], sph_old[[2, 0, 1]], atol=1e-7) assert_allclose(cart[0], cart_old, atol=1e-7) assert_allclose(cart[0], coord, atol=1e-7) def _polar_to_cartesian(theta, r): """Transform polar coordinates to cartesian.""" x = r * np.cos(theta) y = r * np.sin(theta) return x, y def test_polar_to_cartesian(): """Test helper transform function from polar to cartesian.""" r = 1 theta = np.pi # expected values are (-1, 0) x = r * np.cos(theta) y = r * np.sin(theta) coord = _pol_to_cart(np.array([[r, theta]]))[0]<|fim▁hole|> assert_allclose(coord, (x, y), atol=1e-7) assert_allclose(coord, (-1, 0), atol=1e-7) assert_allclose(coord, _polar_to_cartesian(theta, r), atol=1e-7) rng = np.random.RandomState(0) r = rng.randn(10) theta = rng.rand(10) * (2 * np.pi) polar = np.array((r, theta)).T assert_allclose([_polar_to_cartesian(p[1], p[0]) for p in polar], _pol_to_cart(polar), atol=1e-7) def _topo_to_phi_theta(theta, radius): """Convert using old function.""" sph_phi = (0.5 - radius) * 180 sph_theta = -theta return sph_phi, sph_theta def test_topo_to_sph(): """Test topo to sphere conversion.""" rng = np.random.RandomState(0) angles = rng.rand(10) * 360 radii = rng.rand(10) angles[0] = 30 radii[0] = 0.25 # new way sph = _topo_to_sph(np.array([angles, radii]).T) new = _sph_to_cart(sph) new[:, [0, 1]] = new[:, [1, 0]] * [-1, 1] # old way for ii, (angle, radius) in enumerate(zip(angles, radii)): sph_phi, sph_theta = _topo_to_phi_theta(angle, radius) if ii == 0: assert_allclose(_topo_to_phi_theta(angle, radius), [45, -30]) azimuth = sph_theta / 180.0 * np.pi elevation = sph_phi / 180.0 * np.pi assert_allclose(sph[ii], [1., azimuth, np.pi / 2. - elevation], atol=1e-7) r = np.ones_like(radius) x, y, z = _sphere_to_cartesian(azimuth, elevation, r) pos = [-y, x, z] if ii == 0: expected = np.array([1. 
/ 2., np.sqrt(3) / 2., 1.]) expected /= np.sqrt(2) assert_allclose(pos, expected, atol=1e-7) assert_allclose(pos, new[ii], atol=1e-7) def test_rotation(): """Test conversion between rotation angles and transformation matrix.""" tests = [(0, 0, 1), (.5, .5, .5), (np.pi, 0, -1.5)] for rot in tests: x, y, z = rot m = rotation3d(x, y, z) m4 = rotation(x, y, z) assert_array_equal(m, m4[:3, :3]) back = rotation_angles(m) assert_almost_equal(actual=back, desired=rot, decimal=12) back4 = rotation_angles(m4) assert_almost_equal(actual=back4, desired=rot, decimal=12) def test_rotation3d_align_z_axis(): """Test rotation3d_align_z_axis.""" # The more complex z axis fails the assert presumably due to tolerance # inp_zs = [[0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 0, -1], [-0.75071668, -0.62183808, 0.22302888]] exp_res = [[[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]], [[1., 0., 0.], [0., 0., 1.], [0., -1., 0.]], [[0., 0., 1.], [0., 1., 0.], [-1., 0., 0.]], [[1., 0., 0.], [0., -1., 0.], [0., 0., -1.]], [[0.53919688, -0.38169517, -0.75071668], [-0.38169517, 0.683832, -0.62183808], [0.75071668, 0.62183808, 0.22302888]]] for res, z in zip(exp_res, inp_zs): assert_allclose(res, rotation3d_align_z_axis(z), atol=1e-7) @testing.requires_testing_data def test_combine(): """Test combining transforms.""" trans = read_trans(fname) inv = invert_transform(trans) combine_transforms(trans, inv, trans['from'], trans['from']) pytest.raises(RuntimeError, combine_transforms, trans, inv, trans['to'], trans['from']) pytest.raises(RuntimeError, combine_transforms, trans, inv, trans['from'], trans['to']) pytest.raises(RuntimeError, combine_transforms, trans, trans, trans['from'], trans['to']) def test_quaternions(): """Test quaternion calculations.""" rots = [np.eye(3)] for fname in [test_fif_fname, ctf_fname, hp_fif_fname]: rots += [read_info(fname)['dev_head_t']['trans'][:3, :3]] # nasty numerical cases rots += [np.array([ [-0.99978541, -0.01873462, -0.00898756], [-0.01873462, 0.62565561, 0.77987608], 
[-0.00898756, 0.77987608, -0.62587152], ])] rots += [np.array([ [0.62565561, -0.01873462, 0.77987608], [-0.01873462, -0.99978541, -0.00898756], [0.77987608, -0.00898756, -0.62587152], ])] rots += [np.array([ [-0.99978541, -0.00898756, -0.01873462], [-0.00898756, -0.62587152, 0.77987608], [-0.01873462, 0.77987608, 0.62565561], ])] for rot in rots: assert_allclose(rot, quat_to_rot(rot_to_quat(rot)), rtol=1e-5, atol=1e-5) rot = rot[np.newaxis, np.newaxis, :, :] assert_allclose(rot, quat_to_rot(rot_to_quat(rot)), rtol=1e-5, atol=1e-5) # let's make sure our angle function works in some reasonable way for ii in range(3): for jj in range(3): a = np.zeros(3) b = np.zeros(3) a[ii] = 1. b[jj] = 1. expected = np.pi if ii != jj else 0. assert_allclose(_angle_between_quats(a, b), expected, atol=1e-5) y_180 = np.array([[-1, 0, 0], [0, 1, 0], [0, 0, -1.]]) assert_allclose(_angle_between_quats(rot_to_quat(y_180), np.zeros(3)), np.pi) h_180_attitude_90 = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1.]]) assert_allclose(_angle_between_quats(rot_to_quat(h_180_attitude_90), np.zeros(3)), np.pi) def test_vector_rotation(): """Test basic rotation matrix math.""" x = np.array([1., 0., 0.]) y = np.array([0., 1., 0.]) rot = _find_vector_rotation(x, y) assert_array_equal(rot, [[0, -1, 0], [1, 0, 0], [0, 0, 1]]) quat_1 = rot_to_quat(rot) quat_2 = rot_to_quat(np.eye(3)) assert_allclose(_angle_between_quats(quat_1, quat_2), np.pi / 2.) def test_average_quats(): """Test averaging of quaternions.""" sq2 = 1. / np.sqrt(2.) 
quats = np.array([[0, sq2, sq2], [0, sq2, sq2], [0, sq2, 0], [0, 0, sq2], [sq2, 0, 0]], float) # In MATLAB: # quats = [[0, sq2, sq2, 0]; [0, sq2, sq2, 0]; # [0, sq2, 0, sq2]; [0, 0, sq2, sq2]; [sq2, 0, 0, sq2]]; expected = [quats[0], quats[0], [0, 0.788675134594813, 0.577350269189626], [0, 0.657192299694123, 0.657192299694123], [0.100406058540540, 0.616329446922803, 0.616329446922803]] # Averaging the first two should give the same thing: for lim, ex in enumerate(expected): assert_allclose(_average_quats(quats[:lim + 1]), ex, atol=1e-7) quats[1] *= -1 # same quaternion (hidden value is zero here)! rot_0, rot_1 = quat_to_rot(quats[:2]) assert_allclose(rot_0, rot_1, atol=1e-7) for lim, ex in enumerate(expected): assert_allclose(_average_quats(quats[:lim + 1]), ex, atol=1e-7) # Assert some symmetry count = 0 extras = [[sq2, sq2, 0]] + list(np.eye(3)) for quat in np.concatenate((quats, expected, extras)): if np.isclose(_quat_real(quat), 0., atol=1e-7): # can flip sign count += 1 angle = _angle_between_quats(quat, -quat) assert_allclose(angle, 0., atol=1e-7) rot_0, rot_1 = quat_to_rot(np.array((quat, -quat))) assert_allclose(rot_0, rot_1, atol=1e-7) assert count == 4 + len(extras) @testing.requires_testing_data @pytest.mark.parametrize('subject', ('fsaverage', 'sample')) def test_fs_xfm(subject, tmpdir): """Test reading and writing of Freesurfer transforms.""" fname = op.join(data_path, 'subjects', subject, 'mri', 'transforms', 'talairach.xfm') xfm, kind = _read_fs_xfm(fname) if subject == 'fsaverage': assert_allclose(xfm, np.eye(4), atol=1e-5) # fsaverage is in MNI assert kind == 'MNI Transform File' tempdir = str(tmpdir) fname_out = op.join(tempdir, 'out.xfm') _write_fs_xfm(fname_out, xfm, kind) xfm_read, kind_read = _read_fs_xfm(fname_out) assert kind_read == kind assert_allclose(xfm, xfm_read, rtol=1e-5, atol=1e-5) # Some wacky one xfm[:3] = np.random.RandomState(0).randn(3, 4) _write_fs_xfm(fname_out, xfm, 'foo') xfm_read, kind_read = _read_fs_xfm(fname_out) assert 
kind_read == 'foo' assert_allclose(xfm, xfm_read, rtol=1e-5, atol=1e-5) # degenerate conditions with open(fname_out, 'w') as fid: fid.write('foo') with pytest.raises(ValueError, match='Failed to find'): _read_fs_xfm(fname_out) _write_fs_xfm(fname_out, xfm[:2], 'foo') with pytest.raises(ValueError, match='Could not find'): _read_fs_xfm(fname_out) @pytest.fixture() def quats(): """Make some unit quats.""" quats = np.random.RandomState(0).randn(5, 3) quats[:, 0] = 0 # identity quats /= 2 * np.linalg.norm(quats, axis=1, keepdims=True) # some real part return quats def _check_fit_matched_points( p, x, weights, do_scale, angtol=1e-5, dtol=1e-5, stol=1e-7): __tracebackhide__ = True mne.coreg._ALLOW_ANALITICAL = False try: params = mne.coreg.fit_matched_points( p, x, weights=weights, scale=do_scale, out='params') finally: mne.coreg._ALLOW_ANALITICAL = True quat_an, scale_an = _fit_matched_points(p, x, weights, scale=do_scale) assert len(params) == 6 + int(do_scale) q_co = _euler_to_quat(params[:3]) translate_co = params[3:6] angle = np.rad2deg(_angle_between_quats(quat_an[:3], q_co)) dist = np.linalg.norm(quat_an[3:] - translate_co) assert 0 <= angle < angtol, 'angle' assert 0 <= dist < dtol, 'dist' if do_scale: scale_co = params[6] assert_allclose(scale_an, scale_co, rtol=stol, err_msg='scale') # errs trans = _quat_to_affine(quat_an) trans[:3, :3] *= scale_an weights = np.ones(1) if weights is None else weights err_an = np.linalg.norm( weights[:, np.newaxis] * apply_trans(trans, p) - x) trans = mne.coreg._trans_from_params((True, True, do_scale), params) err_co = np.linalg.norm( weights[:, np.newaxis] * apply_trans(trans, p) - x) if err_an > 1e-14: assert err_an < err_co * 1.5 return quat_an, scale_an @pytest.mark.parametrize('scaling', [0.25, 1]) @pytest.mark.parametrize('do_scale', (True, False)) def test_fit_matched_points(quats, scaling, do_scale): """Test analytical least-squares matched point fitting.""" if scaling != 1 and not do_scale: return # no need to test 
this, it will not be good rng = np.random.RandomState(0) fro = rng.randn(10, 3) translation = rng.randn(3) for qi, quat in enumerate(quats): to = scaling * np.dot(quat_to_rot(quat), fro.T).T + translation for corrupted in (False, True): # mess up a point if corrupted: to[0, 2] += 100 weights = np.ones(len(to)) weights[0] = 0 else: weights = None est, scale_est = _check_fit_matched_points( fro, to, weights=weights, do_scale=do_scale) assert_allclose(scale_est, scaling, rtol=1e-5) assert_allclose(est[:3], quat, atol=1e-14) assert_allclose(est[3:], translation, atol=1e-14) # if we don't adjust for the corruption above, it should get worse angle = dist = None for weighted in (False, True): if not weighted: weights = None dist_bounds = (5, 20) if scaling == 1: angle_bounds = (5, 95) angtol, dtol, stol = 1, 15, 3 else: angle_bounds = (5, 105) angtol, dtol, stol = 20, 15, 3 else: weights = np.ones(len(to)) weights[0] = 10 # weighted=True here means "make it worse" angle_bounds = (angle, 180) # unweighted values as new min dist_bounds = (dist, 100) if scaling == 1: # XXX this angtol is not great but there is a hard to # identify linalg/angle calculation bug on Travis... 
angtol, dtol, stol = 180, 70, 3 else: angtol, dtol, stol = 50, 70, 3 est, scale_est = _check_fit_matched_points( fro, to, weights=weights, do_scale=do_scale, angtol=angtol, dtol=dtol, stol=stol) assert not np.allclose(est[:3], quat, atol=1e-5) assert not np.allclose(est[3:], translation, atol=1e-5) angle = np.rad2deg(_angle_between_quats(est[:3], quat)) assert_array_less(angle_bounds[0], angle) assert_array_less(angle, angle_bounds[1]) dist = np.linalg.norm(est[3:] - translation) assert_array_less(dist_bounds[0], dist) assert_array_less(dist, dist_bounds[1]) def test_euler(quats): """Test euler transformations.""" euler = _quat_to_euler(quats) quats_2 = _euler_to_quat(euler) assert_allclose(quats, quats_2, atol=1e-14) quat_rot = quat_to_rot(quats) euler_rot = np.array([rotation(*e)[:3, :3] for e in euler]) assert_allclose(quat_rot, euler_rot, atol=1e-14) @requires_nibabel() @requires_dipy() @pytest.mark.slowtest @testing.requires_testing_data def test_volume_registration(): """Test volume registration.""" import nibabel as nib from dipy.align import resample T1 = nib.load(fname_t1) affine = np.eye(4) affine[0, 3] = 10 T1_resampled = resample(moving=T1.get_fdata(), static=T1.get_fdata(), moving_affine=T1.affine, static_affine=T1.affine, between_affine=np.linalg.inv(affine)) for pipeline in ('rigids', ('translation', 'sdr')): reg_affine, sdr_morph = mne.transforms.compute_volume_registration( T1_resampled, T1, pipeline=pipeline, zooms=10, niter=[5]) assert_allclose(affine, reg_affine, atol=0.25) T1_aligned = mne.transforms.apply_volume_registration( T1_resampled, T1, reg_affine, sdr_morph) r2 = _compute_r2(_get_img_fdata(T1_aligned), _get_img_fdata(T1)) assert 99.9 < r2 # check that all orders of the pipeline work for pipeline_len in range(1, 5): for pipeline in itertools.combinations( ('translation', 'rigid', 'affine', 'sdr'), pipeline_len): _validate_pipeline(pipeline) _validate_pipeline(list(pipeline)) with pytest.raises(ValueError, match='Steps in pipeline are 
out of order'): _validate_pipeline(('sdr', 'affine')) with pytest.raises(ValueError, match='Steps in pipeline should not be repeated'): _validate_pipeline(('affine', 'affine'))<|fim▁end|>
# np.pi is an approx since pi is irrational
<|file_name|>setext_header.rs<|end_file_name|><|fim▁begin|>use regex::Regex; use parser::Block; use parser::Block::Header;<|fim▁hole|> let HORIZONTAL_RULE_1 = Regex::new(r"^===+$").unwrap(); let HORIZONTAL_RULE_2 = Regex::new(r"^---+$").unwrap(); if lines.len() > 1 { if HORIZONTAL_RULE_1.is_match(lines[1]){ return Some((Header(parse_spans(lines[0]), 1), 2)); }else if HORIZONTAL_RULE_2.is_match(lines[1]){ return Some((Header(parse_spans(lines[0]), 2), 2)); } } return None; } #[cfg(test)] mod test { use super::parse_setext_header; use parser::Block::Header; use parser::Span::Text; #[test] fn finds_atx_header() { assert_eq!( parse_setext_header(&vec!["Test", "=========="]).unwrap(), (Header(vec![Text("Test".to_string())], 1), 2) ); assert_eq!( parse_setext_header(&vec!["Test", "----------"]).unwrap(), (Header(vec![Text("Test".to_string())], 2), 2) ); assert_eq!( parse_setext_header(&vec!["This is a test", "==="]).unwrap(), (Header(vec![Text("This is a test".to_string())], 1), 2) ); assert_eq!( parse_setext_header(&vec!["This is a test", "---"]).unwrap(), (Header(vec![Text("This is a test".to_string())], 2), 2) ); } }<|fim▁end|>
use parser::span::parse_spans; pub fn parse_setext_header(lines: &[&str]) -> Option<(Block, usize)> {
<|file_name|>MainActivity.java<|end_file_name|><|fim▁begin|>package com.nidzo.filetransfer; import android.content.Intent; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.text.Editable; import android.text.TextWatcher; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.CheckBox; import android.widget.EditText; import android.widget.ListView; import android.widget.ProgressBar; public class MainActivity extends AppCompatActivity { private Communicator communicator; private PeerListAdapter peersAdapter; private ListView peerList; private EditText deviceName; private ProgressBar progressIndicator; private FileHandling fileHandling; private String selectedPeerGuid; private Intent fileToSendIntent; private CheckBox enableEncryption; private TextWatcher deviceNameChangedWatcher = new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void onTextChanged(CharSequence s, int start, int before, int count) { } @Override public void afterTextChanged(Editable s) { Identification.setName(deviceName.getText().toString(), getApplicationContext()); } }; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Intent intent = getIntent(); if (intent.getAction().equals(Intent.ACTION_SEND)) { fileToSendIntent = intent; } setContentView(R.layout.activity_main); } @Override protected void onResume() { super.onResume();<|fim▁hole|> peersAdapter = new PeerListAdapter(this); peerList = (ListView) findViewById(R.id.peerList); peerList.setAdapter(peersAdapter); deviceName = (EditText) findViewById(R.id.deviceName); progressIndicator = (ProgressBar) findViewById(R.id.progressIndicator); enableEncryption = (CheckBox)findViewById(R.id.enableEncryptionCheckbox); progressStop(); deviceName.setText(Identification.getName(this)); deviceName.addTextChangedListener(deviceNameChangedWatcher); 
try { communicator = new Communicator(this); communicator.discoverPeers(); fileHandling = new FileHandling(this); updatePeerList(); } catch (FileTransferException e) { DialogBoxes.showMessageBox("Error", "Failed to start " + e.getMessage(), this); } } @Override protected void onPause() { super.onPause(); communicator.halt(); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.menu_main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == R.id.refreshPeers) { communicator.discoverPeers(); } return true; } public void updatePeerList() { runOnUiThread(new Runnable() { @Override public void run() { peersAdapter.reset(communicator.getPeers()); } }); } public void progressIndeterminate() { progressIndicator.setVisibility(View.VISIBLE); progressIndicator.setIndeterminate(true); } public void progressReport(double progress) { progressIndicator.setIndeterminate(false); progressIndicator.setVisibility(View.VISIBLE); int progressValue = (int) (progress * 100); progressIndicator.setProgress(progressValue); } public void progressStop() { progressIndicator.setIndeterminate(false); progressIndicator.setVisibility(View.INVISIBLE); progressIndicator.setProgress(0); } public void deletePeer(String guid) { try { communicator.deletePeer(guid); } catch (FileTransferException e) { DialogBoxes.showMessageBox("Error", e.getMessage(), this); } } public void unpairPeer(String guid) { try { communicator.unpairPeer(guid); } catch (FileTransferException e) { DialogBoxes.showMessageBox("Error", e.getMessage(), this); } } public void pair(String guid) { communicator.pair(guid); } public void sendFile(String guid) { selectedPeerGuid = guid; if (fileToSendIntent != null) { communicator.sendFile(guid, fileToSendIntent, enableEncryption.isChecked()); fileToSendIntent = null; } else fileHandling.selectFileToSend(); } @Override protected void onActivityResult(int requestCode, int resultCode, 
Intent result) { if (requestCode == FileHandling.FILE_SELECT_REQUEST_CODE) { if (resultCode == RESULT_OK) { communicator.sendFile(selectedPeerGuid, result, enableEncryption.isChecked()); } } } public void fileReceived(final java.io.File file) { runOnUiThread(new Runnable() { @Override public void run() { fileHandling.offerToOpenFile(file); } }); } }<|fim▁end|>
<|file_name|>Console.java<|end_file_name|><|fim▁begin|>package com.evanbyrne.vending_machine_kata.ui; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; import java.util.Scanner; import java.util.SortedMap; import org.jooq.lambda.tuple.Tuple2; import com.evanbyrne.vending_machine_kata.coin.Cents; import com.evanbyrne.vending_machine_kata.coin.Coin; import com.evanbyrne.vending_machine_kata.coin.CoinCollection; import com.evanbyrne.vending_machine_kata.coin.CoinFactory; import com.evanbyrne.vending_machine_kata.inventory.IInventoryService; import com.evanbyrne.vending_machine_kata.inventory.InventoryProduct; /** * Manages console input and output. */ public class Console { /** * Get change display for terminal output. * * @param Change. * @return Formatted message. */ public String getChangeDisplay(final CoinCollection change) { final ArrayList<String> display = new ArrayList<String>(); final LinkedHashMap<Coin, Integer> coinMap = new LinkedHashMap<Coin, Integer>(); display.add("THANK YOU"); for(final Coin coin : change.getList()) { if(coinMap.containsKey(coin)) { coinMap.put(coin, coinMap.get(coin) + 1); } else { coinMap.put(coin, 1); } } if(!coinMap.isEmpty()) { final ArrayList<String> displayReturn = new ArrayList<String>(); for(final Map.Entry<Coin, Integer> entry : coinMap.entrySet()) { final String name = entry.getKey().name().toLowerCase(); final int count = entry.getValue(); displayReturn.add(String.format("%s (x%d)", name, count)); } display.add("RETURN: " + String.join(", ", displayReturn)); } return String.join("\n", display); } /** * Get product listing display for terminal output. * * @param Sorted map of all inventory. * @return Formatted message. 
*/ public String getProductDisplay(final SortedMap<String, InventoryProduct> inventory) { if(!inventory.isEmpty()) { final ArrayList<String> display = new ArrayList<String>(); for(final Map.Entry<String, InventoryProduct> entry : inventory.entrySet()) { final String centsString = Cents.toString(entry.getValue().getCents()); final String name = entry.getValue().getName(); final String key = entry.getKey(); display.add(String.format("%s\t%s\t%s", key, centsString, name)); } return String.join("\n", display); } return "No items in vending machine."; } /** * Prompt user for payment. * * Loops until payment >= selected product cost. * * @param Scanner. * @param A product representing their selection. * @return Payment. */ public CoinCollection promptForPayment(final Scanner scanner, final InventoryProduct selection) { final CoinCollection paid = new CoinCollection(); Coin insert; String input; do { System.out.println("PRICE: " + Cents.toString(selection.getCents())); do { System.out.print(String.format("INSERT COIN (%s): ", Cents.toString(paid.getTotal()))); input = scanner.nextLine(); insert = CoinFactory.getByName(input); if(insert == null) { System.out.println("Invalid coin. This machine accepts: quarter, dime, nickel."); } } while(insert == null); paid.addCoin(insert); } while(paid.getTotal() < selection.getCents()); <|fim▁hole|> } /** * Prompt for product selection. * * Loops until a valid product has been selected. * * @param Scanner * @param An implementation of IInventoryService. * @return A tuple with the product key and product. 
*/ public Tuple2<String, InventoryProduct> promptForSelection(final Scanner scanner, final IInventoryService inventoryService) { InventoryProduct selection; String input; do { System.out.print("SELECT: "); input = scanner.nextLine(); selection = inventoryService.getProduct(input); if( selection == null ) { System.out.println("Invalid selection."); } } while(selection == null); return new Tuple2<String, InventoryProduct>(input, selection); } }<|fim▁end|>
return paid;
<|file_name|>unsafe_removed_from_name.rs<|end_file_name|><|fim▁begin|>use clippy_utils::diagnostics::span_lint; use rustc_ast::ast::{Item, ItemKind, UseTree, UseTreeKind}; use rustc_lint::{EarlyContext, EarlyLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::source_map::Span; use rustc_span::symbol::Ident; declare_clippy_lint! {<|fim▁hole|> /// ### Why is this bad? /// Renaming makes it less clear which traits and /// structures are unsafe. /// /// ### Example /// ```rust,ignore /// use std::cell::{UnsafeCell as TotallySafeCell}; /// /// extern crate crossbeam; /// use crossbeam::{spawn_unsafe as spawn}; /// ``` pub UNSAFE_REMOVED_FROM_NAME, style, "`unsafe` removed from API names on import" } declare_lint_pass!(UnsafeNameRemoval => [UNSAFE_REMOVED_FROM_NAME]); impl EarlyLintPass for UnsafeNameRemoval { fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) { if let ItemKind::Use(ref use_tree) = item.kind { check_use_tree(use_tree, cx, item.span); } } } fn check_use_tree(use_tree: &UseTree, cx: &EarlyContext<'_>, span: Span) { match use_tree.kind { UseTreeKind::Simple(Some(new_name), ..) => { let old_name = use_tree .prefix .segments .last() .expect("use paths cannot be empty") .ident; unsafe_to_safe_check(old_name, new_name, cx, span); }, UseTreeKind::Simple(None, ..) | UseTreeKind::Glob => {}, UseTreeKind::Nested(ref nested_use_tree) => { for &(ref use_tree, _) in nested_use_tree { check_use_tree(use_tree, cx, span); } }, } } fn unsafe_to_safe_check(old_name: Ident, new_name: Ident, cx: &EarlyContext<'_>, span: Span) { let old_str = old_name.name.as_str(); let new_str = new_name.name.as_str(); if contains_unsafe(&old_str) && !contains_unsafe(&new_str) { span_lint( cx, UNSAFE_REMOVED_FROM_NAME, span, &format!( "removed `unsafe` from the name of `{}` in use as `{}`", old_str, new_str ), ); } } #[must_use] fn contains_unsafe(name: &str) -> bool { name.contains("Unsafe") || name.contains("unsafe") }<|fim▁end|>
/// ### What it does /// Checks for imports that remove "unsafe" from an item's /// name. ///
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from .current_flow_closeness import * from .current_flow_betweenness import * from .current_flow_betweenness_subset import * from .degree_alg import * from .dispersion import * from .eigenvector import * from .harmonic import * from .katz import * from .load import *<|fim▁end|>
from .betweenness import * from .betweenness_subset import * from .closeness import * from .subgraph_alg import *
<|file_name|>ignores.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright 2015 ARM Limited # # Licensed under the Apache License, Version 2.0 # See LICENSE file for details. # standard library modules, , , import unittest import os import tempfile # internal modules: from yotta.lib.fsutils import mkDirP, rmRf from yotta.lib.detect import systemDefaultTarget from yotta.lib import component from .cli import cli Test_Files = { '.yotta_ignore': ''' #comment /moo b/c/d b/c/*.txt /a/b/test.txt b/*.c /source/a/b/test.txt /test/foo sometest/a someothertest ignoredbyfname.c ''', 'module.json': ''' { "name": "test-testdep-f", "version": "0.0.6", "description": "Module to test test-dependencies and ignoring things", "author": "autopulated", "licenses": [ { "url": "https://spdx.org/licenses/Apache-2.0", "type": "Apache-2.0" } ], "dependencies": {}, "testDependencies": {} } ''', 'a/b/c/d/e/f/test.txt': '', 'a/b/c/d/e/test.c': '#error should be ignored', 'a/b/c/d/e/test.txt': '', 'a/b/c/d/test.c': '#error should be ignored', 'a/b/c/d/test.txt': '', 'a/b/c/d/z/test.c':'#error should be ignored', 'a/b/c/test.txt': '', 'a/b/test.txt':'', 'a/test.txt':'', 'comment':'# should not be ignored', 'f/f.h':''' #ifndef __F_H__ #define __F_H__ int f(); #endif ''', 'source/moo/test.txt':'', 'source/a/b/c/d/e/f/test.txt': '', 'source/a/b/c/d/e/test.c': '#error should be ignored', 'source/a/b/c/d/e/test.txt': '', 'source/a/b/c/d/test.c': '#error should be ignored', 'source/a/b/c/d/test.txt': '', 'source/a/b/c/d/z/test.c':'#error should be ignored', 'source/a/b/c/test.txt': '', 'source/a/b/test.txt':'', 'source/a/test.txt':'', 'source/f.c':''' int f(){ return 6; } ''', 'test/anothertest/ignoredbyfname.c':'#error should be ignored', 'test/anothertest/ignoredbyfname.c':''' #include <stdio.h> #include "f/f.h" int main(){ int result = f(); printf("%d\n", result); return !(result == 6); } ''', 'test/foo/ignored.c':''' #error should be ignored ''', 'test/someothertest/alsoignored.c':''' 
#error should be ignored ''', 'test/sometest/a/ignored.c':''' #error should be ignored ''' } Default_Test_Files = { 'module.json': ''' { "name": "test-testdep-f", "version": "0.0.6", "license": "Apache-2.0" }''' } def isWindows():<|fim▁hole|>def writeTestFiles(files): test_dir = tempfile.mkdtemp() for path, contents in files.items(): path_dir, file_name = os.path.split(path) path_dir = os.path.join(test_dir, path_dir) mkDirP(path_dir) with open(os.path.join(path_dir, file_name), 'w') as f: f.write(contents) return test_dir class TestPackIgnores(unittest.TestCase): @classmethod def setUpClass(cls): cls.test_dir = writeTestFiles(Test_Files) @classmethod def tearDownClass(cls): rmRf(cls.test_dir) def test_absolute_ignores(self): c = component.Component(self.test_dir) self.assertTrue(c.ignores('moo')) self.assertTrue(c.ignores('test/foo/ignored.c')) def test_glob_ignores(self): c = component.Component(self.test_dir) self.assertTrue(c.ignores('a/b/c/test.txt')) self.assertTrue(c.ignores('a/b/test.txt')) self.assertTrue(c.ignores('a/b/test.c')) self.assertTrue(c.ignores('source/a/b/c/test.txt')) self.assertTrue(c.ignores('source/a/b/test.txt')) self.assertTrue(c.ignores('source/a/b/test.c')) def test_relative_ignores(self): c = component.Component(self.test_dir) self.assertTrue(c.ignores('a/b/c/d/e/f/test.txt')) self.assertTrue(c.ignores('a/b/test.txt')) self.assertTrue(c.ignores('source/a/b/c/d/e/f/test.txt')) self.assertTrue(c.ignores('source/a/b/test.txt')) self.assertTrue(c.ignores('test/anothertest/ignoredbyfname.c')) self.assertTrue(c.ignores('test/someothertest/alsoignored.c')) def test_default_ignores(self): default_test_dir = writeTestFiles(Default_Test_Files) c = component.Component(default_test_dir) self.assertTrue(c.ignores('.something.c.swp')) self.assertTrue(c.ignores('.something.c~')) self.assertTrue(c.ignores('path/to/.something.c.swm')) self.assertTrue(c.ignores('path/to/.something.c~')) self.assertTrue(c.ignores('.DS_Store')) 
self.assertTrue(c.ignores('.git')) self.assertTrue(c.ignores('.hg')) self.assertTrue(c.ignores('.svn')) self.assertTrue(c.ignores('yotta_modules')) self.assertTrue(c.ignores('yotta_targets')) self.assertTrue(c.ignores('build')) self.assertTrue(c.ignores('.yotta.json')) rmRf(default_test_dir) def test_comments(self): c = component.Component(self.test_dir) self.assertFalse(c.ignores('comment')) @unittest.skipIf(isWindows(), "can't build natively on windows yet") def test_build(self): stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'clean'], self.test_dir) stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'build'], self.test_dir) self.assertNotIn('ignoredbyfname', stdout) self.assertNotIn('someothertest', stdout) self.assertNotIn('sometest', stdout) @unittest.skipIf(isWindows(), "can't build natively on windows yet") def test_test(self): stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'clean'], self.test_dir) stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'test'], self.test_dir) self.assertNotIn('ignoredbyfname', stdout) self.assertNotIn('someothertest', stdout) self.assertNotIn('sometest', stdout) def runCheckCommand(self, args, test_dir): stdout, stderr, statuscode = cli.run(args, cwd=self.test_dir) if statuscode != 0: print('command failed with status %s' % statuscode) print(stdout) print(stderr) self.assertEqual(statuscode, 0) return stdout or stderr if __name__ == '__main__': unittest.main()<|fim▁end|>
# can't run tests that hit github without an authn token return os.name == 'nt'
<|file_name|>bitcoin_eu_ES.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="eu_ES" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About POPCoin</source> <translation>POPCoin-i buruz</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;POPCoin&lt;/b&gt; version</source> <translation>&lt;b&gt;POPCoin&lt;/b&gt; bertsioa</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation type="unfinished"/> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>POPCoin</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Helbide-liburua</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Klik bikoitza helbidea edo etiketa editatzeko</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Sortu helbide berria</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Kopiatu hautatutako helbidea sistemaren arbelera</translation> </message> <message> <location line="-11"/> <source>&amp;New 
Address</source> <translation type="unfinished"/> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your POPCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation type="unfinished"/> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Erakutsi &amp;QR kodea</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a POPCoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified POPCoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Ezabatu</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your POPCoin addresses for sending payments. 
Always check the amount and the receiving address before sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation type="unfinished"/> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Esportatu Helbide-liburuaren datuak</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Komaz bereizitako artxiboa (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Errorea esportatzean</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Ezin idatzi %1 artxiboan.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Etiketa</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Helbidea</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(etiketarik ez)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Sartu pasahitza</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Pasahitz berria</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Errepikatu pasahitz berria</translation> 
</message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Sartu zorrorako pasahitz berria.&lt;br/&gt; Mesedez erabili &lt;b&gt;gutxienez ausazko 10 karaktere&lt;/b&gt;, edo &lt;b&gt;gutxienez zortzi hitz&lt;/b&gt; pasahitza osatzeko.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Enkriptatu zorroa</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Eragiketa honek zorroaren pasahitza behar du zorroa desblokeatzeko.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Desblokeatu zorroa</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Eragiketa honek zure zorroaren pasahitza behar du, zorroa desenkriptatzeko.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Desenkriptatu zorroa</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Aldatu pasahitza</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Sartu zorroaren pasahitz zaharra eta berria.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Berretsi zorroaren enkriptazioa</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR POPCOIN&lt;/b&gt;!</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Are 
you sure you wish to encrypt your wallet?</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation type="unfinished"/> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Zorroa enkriptatuta</translation> </message> <message> <location line="-56"/> <source>POPCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your popcoins from being stolen by malware infecting your computer.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Zorroaren enkriptazioak huts egin du</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Zorroaren enkriptazioak huts egin du barne-errore baten ondorioz. 
Zure zorroa ez da enkriptatu.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Eman dituzun pasahitzak ez datoz bat.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>Zorroaren desblokeoak huts egin du</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Zorroa desenkriptatzeko sartutako pasahitza okerra da.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Zorroaren desenkriptazioak huts egin du</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation type="unfinished"/> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation type="unfinished"/> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Sarearekin sinkronizatzen...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Gainbegiratu</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Ikusi zorroaren begirada orokorra</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Transakzioak</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Ikusi transakzioen historia</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Editatu gordetako helbide eta etiketen zerrenda</translation> </message> <message> 
<location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Erakutsi ordainketak jasotzeko helbideen zerrenda</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>Irten</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Irten aplikaziotik</translation> </message> <message> <location line="+4"/> <source>Show information about POPCoin</source> <translation>Erakutsi POPCoin-i buruzko informazioa</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>&amp;Qt-ari buruz</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Erakutsi POPCoin-i buruzko informazioa</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Aukerak...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation type="unfinished"/> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation type="unfinished"/> </message> <message> <location line="-347"/> <source>Send coins to a POPCoin address</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>Modify configuration options for POPCoin</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Change the passphrase 
used for wallet encryption</source> <translation>Aldatu zorroa enkriptatzeko erabilitako pasahitza</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation type="unfinished"/> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation type="unfinished"/> </message> <message> <location line="-165"/> <location line="+530"/> <source>POPCoin</source> <translation type="unfinished"/> </message> <message> <location line="-530"/> <source>Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>&amp;About POPCoin</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Sign messages with your POPCoin addresses to prove you own them</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified POPCoin addresses</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Artxiboa</translation> </message> <message> <location line="+7"/> 
<source>&amp;Settings</source> <translation>&amp;Ezarpenak</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Laguntza</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Fitxen tresna-barra</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+47"/> <source>POPCoin client</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to POPCoin network</source> <translation><numerusform>Konexio aktibo %n POPCoin-en sarera</numerusform><numerusform>%n konexio aktibo POPCoin-en sarera</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> 
<translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Egunean</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Eguneratzen...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Bidalitako transakzioa</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Sarrerako transakzioa</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation type="unfinished"/> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation type="unfinished"/> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! 
This can be caused by an invalid POPCoin address or malformed URI parameters.</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Zorroa &lt;b&gt;enkriptatuta&lt;/b&gt; eta &lt;b&gt;desblokeatuta&lt;/b&gt; dago une honetan</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Zorroa &lt;b&gt;enkriptatuta&lt;/b&gt; eta &lt;b&gt;blokeatuta&lt;/b&gt; dago une honetan</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. POPCoin can no longer continue safely and will quit.</source> <translation type="unfinished"/> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation type="unfinished"/> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Editatu helbidea</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Etiketa</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Helbide-liburuko sarrera honekin lotutako etiketa</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Helbidea</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Helbide-liburuko sarrera honekin lotutako helbidea. 
Bidaltzeko helbideetan soilik alda daiteke.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Jasotzeko helbide berria</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Bidaltzeko helbide berria</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Editatu jasotzeko helbidea</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Editatu bidaltzeko helbidea</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Sartu berri den helbidea, &quot;%1&quot;, helbide-liburuan dago jadanik.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid POPCoin address.</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Ezin desblokeatu zorroa.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Gako berriaren sorrerak huts egin du.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>POPCoin-Qt</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>version</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Usage:</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>UI options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set 
language, for example &quot;de_DE&quot; (default: system locale)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation type="unfinished"/> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Aukerak</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Automatically start POPCoin after logging in to the system.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Start POPCoin on system login</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Automatically open the POPCoin client port on the router. 
This only works when your router supports UPnP and it is enabled.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Connect to the POPCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation type="unfinished"/> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. 
When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting POPCoin.</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Whether to show POPCoin addresses in the transaction list or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation type="unfinished"/> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation type="unfinished"/> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation type="unfinished"/> 
</message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation type="unfinished"/> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting POPCoin.</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation type="unfinished"/> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Inprimakia</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the POPCoin network after a connection is established, but this process has not completed yet.</source> <translation type="unfinished"/> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Saldoa:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Konfirmatu gabe:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+107"/> <source>Immature:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation type="unfinished"/> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Azken transakzioak&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>Zure uneko saldoa</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and 
do not yet count toward the current balance</source> <translation>Oraindik konfirmatu gabe daudenez, uneko saldoan kontatu gabe dagoen transakzio kopurua</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation type="unfinished"/> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start popcoin: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation type="unfinished"/> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Kopurua:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Etiketa:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Mezua:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>Gorde honela...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation type="unfinished"/> </message> </context> <context> 
<name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation type="unfinished"/> </message> <message> <location line="-217"/> <source>Client version</source> <translation type="unfinished"/> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation type="unfinished"/> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>Startup time</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Network</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>On testnet</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Block chain</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Last block time</source> <translation type="unfinished"/> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Show the POPCoin-Qt help 
message to get a list with possible POPCoin command-line options.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation type="unfinished"/> </message> <message> <location line="-260"/> <source>Build date</source> <translation type="unfinished"/> </message> <message> <location line="-104"/> <source>POPCoin - Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>POPCoin Core</source> <translation type="unfinished"/> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Open the POPCoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Clear console</source> <translation type="unfinished"/> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the POPCoin RPC console.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Bidali txanponak</translation> </message> <message> 
<location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Bidali hainbat jasotzaileri batera</translation> </message> <message> <location line="+2"/> <source>Add &amp;Recipient</source> <translation type="unfinished"/> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Saldoa:</translation> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation>123.456 BTC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Berretsi bidaltzeko ekintza</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation type="unfinished"/> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; honi: %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Berretsi txanponak bidaltzea</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Ziur zaude %1 bidali nahi duzula?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation>eta</translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>Ordaintzeko kopurua 0 baino handiagoa izan behar du.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is 
included.</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Inprimakia</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>K&amp;opurua:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Ordaindu &amp;honi:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. 
sSxjmkQbhzcbnhNLPru6TwPy4HRPogaDcK)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Sartu etiketa bat helbide honetarako, eta gehitu zure helbide-liburuan</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Etiketa:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Itsatsi helbidea arbeletik</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Ezabatu jasotzaile hau</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a POPCoin address (e.g. sSxjmkQbhzcbnhNLPru6TwPy4HRPogaDcK)</source> <translation>Sartu POPCoin helbide bat (adb.: sSxjmkQbhzcbnhNLPru6TwPy4HRPogaDcK) </translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. 
Only sign fully-detailed statements you agree to.</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. sSxjmkQbhzcbnhNLPru6TwPy4HRPogaDcK)</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation type="unfinished"/> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Itsatsi helbidea arbeletik</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Signature</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this POPCoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation type="unfinished"/> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. 
Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. sSxjmkQbhzcbnhNLPru6TwPy4HRPogaDcK)</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified POPCoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation type="unfinished"/> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a POPCoin address (e.g. sSxjmkQbhzcbnhNLPru6TwPy4HRPogaDcK)</source> <translation>Sartu POPCoin helbide bat (adb.: sSxjmkQbhzcbnhNLPru6TwPy4HRPogaDcK) </translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Enter POPCoin signature</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation type="unfinished"/> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation type="unfinished"/> </message> <message> <location 
line="+8"/> <source>Private key for the entered address is not available.</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation type="unfinished"/> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>POPCoin</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Zabalik %1 arte</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/konfirmatu gabe</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 konfirmazioak</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+7"/> 
<source>, broadcast through %n node(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Generated</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation type="unfinished"/> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>label</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation type="unfinished"/> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Net amount</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Message</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Comment</source> 
<translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Debug information</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Transaction</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Inputs</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Kopurua</translation> </message> <message> <location line="+1"/> <source>true</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>false</source> <translation type="unfinished"/> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, ez da arrakastaz emititu oraindik</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>ezezaguna</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Transakzioaren xehetasunak</translation> </message> <message> <location line="+6"/> <source>This pane shows a 
detailed description of the transaction</source> <translation>Panel honek transakzioaren deskribapen xehea erakusten du</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Mota</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Helbidea</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Kopurua</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Zabalik %1 arte</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Offline (%1 konfirmazio)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Konfirmatuta (%1 konfirmazio)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Bloke hau ez du beste inongo nodorik jaso, eta seguruenik ez da onartuko!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Sortua, baina ez onartua</translation> </message> 
<message> <location line="+43"/> <source>Received with</source> <translation>Jasoa honekin: </translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Honi bidalia: </translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Ordainketa zeure buruari</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Bildua</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Transakzioaren egoera. Pasatu sagua gainetik konfirmazio kopurua ikusteko.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Transakzioa jasotako data eta ordua.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Transakzio mota.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Transakzioaren xede-helbidea.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Saldoan kendu edo gehitutako kopurua.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Denak</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Gaur</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Aste honetan</translation> </message> <message> <location line="+1"/> <source>This month</source> 
<translation>Hil honetan</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Azken hilean</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Aurten</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Muga...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Jasota honekin: </translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Hona bidalia: </translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Zeure buruari</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Bildua</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Beste</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Sartu bilatzeko helbide edo etiketa</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Kopuru minimoa</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Kopiatu helbidea</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Kopiatu etiketa</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit label</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation type="unfinished"/> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Transakzioaren xehetasunak</translation> </message> 
<message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Komaz bereizitako artxiboa (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Mota</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Etiketa</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Helbidea</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Kopurua</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Errorea esportatzean</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Ezin idatzi %1 artxiboan.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>to</source> <translation type="unfinished"/> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation type="unfinished"/> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> 
<translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation type="unfinished"/> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>POPCoin version</source> <translation>Botcoin bertsioa</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation type="unfinished"/> </message> <message> <location line="-29"/> <source>Send command to -server or popcoind</source> <translation type="unfinished"/> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Komandoen lista</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Laguntza komando batean</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Aukerak</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: popcoin.conf)</source> <translation>Ezarpen fitxategia aukeratu (berezkoa: popcoin.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: popcoind.pid)</source> <translation>pid fitxategia aukeratu (berezkoa: popcoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation type="unfinished"/> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation type="unfinished"/> </message> 
<message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 9247 or testnet: 19247)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation type="unfinished"/> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation type="unfinished"/> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation type="unfinished"/> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 9347 or testnet: 19347)</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation type="unfinished"/> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation type="unfinished"/> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation type="unfinished"/> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random 
password: rpcuser=popcoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;POPCoin Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. POPCoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong POPCoin will not work properly.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! 
All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> 
<translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>How thorough the 
block verification is (0-4, default: 3)</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation type="unfinished"/> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation type="unfinished"/> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> 
<source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>SSL options: (see the POPCoin Wiki for SSL setup instructions)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation 
type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>System error: </source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation type="unfinished"/> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation type="unfinished"/> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP 
address</source> <translation type="unfinished"/> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation type="unfinished"/> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation type="unfinished"/> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation type="unfinished"/> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation type="unfinished"/> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation type="unfinished"/> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation type="unfinished"/> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Laguntza mezu hau</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation type="unfinished"/> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation type="unfinished"/> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation type="unfinished"/> </message> <message> <location line="+55"/> 
<source>Loading addresses...</source> <translation type="unfinished"/> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of POPCoin</source> <translation type="unfinished"/> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart POPCoin to complete</source> <translation type="unfinished"/> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation type="unfinished"/> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation type="unfinished"/> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation type="unfinished"/> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation type="unfinished"/> 
</message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. POPCoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation type="unfinished"/> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation type="unfinished"/> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Birbilatzen...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Zamaketa amaitua</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation type="unfinished"/> </message> <message> <location line="-74"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation type="unfinished"/> </message> </context> </TS><|fim▁end|>
<location line="+3"/> <source>Add &amp;Recipient</source> <translation type="unfinished"/> </message>
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::io; fn main() { let mut input = String::new(); io::stdin().read_line(&mut input).expect("Failed to read line"); let result = input.trim().parse::<u32>(); if result.is_ok() {<|fim▁hole|> let mut candidate = num - 1; let mut prime = true; while candidate > 1 { if num % candidate == 0 { prime = false; break; } candidate = candidate - 1; } if prime { println!("{} is prime", num); } else { println!("{} is not prime", num); } } }<|fim▁end|>
let num = result.unwrap();
<|file_name|>viewport_rule.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ //! The [`@viewport`][at] at-rule and [`meta`][meta] element. //! //! [at]: https://drafts.csswg.org/css-device-adapt/#atviewport-rule //! [meta]: https://drafts.csswg.org/css-device-adapt/#viewport-meta use crate::context::QuirksMode; use crate::error_reporting::ContextualParseError; use crate::font_metrics::get_metrics_provider_for_product; use crate::media_queries::Device; use crate::parser::{Parse, ParserContext}; use crate::properties::StyleBuilder; use crate::rule_cache::RuleCacheConditions; use crate::shared_lock::{SharedRwLockReadGuard, StylesheetGuards, ToCssWithGuard}; use crate::str::CssStringWriter; use crate::stylesheets::{Origin, StylesheetInDocument}; use crate::values::computed::{Context, ToComputedValue}; use crate::values::generics::length::LengthPercentageOrAuto; use crate::values::generics::NonNegative; use crate::values::specified::{self, NoCalcLength}; use crate::values::specified::{NonNegativeLengthPercentageOrAuto, ViewportPercentageLength}; use app_units::Au; use cssparser::CowRcStr; use cssparser::{parse_important, AtRuleParser, DeclarationListParser, DeclarationParser, Parser}; use euclid::Size2D; use selectors::parser::SelectorParseErrorKind; use std::borrow::Cow; use std::cell::RefCell; use std::fmt::{self, Write}; use std::iter::Enumerate; use std::str::Chars; use style_traits::viewport::{Orientation, UserZoom, ViewportConstraints, Zoom}; use style_traits::{CssWriter, ParseError, PinchZoomFactor, StyleParseErrorKind, ToCss}; /// Whether parsing and processing of `@viewport` rules is enabled. #[cfg(feature = "servo")] pub fn enabled() -> bool { use servo_config::pref; pref!(layout.viewport.enabled) } /// Whether parsing and processing of `@viewport` rules is enabled. 
#[cfg(not(feature = "servo"))] pub fn enabled() -> bool { false // Gecko doesn't support @viewport. } macro_rules! declare_viewport_descriptor { ( $( $variant_name: expr => $variant: ident($data: ident), )+ ) => { declare_viewport_descriptor_inner!([] [ $( $variant_name => $variant($data), )+ ] 0); }; } macro_rules! declare_viewport_descriptor_inner { ( [ $( $assigned_variant_name: expr => $assigned_variant: ident($assigned_data: ident) = $assigned_discriminant: expr, )* ] [ $next_variant_name: expr => $next_variant: ident($next_data: ident), $( $variant_name: expr => $variant: ident($data: ident), )* ] $next_discriminant: expr ) => { declare_viewport_descriptor_inner! { [ $( $assigned_variant_name => $assigned_variant($assigned_data) = $assigned_discriminant, )* $next_variant_name => $next_variant($next_data) = $next_discriminant, ] [ $( $variant_name => $variant($data), )* ] $next_discriminant + 1 } }; ( [ $( $assigned_variant_name: expr => $assigned_variant: ident($assigned_data: ident) = $assigned_discriminant: expr, )* ] [ ] $number_of_variants: expr ) => { #[derive(Clone, Debug, PartialEq, ToShmem)] #[cfg_attr(feature = "servo", derive(MallocSizeOf))] #[allow(missing_docs)] pub enum ViewportDescriptor { $( $assigned_variant($assigned_data), )+ } const VIEWPORT_DESCRIPTOR_VARIANTS: usize = $number_of_variants; impl ViewportDescriptor { #[allow(missing_docs)] pub fn discriminant_value(&self) -> usize { match *self { $( ViewportDescriptor::$assigned_variant(..) => $assigned_discriminant, )* } } } impl ToCss for ViewportDescriptor { fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write, { match *self { $( ViewportDescriptor::$assigned_variant(ref val) => { dest.write_str($assigned_variant_name)?; dest.write_str(": ")?; val.to_css(dest)?; }, )* } dest.write_str(";") } } }; } declare_viewport_descriptor! 
{ "min-width" => MinWidth(ViewportLength), "max-width" => MaxWidth(ViewportLength), "min-height" => MinHeight(ViewportLength), "max-height" => MaxHeight(ViewportLength), "zoom" => Zoom(Zoom), "min-zoom" => MinZoom(Zoom), "max-zoom" => MaxZoom(Zoom), "user-zoom" => UserZoom(UserZoom), "orientation" => Orientation(Orientation), } trait FromMeta: Sized { fn from_meta(value: &str) -> Option<Self>; } /// ViewportLength is a length | percentage | auto | extend-to-zoom /// See: /// * http://dev.w3.org/csswg/css-device-adapt/#min-max-width-desc /// * http://dev.w3.org/csswg/css-device-adapt/#extend-to-zoom #[allow(missing_docs)] #[cfg_attr(feature = "servo", derive(MallocSizeOf))] #[derive(Clone, Debug, PartialEq, ToCss, ToShmem)] pub enum ViewportLength { Specified(NonNegativeLengthPercentageOrAuto), ExtendToZoom, } impl FromMeta for ViewportLength { fn from_meta(value: &str) -> Option<ViewportLength> { macro_rules! specified { ($value:expr) => { ViewportLength::Specified(LengthPercentageOrAuto::LengthPercentage(NonNegative( specified::LengthPercentage::Length($value), ))) }; } Some(match value { v if v.eq_ignore_ascii_case("device-width") => specified!( NoCalcLength::ViewportPercentage(ViewportPercentageLength::Vw(100.)) ), v if v.eq_ignore_ascii_case("device-height") => specified!( NoCalcLength::ViewportPercentage(ViewportPercentageLength::Vh(100.)) ), _ => match value.parse::<f32>() { Ok(n) if n >= 0. 
=> specified!(NoCalcLength::from_px(n.max(1.).min(10000.))), Ok(_) => return None, Err(_) => specified!(NoCalcLength::from_px(1.)), }, }) } } impl ViewportLength { fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Self, ParseError<'i>> { // we explicitly do not accept 'extend-to-zoom', since it is a UA // internal value for <META> viewport translation NonNegativeLengthPercentageOrAuto::parse(context, input).map(ViewportLength::Specified) } } impl FromMeta for Zoom { fn from_meta(value: &str) -> Option<Zoom> { Some(match value { v if v.eq_ignore_ascii_case("yes") => Zoom::Number(1.), v if v.eq_ignore_ascii_case("no") => Zoom::Number(0.1), v if v.eq_ignore_ascii_case("device-width") => Zoom::Number(10.), v if v.eq_ignore_ascii_case("device-height") => Zoom::Number(10.), _ => match value.parse::<f32>() { Ok(n) if n >= 0. => Zoom::Number(n.max(0.1).min(10.)), Ok(_) => return None, Err(_) => Zoom::Number(0.1), }, }) } } impl FromMeta for UserZoom { fn from_meta(value: &str) -> Option<UserZoom> { Some(match value { v if v.eq_ignore_ascii_case("yes") => UserZoom::Zoom, v if v.eq_ignore_ascii_case("no") => UserZoom::Fixed, v if v.eq_ignore_ascii_case("device-width") => UserZoom::Zoom, v if v.eq_ignore_ascii_case("device-height") => UserZoom::Zoom, _ => match value.parse::<f32>() { Ok(n) if n >= 1. || n <= -1. 
=> UserZoom::Zoom, _ => UserZoom::Fixed, }, }) } } struct ViewportRuleParser<'a, 'b: 'a> { context: &'a ParserContext<'b>, } #[derive(Clone, Debug, PartialEq, ToShmem)] #[cfg_attr(feature = "servo", derive(MallocSizeOf))] #[allow(missing_docs)] pub struct ViewportDescriptorDeclaration { pub origin: Origin, pub descriptor: ViewportDescriptor, pub important: bool, } impl ViewportDescriptorDeclaration { #[allow(missing_docs)] pub fn new( origin: Origin, descriptor: ViewportDescriptor, important: bool, ) -> ViewportDescriptorDeclaration { ViewportDescriptorDeclaration { origin: origin, descriptor: descriptor, important: important, } } } impl ToCss for ViewportDescriptorDeclaration { fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write, { self.descriptor.to_css(dest)?; if self.important { dest.write_str(" !important")?; } dest.write_str(";") } } fn parse_shorthand<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<(ViewportLength, ViewportLength), ParseError<'i>> { let min = ViewportLength::parse(context, input)?; match input.try_parse(|i| ViewportLength::parse(context, i)) { Err(_) => Ok((min.clone(), min)), Ok(max) => Ok((min, max)), } } impl<'a, 'b, 'i> AtRuleParser<'i> for ViewportRuleParser<'a, 'b> { type PreludeNoBlock = (); type PreludeBlock = (); type AtRule = Vec<ViewportDescriptorDeclaration>; type Error = StyleParseErrorKind<'i>; } impl<'a, 'b, 'i> DeclarationParser<'i> for ViewportRuleParser<'a, 'b> { type Declaration = Vec<ViewportDescriptorDeclaration>; type Error = StyleParseErrorKind<'i>; fn parse_value<'t>( &mut self, name: CowRcStr<'i>, input: &mut Parser<'i, 't>, ) -> Result<Vec<ViewportDescriptorDeclaration>, ParseError<'i>> { macro_rules! 
declaration { ($declaration:ident($parse:expr)) => { declaration!($declaration { value: $parse(input)?, important: input.try_parse(parse_important).is_ok(), }) }; ($declaration:ident { value: $value:expr, important: $important:expr, }) => { ViewportDescriptorDeclaration::new( self.context.stylesheet_origin, ViewportDescriptor::$declaration($value), $important, ) }; } macro_rules! ok { ($declaration:ident($parse:expr)) => { Ok(vec![declaration!($declaration($parse))]) }; (shorthand -> [$min:ident, $max:ident]) => {{ let shorthand = parse_shorthand(self.context, input)?; let important = input.try_parse(parse_important).is_ok(); Ok(vec![ declaration!($min { value: shorthand.0, important: important, }), declaration!($max { value: shorthand.1, important: important, }), ]) }}; } match_ignore_ascii_case! { &*name, "min-width" => ok!(MinWidth(|i| ViewportLength::parse(self.context, i))), "max-width" => ok!(MaxWidth(|i| ViewportLength::parse(self.context, i))), "width" => ok!(shorthand -> [MinWidth, MaxWidth]), "min-height" => ok!(MinHeight(|i| ViewportLength::parse(self.context, i))), "max-height" => ok!(MaxHeight(|i| ViewportLength::parse(self.context, i))), "height" => ok!(shorthand -> [MinHeight, MaxHeight]), "zoom" => ok!(Zoom(Zoom::parse)), "min-zoom" => ok!(MinZoom(Zoom::parse)), "max-zoom" => ok!(MaxZoom(Zoom::parse)), "user-zoom" => ok!(UserZoom(UserZoom::parse)), "orientation" => ok!(Orientation(Orientation::parse)), _ => Err(input.new_custom_error(SelectorParseErrorKind::UnexpectedIdent(name.clone()))), } } } /// A `@viewport` rule. #[derive(Clone, Debug, PartialEq, ToShmem)] #[cfg_attr(feature = "servo", derive(MallocSizeOf))] pub struct ViewportRule { /// The declarations contained in this @viewport rule. pub declarations: Vec<ViewportDescriptorDeclaration>, } /// Whitespace as defined by DEVICE-ADAPT § 9.2 // TODO: should we just use whitespace as defined by HTML5? 
const WHITESPACE: &'static [char] = &['\t', '\n', '\r', ' ']; /// Separators as defined by DEVICE-ADAPT § 9.2 // need to use \x2c instead of ',' due to test-tidy const SEPARATOR: &'static [char] = &['\x2c', ';']; #[inline] fn is_whitespace_separator_or_equals(c: &char) -> bool { WHITESPACE.contains(c) || SEPARATOR.contains(c) || *c == '=' } impl ViewportRule { /// Parse a single @viewport rule. /// /// TODO(emilio): This could use the `Parse` trait now. pub fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Self, ParseError<'i>> { let parser = ViewportRuleParser { context }; let mut cascade = Cascade::new(); let mut parser = DeclarationListParser::new(input, parser); while let Some(result) = parser.next() { match result { Ok(declarations) => { for declarations in declarations { cascade.add(Cow::Owned(declarations)) } }, Err((error, slice)) => { let location = error.location; let error = ContextualParseError::UnsupportedViewportDescriptorDeclaration( slice, error, ); context.log_css_error(location, error); }, } } Ok(ViewportRule { declarations: cascade.finish(), }) } } impl ViewportRule { #[allow(missing_docs)] pub fn from_meta(content: &str) -> Option<ViewportRule> { let mut declarations = vec![None; VIEWPORT_DESCRIPTOR_VARIANTS]; macro_rules! push_descriptor { ($descriptor:ident($value:expr)) => {{ let descriptor = ViewportDescriptor::$descriptor($value); let discriminant = descriptor.discriminant_value(); declarations[discriminant] = Some(ViewportDescriptorDeclaration::new( Origin::Author, descriptor, false, )); }}; } let mut has_width = false; let mut has_height = false; let mut has_zoom = false; let mut iter = content.chars().enumerate(); macro_rules! 
start_of_name { ($iter:ident) => { $iter .by_ref() .skip_while(|&(_, c)| is_whitespace_separator_or_equals(&c)) .next() }; } while let Some((start, _)) = start_of_name!(iter) { let property = ViewportRule::parse_meta_property(content, &mut iter, start); if let Some((name, value)) = property { macro_rules! push { ($descriptor:ident($translate:path)) => { if let Some(value) = $translate(value) { push_descriptor!($descriptor(value)); } }; } match name { n if n.eq_ignore_ascii_case("width") => { if let Some(value) = ViewportLength::from_meta(value) { push_descriptor!(MinWidth(ViewportLength::ExtendToZoom)); push_descriptor!(MaxWidth(value)); has_width = true; } }, n if n.eq_ignore_ascii_case("height") => { if let Some(value) = ViewportLength::from_meta(value) { push_descriptor!(MinHeight(ViewportLength::ExtendToZoom)); push_descriptor!(MaxHeight(value)); has_height = true; } }, n if n.eq_ignore_ascii_case("initial-scale") => { if let Some(value) = Zoom::from_meta(value) { push_descriptor!(Zoom(value)); has_zoom = true; } }, n if n.eq_ignore_ascii_case("minimum-scale") => push!(MinZoom(Zoom::from_meta)), n if n.eq_ignore_ascii_case("maximum-scale") => push!(MaxZoom(Zoom::from_meta)), n if n.eq_ignore_ascii_case("user-scalable") => { push!(UserZoom(UserZoom::from_meta)) }, _ => {}, } } } // DEVICE-ADAPT § 9.4 - The 'width' and 'height' properties // http://dev.w3.org/csswg/css-device-adapt/#width-and-height-properties if !has_width && has_zoom { if has_height { push_descriptor!(MinWidth(ViewportLength::Specified( LengthPercentageOrAuto::Auto ))); push_descriptor!(MaxWidth(ViewportLength::Specified( LengthPercentageOrAuto::Auto ))); } else { push_descriptor!(MinWidth(ViewportLength::ExtendToZoom)); push_descriptor!(MaxWidth(ViewportLength::ExtendToZoom)); } } let declarations: Vec<_> = declarations.into_iter().filter_map(|entry| entry).collect(); if !declarations.is_empty() { Some(ViewportRule { declarations: declarations, }) } else { None } } fn parse_meta_property<'a>( 
content: &'a str, iter: &mut Enumerate<Chars<'a>>, start: usize, ) -> Option<(&'a str, &'a str)> { fn end_of_token(iter: &mut Enumerate<Chars>) -> Option<(usize, char)> { iter.by_ref() .skip_while(|&(_, c)| !is_whitespace_separator_or_equals(&c)) .next() } fn skip_whitespace(iter: &mut Enumerate<Chars>) -> Option<(usize, char)> { iter.by_ref() .skip_while(|&(_, c)| WHITESPACE.contains(&c)) .next() } // <name> <whitespace>* '=' let end = match end_of_token(iter) { Some((end, c)) if WHITESPACE.contains(&c) => match skip_whitespace(iter) { Some((_, c)) if c == '=' => end, _ => return None, }, Some((end, c)) if c == '=' => end, _ => return None, }; let name = &content[start..end]; // <whitespace>* <value> let start = match skip_whitespace(iter) { Some((start, c)) if !SEPARATOR.contains(&c) => start, _ => return None, }; let value = match end_of_token(iter) { Some((end, _)) => &content[start..end], _ => &content[start..], }; Some((name, value)) } } impl ToCssWithGuard for ViewportRule { // Serialization of ViewportRule is not specced. 
fn to_css(&self, _guard: &SharedRwLockReadGuard, dest: &mut CssStringWriter) -> fmt::Result { dest.write_str("@viewport { ")?; let mut iter = self.declarations.iter(); iter.next().unwrap().to_css(&mut CssWriter::new(dest))?; for declaration in iter { dest.write_str(" ")?; declaration.to_css(&mut CssWriter::new(dest))?; } dest.write_str(" }") } } /// Computes the cascade precedence as according to /// <http://dev.w3.org/csswg/css-cascade/#cascade-origin> fn cascade_precendence(origin: Origin, important: bool) -> u8 { match (origin, important) { (Origin::UserAgent, true) => 1, (Origin::User, true) => 2, (Origin::Author, true) => 3, (Origin::Author, false) => 4, (Origin::User, false) => 5, (Origin::UserAgent, false) => 6, } } impl ViewportDescriptorDeclaration { fn higher_or_equal_precendence(&self, other: &ViewportDescriptorDeclaration) -> bool { let self_precedence = cascade_precendence(self.origin, self.important); let other_precedence = cascade_precendence(other.origin, other.important); self_precedence <= other_precedence } } #[allow(missing_docs)] pub struct Cascade { declarations: Vec<Option<(usize, ViewportDescriptorDeclaration)>>, count_so_far: usize, } #[allow(missing_docs)] impl Cascade { pub fn new() -> Self { Cascade { declarations: vec![None; VIEWPORT_DESCRIPTOR_VARIANTS], count_so_far: 0, } } pub fn from_stylesheets<'a, I, S>( stylesheets: I, guards: &StylesheetGuards, device: &Device, ) -> Self where I: Iterator<Item = (&'a S, Origin)>, S: StylesheetInDocument + 'static, { let mut cascade = Self::new(); for (stylesheet, origin) in stylesheets { stylesheet.effective_viewport_rules(device, guards.for_origin(origin), |rule| { for declaration in &rule.declarations { cascade.add(Cow::Borrowed(declaration)) } }) } cascade } pub fn add(&mut self, declaration: Cow<ViewportDescriptorDeclaration>) { let descriptor = declaration.descriptor.discriminant_value(); match self.declarations[descriptor] { Some((ref mut order_of_appearance, ref mut entry_declaration)) => 
{ if declaration.higher_or_equal_precendence(entry_declaration) { *entry_declaration = declaration.into_owned(); *order_of_appearance = self.count_so_far; } }, ref mut entry @ None => { *entry = Some((self.count_so_far, declaration.into_owned())); }, } self.count_so_far += 1; } pub fn finish(mut self) -> Vec<ViewportDescriptorDeclaration> { // sort the descriptors by order of appearance self.declarations .sort_by_key(|entry| entry.as_ref().map(|&(index, _)| index)); self.declarations .into_iter() .filter_map(|entry| entry.map(|(_, decl)| decl)) .collect() } } /// Just a helper trait to be able to implement methods on ViewportConstraints. pub trait MaybeNew { /// Create a ViewportConstraints from a viewport size and a `@viewport` /// rule. fn maybe_new( device: &Device, rule: &ViewportRule, quirks_mode: QuirksMode, ) -> Option<ViewportConstraints>; } impl MaybeNew for ViewportConstraints { fn maybe_new( device: &Device, rule: &ViewportRule, quirks_mode: QuirksMode, ) -> Option<ViewportConstraints> { use std::cmp; if rule.declarations.is_empty() { return None; } let mut min_width = None; let mut max_width = None; let mut min_height = None; let mut max_height = None; let mut initial_zoom = None; let mut min_zoom = None; let mut max_zoom = None; let mut user_zoom = UserZoom::Zoom; let mut orientation = Orientation::Auto; // collapse the list of declarations into descriptor values for declaration in &rule.declarations { match declaration.descriptor { ViewportDescriptor::MinWidth(ref value) => min_width = Some(value), ViewportDescriptor::MaxWidth(ref value) => max_width = Some(value), ViewportDescriptor::MinHeight(ref value) => min_height = Some(value), ViewportDescriptor::MaxHeight(ref value) => max_height = Some(value), ViewportDescriptor::Zoom(value) => initial_zoom = value.to_f32(), ViewportDescriptor::MinZoom(value) => min_zoom = value.to_f32(), ViewportDescriptor::MaxZoom(value) => max_zoom = value.to_f32(), ViewportDescriptor::UserZoom(value) => user_zoom = value, 
ViewportDescriptor::Orientation(value) => orientation = value, } } // TODO: return `None` if all descriptors are either absent or initial value macro_rules! choose { ($op:ident, $opta:expr, $optb:expr) => { match ($opta, $optb) { (None, None) => None, (a, None) => a, (None, b) => b, (Some(a), Some(b)) => Some(a.$op(b)), } }; } macro_rules! min { ($opta:expr, $optb:expr) => { choose!(min, $opta, $optb) }; } macro_rules! max { ($opta:expr, $optb:expr) => { choose!(max, $opta, $optb) }; } // DEVICE-ADAPT § 6.2.1 Resolve min-zoom and max-zoom values if min_zoom.is_some() && max_zoom.is_some() { max_zoom = Some(min_zoom.unwrap().max(max_zoom.unwrap())) } // DEVICE-ADAPT § 6.2.2 Constrain zoom value to the [min-zoom, max-zoom] range if initial_zoom.is_some() { initial_zoom = max!(min_zoom, min!(max_zoom, initial_zoom)); } // DEVICE-ADAPT § 6.2.3 Resolve non-auto lengths to pixel lengths let initial_viewport = device.au_viewport_size(); let provider = get_metrics_provider_for_product(); let mut conditions = RuleCacheConditions::default(); let context = Context { // Note: DEVICE-ADAPT § 5. states that relative length values are // resolved against initial values builder: StyleBuilder::for_inheritance(device, None, None), font_metrics_provider: &provider, cached_system_font: None, in_media_query: false, quirks_mode: quirks_mode, for_smil_animation: false, for_non_inherited_property: None, rule_cache_conditions: RefCell::new(&mut conditions), }; // DEVICE-ADAPT § 9.3 Resolving 'extend-to-zoom' let extend_width; let extend_height; if let Some(extend_zoom) = max!(initial_zoom, max_zoom) { let scale_factor = 1. / extend_zoom; extend_width = Some(initial_viewport.width.scale_by(scale_factor)); extend_height = Some(initial_viewport.height.scale_by(scale_factor)); } else { extend_width = None; extend_height = None; } macro_rules! 
to_pixel_length { ($value:ident, $dimension:ident, $extend_to:ident => $auto_extend_to:expr) => { if let Some($value) = $value { match *$value { ViewportLength::Specified(ref length) => match *length { LengthPercentageOrAuto::Auto => None, LengthPercentageOrAuto::LengthPercentage(ref lop) => Some( lop.to_computed_value(&context) .to_used_value(initial_viewport.$dimension), ), }, ViewportLength::ExtendToZoom => { // $extend_to will be 'None' if 'extend-to-zoom' is 'auto' match ($extend_to, $auto_extend_to) { (None, None) => None, (a, None) => a, (None, b) => b, (a, b) => cmp::max(a, b), } }, } } else { None } }; } // DEVICE-ADAPT § 9.3 states that max-descriptors need to be resolved // before min-descriptors. // http://dev.w3.org/csswg/css-device-adapt/#resolve-extend-to-zoom let max_width = to_pixel_length!(max_width, width, extend_width => None); let max_height = to_pixel_length!(max_height, height, extend_height => None); let min_width = to_pixel_length!(min_width, width, extend_width => max_width); let min_height = to_pixel_length!(min_height, height, extend_height => max_height); // DEVICE-ADAPT § 6.2.4 Resolve initial width and height from min/max descriptors macro_rules! 
resolve { ($min:ident, $max:ident, $initial:expr) => { if $min.is_some() || $max.is_some() {<|fim▁hole|> Some(match $min { Some(min) => cmp::max(min, max), None => max, }) } else { None }; }; } let width = resolve!(min_width, max_width, initial_viewport.width); let height = resolve!(min_height, max_height, initial_viewport.height); // DEVICE-ADAPT § 6.2.5 Resolve width value let width = if width.is_none() && height.is_none() { Some(initial_viewport.width) } else { width }; let width = width.unwrap_or_else(|| match initial_viewport.height { Au(0) => initial_viewport.width, initial_height => { let ratio = initial_viewport.width.to_f32_px() / initial_height.to_f32_px(); Au::from_f32_px(height.unwrap().to_f32_px() * ratio) }, }); // DEVICE-ADAPT § 6.2.6 Resolve height value let height = height.unwrap_or_else(|| match initial_viewport.width { Au(0) => initial_viewport.height, initial_width => { let ratio = initial_viewport.height.to_f32_px() / initial_width.to_f32_px(); Au::from_f32_px(width.to_f32_px() * ratio) }, }); Some(ViewportConstraints { size: Size2D::new(width.to_f32_px(), height.to_f32_px()), // TODO: compute a zoom factor for 'auto' as suggested by DEVICE-ADAPT § 10. initial_zoom: PinchZoomFactor::new(initial_zoom.unwrap_or(1.)), min_zoom: min_zoom.map(PinchZoomFactor::new), max_zoom: max_zoom.map(PinchZoomFactor::new), user_zoom: user_zoom, orientation: orientation, }) } }<|fim▁end|>
let max = match $max { Some(max) => cmp::min(max, $initial), None => $initial, };
<|file_name|>datasources.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2 # -*- coding: utf-8 -*- """ Created on Fri Feb 24 12:49:36 2017 @author: drsmith """ import os from .globals import FdpError<|fim▁hole|> 'diiid': ['diiid', 'diii-d', 'd3d'], 'cmod': ['cmod', 'c-mod']} for key, value in aliases.items(): if machine.lower() in value: return key # invalid machine name raise FdpError('"{}" is not a valid machine name\n'.format(machine)) MDS_SERVERS = { 'nstxu': {'hostname': 'skylark.pppl.gov', 'port': '8000'}, 'diiid': {'hostname': 'atlas.gat.com', 'port': '8000'} } EVENT_SERVERS = { 'nstxu': {'hostname': 'skylark.pppl.gov', 'port': '8000'}, 'diiid': {'hostname': 'atlas.gat.com', 'port': '8000'}, 'ltx': {'hostname': 'lithos.pppl.gov', 'port': '8000'} } LOGBOOK_CREDENTIALS = { 'nstxu': {'server': 'sql2008.pppl.gov', 'instance': None, 'username': None, 'password': None, 'database': None, 'port': '62917', 'table': 'entries', 'loginfile': os.path.join(os.getenv('HOME'), 'nstxlogs.sybase_login') } }<|fim▁end|>
def canonicalMachineName(machine=''): aliases = {'nstxu': ['nstx', 'nstxu', 'nstx-u'],
<|file_name|>test_notification_sched.py<|end_file_name|><|fim▁begin|>from twisted.internet.defer import inlineCallbacks, fail, succeed from globaleaks import models from globaleaks.orm import transact from globaleaks.tests import helpers from globaleaks.jobs.delivery_sched import DeliverySchedule from globaleaks.jobs.notification_sched import NotificationSchedule, MailGenerator class TestNotificationSchedule(helpers.TestGLWithPopulatedDB): @inlineCallbacks def setUp(self): yield helpers.TestGLWithPopulatedDB.setUp(self) yield self.perform_full_submission_actions() @transact def get_scheduled_email_count(self, store): return store.find(models.Mail).count() @inlineCallbacks<|fim▁hole|> def test_notification_schedule_success(self): count = yield self.get_scheduled_email_count() self.assertEqual(count, 0) yield DeliverySchedule().run() notification_schedule = NotificationSchedule() notification_schedule.skip_sleep = True yield notification_schedule.run() count = yield self.get_scheduled_email_count() self.assertEqual(count, 0) @inlineCallbacks def test_notification_schedule_failure(self): count = yield self.get_scheduled_email_count() self.assertEqual(count, 0) yield DeliverySchedule().run() notification_schedule = NotificationSchedule() notification_schedule.skip_sleep = True def sendmail(x, y, z): return fail(True) notification_schedule.sendmail = sendmail for i in range(0, 10): yield notification_schedule.run() count = yield self.get_scheduled_email_count() self.assertEqual(count, 40) yield notification_schedule.run() count = yield self.get_scheduled_email_count() self.assertEqual(count, 0)<|fim▁end|>
<|file_name|>cache.py<|end_file_name|><|fim▁begin|># Copyright (C) 2014 - Oscar Campos <[email protected]> # This program is Free Software see LICENSE file for details import os import json import platform from collections import defaultdict<|fim▁hole|>cachepath = { 'linux': os.path.join('~', '.local', 'share', 'anaconda', 'cache'), 'darwin': os.path.join('~', 'Library', 'Cache', 'anaconda'), 'windows': os.path.join(os.getenv('APPDATA') or '~', 'Anaconda', 'Cache') } cache_directory = os.path.expanduser( cachepath.get(platform.system().lower()) ) PACKAGES_CACHE = defaultdict(lambda: []) def append(package: typing.Dict) -> None: """Append the given package into the cache """ global PACKAGES_CACHE if not package_in_cache(package): PACKAGES_CACHE[go.GOROOT].append(package) def package_in_cache(package: typing.Dict) -> bool: """Look for the given package in the cache and return true if is there """ for pkg in PACKAGES_CACHE[go.GOROOT]: if pkg['ImportPath'] == package['ImportPath']: return True return False def lookup(node_name: str='') -> typing.Dict: """Lookup the given node_name in the cache and return it back """ node = {} if node_name == '': node = PACKAGES_CACHE[go.GOROOT] else: for pkg in PACKAGES_CACHE[go.GOROOT]: guru = pkg.get('Guru') if guru is None: continue path = guru['package'].get('path') if path is not None and path == node_name: node = guru break for member in guru['package'].get('members', []): if member.get('name') == node_name: node = member break for method in member.get('methods', []): if method['name'] == node_name: node = method break return node def persist_package_cache() -> None: """Write the contents of the package cache for this GOROOT into the disk """ gopath = go.GOPATH.replace(os.path.sep, '_') cachefile = os.path.join(cache_directory, gopath, 'packages.cache') if not os.path.exists(os.path.dirname(cachefile)): os.makedirs(os.path.dirname(cachefile)) with open(cachefile, 'w') as fd: json.dump(PACKAGES_CACHE[go.GOROOT], fd) def 
load_package_cache() -> typing.List: """Load a previously stores package cache file """ global PACKAGES_CACHE gopath = go.GOPATH.replace(os.path.sep, '_') cachefile = os.path.join(cache_directory, gopath, 'packages.cache') try: with open(cachefile, 'r') as fd: PACKAGES_CACHE[go.GOROOT] = json.load(fd) except FileNotFoundError: pass<|fim▁end|>
from anaconda_go.lib import go from anaconda_go.lib.plugin import typing
<|file_name|>keyword-for-as-identifier.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-flags: -Z parse-only // This file was auto-generated using 'src/etc/generate-keyword-tests.py for' fn main() { let for = "foo"; //~ error: expected pattern, found keyword `for` }<|fim▁end|>
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. //
<|file_name|>ScanAnimaitonStrategy.java<|end_file_name|><|fim▁begin|>package com.bruce.android.knowledge.custom_view.scanAnimation; /** * @author zhenghao.qi * @version 1.0 * @time 2015年11月09日14:45:32 */ public class ScanAnimaitonStrategy implements IAnimationStrategy { /** * 起始X坐标 */ private int startX; /** * 起始Y坐标 */ private int startY; /** * 起始点到终点的Y轴位移。 */ private int shift; /** * X Y坐标。 */ private double currentX, currentY; /** * 动画开始时间。 */ private long startTime; /** * 循环时间 */ private long cyclePeriod; /** * 动画正在进行时值为true,反之为false。 */ private boolean doing; <|fim▁hole|> /** * 进行动画展示的view */ private AnimationSurfaceView animationSurfaceView; public ScanAnimaitonStrategy(AnimationSurfaceView animationSurfaceView, int shift, long cyclePeriod) { this.animationSurfaceView = animationSurfaceView; this.shift = shift; this.cyclePeriod = cyclePeriod; initParams(); } public void start() { startTime = System.currentTimeMillis(); doing = true; } /** * 设置起始位置坐标 */ private void initParams() { int[] position = new int[2]; animationSurfaceView.getLocationInWindow(position); this.startX = position[0]; this.startY = position[1]; } /** * 根据当前时间计算小球的X/Y坐标。 */ public void compute() { long intervalTime = (System.currentTimeMillis() - startTime) % cyclePeriod; double angle = Math.toRadians(360 * 1.0d * intervalTime / cyclePeriod); int y = (int) (shift / 2 * Math.cos(angle)); y = Math.abs(y - shift/2); currentY = startY + y; doing = true; } @Override public boolean doing() { return doing; } public double getX() { return currentX; } public double getY() { return currentY; } public void cancel() { doing = false; } }<|fim▁end|>
<|file_name|>breakctr2.rs<|end_file_name|><|fim▁begin|>use std::env; use std::slice; use std::io::prelude::*; use common::{err, challenge}; use common::cipher::one_byte_xor as obx; use set3::breakctr; pub static info: challenge::Info = challenge::Info { no: 20, title: "Break fixed-nonce CTR statistically", help: "param1: path to file containing base64 encoded plain strings", execute_fn: interactive }; pub const min_chars_for_charfreq: usize = 10; // minimum number of characters required for breaking based on character frequency // break ctr cipher one column at a time // input: a list of cipher strings encrypted usinf CTR with same nonce // output: keystream // pub fn break_ctr(ciphers: &Vec<Vec<u8>>) -> Result<Vec<u8>, err::Error> { let mut cipher_its: Vec<slice::Iter<u8>> = Vec::new(); let mut keystream = Vec::<u8>::new(); for c in ciphers { cipher_its.push(c.iter()); } let mut all_ciphers_done = false; let mut col_no = 0; while ! all_ciphers_done { //println!("col no: {}", col_no); let mut col = Vec::<u8>::new(); // extract a column for it in cipher_its.iter_mut() {<|fim▁hole|> Some(v) => col.push(*v), None => {} }; } let mut options = obx::GuessOptions::new(); if col_no == 0 { // first column has a lot of upper case letters, hence, does not break correctly // with standard character frequencies fn upper_letters_distance_fn(input: &str) -> Result<f32, err::Error> { let count: usize = input.chars().filter(|c| *c >= 'A' && *c <= 'Z').count(); Ok(1f32/ count as f32) // inverse, since the key is guessed for minimum distance } options.set_distance_fn(upper_letters_distance_fn); } if col.len() < min_chars_for_charfreq && col.len() > 0 { // the input is too short, so, we take a shot // assuming all are valid // word characters fn last_chars_distance_fn(input: &str) -> Result<f32, err::Error> { let count: usize = input.chars().filter(|c| (*c >= 'A' && *c <= 'Z') || (*c >= 'a' && *c <= 'z')).count(); Ok(1f32 / count as f32) } 
options.set_distance_fn(last_chars_distance_fn) } if col.len() > 0 { keystream.push(try!(obx::guess_key(&col, Some(&options))).key); } else { all_ciphers_done = true; } col_no += 1; } Ok(keystream) } pub fn break_ctr_with_manual_guess_for_last_chars(ciphers: &Vec<Vec<u8>>, guesses: &Vec<(usize, &str)>) -> Result<Vec<String>, err::Error> { let keystream = try!(break_ctr(&ciphers)); let plains = try!(breakctr::manual_guess_for_last_chars(&ciphers, &keystream, &guesses)); Ok(plains) } pub fn interactive() -> err::ExitCode { let input_filepath = match env::args().nth(2) { Some(v) => v, None => { println!("please specify input plain data (base64 encoded) filepath"); return exit_err!(); } }; let ciphers = rtry!(breakctr::generate_ciphers_from_file(&input_filepath), exit_err!()); let plains = rtry!(break_ctr_with_manual_guess_for_last_chars(&ciphers, &vec![]), exit_err!()); for p in plains { println!("{}", p); } exit_ok!() }<|fim▁end|>
match it.next() {
<|file_name|>KEY.js<|end_file_name|><|fim▁begin|>// Generated by CoffeeScript 1.7.1 (function() { var M, TRM, TYPES, alert, badge, debug, echo, help, info, log, njs_fs, options, rainbow, rpr, urge, warn, whisper, __slice = [].slice; njs_fs = require('fs'); TYPES = require('coffeenode-types'); TRM = require('coffeenode-trm'); rpr = TRM.rpr.bind(TRM); badge = 'HOLLERITH/KEY'; log = TRM.get_logger('plain', badge); info = TRM.get_logger('info', badge); whisper = TRM.get_logger('whisper', badge); alert = TRM.get_logger('alert', badge); debug = TRM.get_logger('debug', badge); warn = TRM.get_logger('warn', badge); help = TRM.get_logger('help', badge); urge = TRM.get_logger('urge', badge); echo = TRM.echo.bind(TRM); rainbow = TRM.rainbow.bind(TRM); options = require('../../options'); M = options['marks']; /* *=========================================================================================================== 888b 888 8888888888 888 888 8888b 888 888 888 o 888 88888b 888 888 888 d8b 888 888Y88b 888 8888888 888 d888b 888 888 Y88b888 888 888d88888b888 888 Y88888 888 88888P Y88888 888 Y8888 888 8888P Y8888 888 Y888 8888888888 888P Y888 *=========================================================================================================== */ this.new_key = function() { var complement, complement_esc, datatype, idx, idxs, index_count, pad, predicate, predicate_esc, schema, theme, theme_esc, topic, topic_esc, type; schema = arguments[0], theme = arguments[1], topic = arguments[2], predicate = arguments[3], complement = arguments[4], idx = 6 <= arguments.length ? 
__slice.call(arguments, 5) : []; /* TAINT should go to `hollerith/KEY` */ if ((datatype = schema[predicate]) == null) { throw new Error("unknown datatype " + (rpr(predicate))); } index_count = datatype['index-count'], type = datatype.type, pad = datatype.pad; if (index_count !== idx.length) { throw new Error("need " + index_count + " indices for predicate " + (rpr(predicate)) + ", got " + idx.length); } theme_esc = KEY.esc(theme); topic_esc = KEY.esc(topic); predicate_esc = KEY.esc(predicate); complement_esc = KEY.esc(complement); /* TAINT parametrize */ idxs = index_count === 0 ? '' : idxs.join(','); return 's' + '|' + theme_esc + '|' + topic_esc + '|' + predicate_esc + '|' + complement_esc + '|' + idxs; }; /* *=========================================================================================================== .d88888b. 888 8888888b. d88P" "Y88b 888 888 "Y88b 888 888 888 888 888 888 888 888 888 888 888 888 888 888 888 888 888 888 888 888 Y88b. .d88P 888 888 .d88P "Y88888P" 88888888 8888888P" *=========================================================================================================== */ this.new_route = function(realm, type, name) { var R, part; R = [realm, type]; if (name != null) { R.push(name); } return ((function() { var _i, _len, _results; _results = []; for (_i = 0, _len = R.length; _i < _len; _i++) { part = R[_i]; _results.push(this.esc(part)); } return _results; }).call(this)).join(M['slash']); }; this.new_id = function(realm, type, idn) { var slash; slash = M['slash']; return (this.new_route(realm, type)) + slash + (this.esc(idn)); }; this.new_node = function() { var R, crumb, idn, joiner, realm, tail, type; realm = arguments[0], type = arguments[1], idn = arguments[2], tail = 4 <= arguments.length ? 
__slice.call(arguments, 3) : []; joiner = M['joiner']; R = M['primary'] + M['node'] + joiner + (this.new_id(realm, type, idn)); if (tail.length > 0) { R += M['slash'] + (((function() { var _i, _len, _results; _results = []; for (_i = 0, _len = tail.length; _i < _len; _i++) { crumb = tail[_i]; _results.push(this.esc(crumb)); } return _results; }).call(this)).join(M['slash'])); } R += joiner; return R; }; this.new_secondary_node = function() { var R, crumb, idn, joiner, realm, slash, tail, type; realm = arguments[0], type = arguments[1], idn = arguments[2], tail = 4 <= arguments.length ? __slice.call(arguments, 3) : []; joiner = M['joiner']; slash = M['slash']; R = M['secondary'] + M['node'] + slash + (this.esc(realm)) + (this.esc(type)); if (tail.length > 0) { R += joiner + (((function() { var _i, _len, _results; _results = []; for (_i = 0, _len = tail.length; _i < _len; _i++) { crumb = tail[_i]; _results.push(this.esc(crumb)); } return _results; }).call(this)).join(slash)); } R += joiner + (this.esc(idn)) + joiner; return R; }; this.new_facet_pair = function(realm, type, idn, name, value, distance) { if (distance == null) { distance = 0; } return [this.new_facet(realm, type, idn, name, value, distance), this.new_secondary_facet(realm, type, idn, name, value, distance)]; }; this.new_facet = function(realm, type, idn, name, value, distance) { var joiner; if (distance == null) { distance = 0; } joiner = M['joiner']; return M['primary'] + M['facet'] + joiner + (this.new_id(realm, type, idn)) + joiner + (this.esc(name)) + joiner + (this.esc(value)) + joiner + distance + joiner; };<|fim▁hole|> var joiner; if (distance == null) { distance = 0; } joiner = M['joiner']; return M['secondary'] + M['facet'] + joiner + (this.new_route(realm, type)) + joiner + (this.esc(name)) + joiner + (this.esc(value)) + joiner + (this.esc(idn)) + joiner + distance + joiner; }; this.new_link_pair = function(realm_0, type_0, idn_0, realm_1, type_1, idn_1, distance) { if (distance == null) { 
distance = 0; } return [this.new_link(realm_0, type_0, idn_0, realm_1, type_1, idn_1, distance), this.new_secondary_link(realm_0, type_0, idn_0, realm_1, type_1, idn_1, distance)]; }; this.new_link = function(realm_0, type_0, idn_0, realm_1, type_1, idn_1, distance) { var joiner; if (distance == null) { distance = 0; } joiner = M['joiner']; return M['primary'] + M['link'] + joiner + (this.new_id(realm_0, type_0, idn_0)) + joiner + (this.new_id(realm_1, type_1, idn_1)) + joiner + distance + joiner; }; this.new_secondary_link = function(realm_0, type_0, idn_0, realm_1, type_1, idn_1, distance) { var joiner; if (distance == null) { distance = 0; } joiner = M['joiner']; return M['secondary'] + M['link'] + joiner + (this.new_route(realm_0, type_0)) + joiner + (this.new_id(realm_1, type_1, idn_1)) + joiner + (this.esc(idn_0)) + joiner + distance + joiner; }; this.read = function(key) { var R, fields, layer, type, _ref, _ref1; _ref = key.split(M['joiner']), (_ref1 = _ref[0], layer = _ref1[0], type = _ref1[1]), fields = 2 <= _ref.length ? 
__slice.call(_ref, 1) : []; switch (layer) { case M['primary']: switch (type) { case M['node']: R = this._read_primary_node.apply(this, fields); break; case M['facet']: R = this._read_primary_facet.apply(this, fields); break; case M['link']: R = this._read_primary_link.apply(this, fields); break; default: throw new Error("unknown type mark " + (rpr(type))); } break; case M['secondary']: switch (type) { case M['facet']: R = this._read_secondary_facet.apply(this, fields); break; case M['link']: R = this._read_secondary_link.apply(this, fields); break; default: throw new Error("unknown type mark " + (rpr(type))); } break; default: throw new Error("unknown layer mark " + (rpr(layer))); } R['key'] = key; return R; }; this._read_primary_node = function(id) { var R; R = { level: 'primary', type: 'node', id: id }; return R; }; this._read_primary_facet = function(id, name, value, distance) { var R; R = { level: 'primary', type: 'facet', id: id, name: name, value: value, distance: parseInt(distance, 10) }; return R; }; this._read_primary_link = function(id_0, id_1, distance) { var R; R = { level: 'primary', type: 'link', id: id_0, target: id_1, distance: parseInt(distance, 10) }; return R; }; this._read_secondary_facet = function(route, name, value, idn, distance) { var R; R = { level: 'secondary', type: 'facet', id: route + M['slash'] + idn, name: name, value: value, distance: parseInt(distance, 10) }; return R; }; this._read_secondary_link = function(route_0, id_1, idn_0, distance) { var R, id_0; id_0 = route_0 + M['slash'] + idn_0; R = { level: 'secondary', type: 'link', id: id_0, target: id_1, distance: parseInt(distance, 10) }; return R; }; this.infer = function(key_0, key_1) { return this._infer(key_0, key_1, 'primary'); }; this.infer_secondary = function(key_0, key_1) { return this._infer(key_0, key_1, 'secondary'); }; this.infer_pair = function(key_0, key_1) { return this._infer(key_0, key_1, 'pair'); }; this._infer = function(key_0, key_1, mode) { var id_1, id_2, 
info_0, info_1, type_0, type_1; info_0 = TYPES.isa_text(key_0) ? this.read(key_0) : key_0; info_1 = TYPES.isa_text(key_1) ? this.read(key_1) : key_1; if ((type_0 = info_0['type']) === 'link') { if ((id_1 = info_0['target']) !== (id_2 = info_1['id'])) { throw new Error("unable to infer link from " + (rpr(info_0['key'])) + " and " + (rpr(info_1['key']))); } switch (type_1 = info_1['type']) { case 'link': return this._infer_link(info_0, info_1, mode); case 'facet': return this._infer_facet(info_0, info_1, mode); } } throw new Error("expected a link plus a link or a facet, got a " + type_0 + " and a " + type_1); }; this._infer_facet = function(link, facet, mode) { var distance, facet_idn, facet_realm, facet_type, link_idn, link_realm, link_type, name, slash, value, _ref, _ref1; _ref = this.split_id(link['id']), link_realm = _ref[0], link_type = _ref[1], link_idn = _ref[2]; _ref1 = this.split_id(link['id']), facet_realm = _ref1[0], facet_type = _ref1[1], facet_idn = _ref1[2]; /* TAINT route not distinct from ID? */ /* TAINT should slashes in name be escaped? */ /* TAINT what happens when we infer from an inferred facet? do all the escapes get re-escaped? 
*/ /* TAINT use module method */ slash = M['slash']; /* TAINT make use of dash configurable */ name = (this.esc(facet_realm)) + '-' + (this.esc(facet_type)) + '-' + (this.esc(facet['name'])); value = facet['value']; distance = link['distance'] + facet['distance'] + 1; switch (mode) { case 'primary': return this.new_facet(link_realm, link_type, link_idn, name, value, distance); case 'secondary': return this.new_secondary_facet(link_realm, link_type, link_idn, name, value, distance); case 'pair': return this.new_facet_pair(link_realm, link_type, link_idn, name, value, distance); default: throw new Error("unknown mode " + (rpr(mode))); } }; this._infer_link = function(link_0, link_1, mode) { /* $^|gtfs/stoptime/876|0|gtfs/trip/456 + $^|gtfs/trip/456|0|gtfs/route/777 ---------------------------------------------------------------- = $^|gtfs/stoptime/876|1|gtfs/route/777 = %^|gtfs/stoptime|1|gtfs/route/777|876 */ var distance, idn_0, idn_2, realm_0, realm_2, type_0, type_2, _ref, _ref1; _ref = this.split_id(link_0['id']), realm_0 = _ref[0], type_0 = _ref[1], idn_0 = _ref[2]; _ref1 = this.split_id(link_1['target']), realm_2 = _ref1[0], type_2 = _ref1[1], idn_2 = _ref1[2]; distance = link_0['distance'] + link_1['distance'] + 1; switch (mode) { case 'primary': return this.new_link(realm_0, type_0, idn_0, realm_2, type_2, idn_2, distance); case 'secondary': return this.new_secondary_link(realm_0, type_0, idn_0, realm_2, type_2, idn_2, distance); case 'pair': return this.new_link_pair(realm_0, type_0, idn_0, realm_2, type_2, idn_2, distance); default: throw new Error("unknown mode " + (rpr(mode))); } }; this.esc = (function() { /* TAINT too complected */ var d, escape, joiner_matcher, joiner_replacer; escape = function(text) { var R; R = text; R = R.replace(/([-()\[\]{}+?*.$\^|,:#<!\\])/g, '\\$1'); R = R.replace(/\x08/g, '\\x08'); return R; }; joiner_matcher = new RegExp(escape(M['joiner']), 'g'); /* TAINT not correct, could be single digit if byte value < 0x10 */ 
joiner_replacer = ((function() { var _i, _len, _ref, _results; _ref = new Buffer(M['joiner']); _results = []; for (_i = 0, _len = _ref.length; _i < _len; _i++) { d = _ref[_i]; _results.push('µ' + d.toString(16)); } return _results; })()).join(''); return function(x) { var R; if (x === void 0) { throw new Error("value cannot be undefined"); } R = TYPES.isa_text(x) ? x : rpr(x); R = R.replace(/µ/g, 'µb5'); R = R.replace(joiner_matcher, joiner_replacer); return R; }; })(); this.unescape = function(text_esc) { var matcher; matcher = /µ([0-9a-f]{2})/g; return text_esc.replace(matcher, function(_, cid_hex) { return String.fromCharCode(parseInt(cid_hex, 16)); }); }; this.split_id = function(id) { /* TAINT must unescape */ var R, slash; R = id.split(slash = M['slash']); if (R.length !== 3) { throw new Error("expected three parts separated by " + (rpr(slash)) + ", got " + (rpr(id))); } if (!(R[0].length > 0)) { throw new Error("realm cannot be empty in " + (rpr(id))); } if (!(R[1].length > 0)) { throw new Error("type cannot be empty in " + (rpr(id))); } if (!(R[2].length > 0)) { throw new Error("IDN cannot be empty in " + (rpr(id))); } return R; }; this.split = function(x) { return x.split(M['joiner']); }; this.split_compound_selector = function(compound_selector) { /* TAINT must unescape */ return compound_selector.split(M['loop']); }; this._idn_from_id = function(id) { var match; match = id.replace(/^.+?([^\/]+)$/); if (match == null) { throw new Error("not a valid ID: " + (rpr(id))); } return match[1]; }; this.lte_from_gte = function(gte) { var R, length; length = Buffer.byteLength(gte); R = new Buffer(1 + length); R.write(gte); R[length] = 0xff; return R; }; if (module.parent == null) { help(this.new_id('gtfs', 'stop', '123')); help(this.new_facet('gtfs', 'stop', '123', 'name', 1234)); help(this.new_facet('gtfs', 'stop', '123', 'name', 'foo/bar|baz')); help(this.new_facet('gtfs', 'stop', '123', 'name', 'Bayerischer Platz')); help(this.new_secondary_facet('gtfs', 'stop', 
'123', 'name', 'Bayerischer Platz')); help(this.new_facet_pair('gtfs', 'stop', '123', 'name', 'Bayerischer Platz')); help(this.new_link('gtfs', 'stoptime', '456', 'gtfs', 'stop', '123')); help(this.new_secondary_link('gtfs', 'stoptime', '456', 'gtfs', 'stop', '123')); help(this.new_link_pair('gtfs', 'stoptime', '456', 'gtfs', 'stop', '123')); help(this.read(this.new_facet('gtfs', 'stop', '123', 'name', 'Bayerischer Platz'))); help(this.read(this.new_secondary_facet('gtfs', 'stop', '123', 'name', 'Bayerischer Platz'))); help(this.read(this.new_link('gtfs', 'stoptime', '456', 'gtfs', 'stop', '123'))); help(this.read(this.new_secondary_link('gtfs', 'stoptime', '456', 'gtfs', 'stop', '123'))); help(this.infer('$^|gtfs/stoptime/876|0|gtfs/trip/456', '$^|gtfs/trip/456|0|gtfs/route/777')); help(this.infer('$^|gtfs/stoptime/876|0|gtfs/trip/456', '%^|gtfs/trip|0|gtfs/route/777|456')); help(this.infer('$^|gtfs/trip/456|0|gtfs/stop/123', '$:|gtfs/stop/123|0|name|Bayerischer Platz')); help(this.infer('$^|gtfs/stoptime/876|1|gtfs/stop/123', '$:|gtfs/stop/123|0|name|Bayerischer Platz')); } }).call(this);<|fim▁end|>
this.new_secondary_facet = function(realm, type, idn, name, value, distance) {
<|file_name|>MapData.java<|end_file_name|><|fim▁begin|>//: net/mindview/util/MapData.java // A Map filled with data using a generator object. package com.example.doun.chapter21concurrency.ReaderWriterMapPack; //package net.mindview.util; import java.util.*; public class MapData<K, V> extends LinkedHashMap<K, V> { // A single Pair Generator: public MapData(Generator<Pair<K, V>> gen, int quantity) { for (int i = 0; i < quantity; i++) { Pair<K, V> p = gen.next(); put(p.key, p.value); } } // Two separate Generators: public MapData(Generator<K> genK, Generator<V> genV, int quantity) { for (int i = 0; i < quantity; i++) { put(genK.next(), genV.next()); } } // A key Generator and a single value: public MapData(Generator<K> genK, V value, int quantity) { for (int i = 0; i < quantity; i++) { put(genK.next(), value); } } // An Iterable and a value Generator: public MapData(Iterable<K> genK, Generator<V> genV) { for (K key : genK) { put(key, genV.next()); } } <|fim▁hole|> // An Iterable and a single value: public MapData(Iterable<K> genK, V value) { for (K key : genK) { put(key, value); } } // Generic convenience methods: public static <K, V> MapData<K, V> map(Generator<Pair<K, V>> gen, int quantity) { return new MapData<K, V>(gen, quantity); } public static <K, V> MapData<K, V> map(Generator<K> genK, Generator<V> genV, int quantity) { return new MapData<K, V>(genK, genV, quantity); } public static <K, V> MapData<K, V> map(Generator<K> genK, V value, int quantity) { return new MapData<K, V>(genK, value, quantity); } public static <K, V> MapData<K, V> map(Iterable<K> genK, Generator<V> genV) { return new MapData<K, V>(genK, genV); } public static <K, V> MapData<K, V> map(Iterable<K> genK, V value) { return new MapData<K, V>(genK, value); } } ///:~<|fim▁end|>
<|file_name|>List.tests.ts<|end_file_name|><|fim▁begin|>import 'mocha'; import * as assert from 'assert'; import List from '../src/List'; suite("List", function () { suite("Should add items and access them using index", function () { test("First entry", function () { // Arrange const target = new List<number>(); const expected = 10; // Act target.add(expected); const actual = target[0]; // Assert assert.equal(actual, expected); }) test("second entry", function () { // Arrange const target = new List<number>(); const expected = 20; // Act target.add(10); target.add(expected); const actual = target[1]; // Assert assert.equal(actual, expected); }) test("Third entry", function () { // Arrange const target = new List<number>(); const expected = 30; // Act target.add(10); target.add(20); target.add(expected); const actual = target[2]; // Assert assert.equal(actual, expected); }) }); test("Should return count of items", function () { // Arrange const target = new List<number>(); target.add(1); target.add(2); target.add(3); const expected = 3; // Act const actual = target.count; // Assert assert.equal(actual, expected); }); test("Should not be readonly", function () { // Arrange const target = new List<number>(); const expected = false; // Act const actual = target.isReadOnly; // Assert assert.equal(actual, expected); }); test("Should be iterable", function () { // Arrange const target = new List<number>(); target.add(1); target.add(2); target.add(3); const expected = "123"; // Act let actual = ""; for (let item of target) { actual += item.toString(); } // Assert assert.equal(actual, expected); }); suite("Contains", function () { test("Should return true if the item exists", function () { // Arrange const target = new List<number>(); target.add(1); target.add(2); target.add(3); const expected = true; // Act const actual = target.contains(2); // Assert assert.equal(actual, expected); }); test("Should return false if the item doesnt exist", function () { // Arrange const 
target = new List<number>(); target.add(1); target.add(2); target.add(3); const expected = false; // Act const actual = target.contains(5); // Assert assert.equal(actual, expected); }); }); test("Should be able to insert items", function () { // Arrange const target = new List<string>(); target.add("a"); target.add("b"); target.add("c"); const expected = "a123bc"; // Act target.insert(1, "1"); target.insert(2, "2"); target.insert(3, "3"); let actual: string = ""; for (let item of target) { actual += item; } // Assert assert.equal(actual, expected); }); test("Should remove item at a given index", function () { // Arrange const target = new List<number>(); target.add(1); target.add(2); target.add(3); const expected = "13"; // Act target.removeAt(1); let actual = ""; for (let item of target) { actual += item.toString(); } // Assert assert.equal(actual, expected); }); test("Should remove specified item", function () { // Arrange const target = new List<number>(); target.add(1); target.add(2); target.add(3); const expected = "13"; // Act target.remove(2); let actual = ""; for (let item of target) { actual += item.toString(); } // Assert assert.equal(actual, expected); }); test("Should copy data to an array", function () { // Arrange const target = new List<number>(); target.add(1); target.add(2); target.add(3); const expected = "[1,2,3]"; // Act let array: number[] = []; target.copyTo(array); const actual = JSON.stringify(array); // Assert assert.equal(actual, expected); }); test("Should copy data to an array starting at a set index", function () { // Arrange const target = new List<number>(); const expected = false; // Act const actual = target.isReadOnly; // Assert assert.equal(actual, expected); }); test("Should not be instansible from an array", function () { // Arrange let target: List<number>; let array: number[] = [1, 2, 3]; const expected = "123"; // Act target = new List(array); let actual = ""; for (let item of target) { actual += item.toString(); } // Assert 
assert.equal(actual, expected); }); test("Should not be instansible from an IterableIterator", function () { // Arrange let target: List<number>; let iterator = function* (): IterableIterator<number> { yield 1; yield 2; yield 3; }; const expected = "123"; // Act target = new List(iterator()); let actual = ""; for (let item of target) { actual += item.toString(); } // Assert assert.equal(actual, expected); }); test("Should add range of items", function () { // Arrange const target = new List<number>(); let iterator = function* (): IterableIterator<number> { yield 1; yield 2; yield 3; }; const expected = "123";<|fim▁hole|> // Act target.addRange(iterator()); let actual = ""; for (let item of target) { actual += item.toString(); } // Assert assert.equal(actual, expected); }); test("Should insert range of items", function () { // Arrange const target = new List<number>(); target.add(0); target.add(4); target.add(5); let iterator = function* (): IterableIterator<number> { yield 1; yield 2; yield 3; }; const expected = "012345"; // Act target.insertRange(1, iterator()); let actual = ""; for (let item of target) { actual += item.toString(); } // Assert assert.equal(actual, expected); }); test("Should remove all items which match a predicate", function () { // Arrange const target = new List<number>(); const expected = "246810"; target.add(1); target.add(2); target.add(3); target.add(4); target.add(5); target.add(6); target.add(7); target.add(8); target.add(9); target.add(10); // Act target.removeAll(x => x % 2 !== 0); let actual: string = ""; for (let item of target) { actual += item; } // Assert assert.equal(actual, expected); }); test("Should be sorted using a comparer", function () { // Arrange const target = new List<number>(); const expected = "12345678910"; target.add(9); target.add(10); target.add(4); target.add(7); target.add(8); target.add(5); target.add(6); target.add(2); target.add(1); target.add(3); const comparer = (x: number, y: number) => { if (x === y) 
return 0; if (x > y) return 1; return -1; }; // Act target.sort(comparer); let actual: string = ""; for (let item of target) { actual += item; } // Assert assert.equal(actual, expected); }); test("Should be reversable", function () { // Arrange const target = new List<number>(); const expected = "12876543910"; target.add(1); target.add(2); target.add(3); target.add(4); target.add(5); target.add(6); target.add(7); target.add(8); target.add(9); target.add(10); const comparer = (x: number, y: number) => { if (x === y) return 0; if (x > y) return 1; return -1; }; // Act target.reverse(2, 6); let actual: string = ""; for (let item of target) { actual += item; } // Assert assert.equal(actual, expected); }); test("Should be binary searchable", function () { // Arrange const target = new List<number>(); const expected = 2; target.add(1); target.add(2); target.add(3); target.add(4); target.add(5); target.add(6); target.add(7); target.add(8); target.add(9); target.add(10); // Act const actual = target.binarySearch(3); // Assert assert.equal(actual, expected); }); test("Should return range of items", function () { // Arrange const target = new List<number>(); const expected = "345678"; target.add(1); target.add(2); target.add(3); target.add(4); target.add(5); target.add(6); target.add(7); target.add(8); target.add(9); target.add(10); const comparer = (x: number, y: number) => { if (x === y) return 0; if (x > y) return 1; return -1; }; // Act const actualList = target.getRange(2, 6); let actual: string = ""; for (let item of actualList) { actual += item; } // Assert assert.equal(actual, expected); }); test("Should return index of given item", function () { // Arrange const target = new List<number>(); const expected = 2; target.add(1); target.add(2); target.add(3); target.add(4); target.add(5); target.add(6); target.add(7); target.add(8); target.add(9); target.add(10); // Act const actual = target.indexOf(3); // Assert assert.equal(actual, expected); }); test("Should return 
last index of given item", function () { // Arrange const target = new List<number>(); const expected = 6; target.add(1); target.add(2); target.add(3); target.add(4); target.add(5); target.add(4); target.add(3); target.add(2); target.add(1); // Act const actual = target.lastIndexOf(3); // Assert assert.equal(actual, expected); }); test("Should remove items in a range", function () { // Arrange const target = new List<number>(); const expected = "12910"; target.add(1); target.add(2); target.add(3); target.add(4); target.add(5); target.add(6); target.add(7); target.add(8); target.add(9); target.add(10); // Act target.removeRange(2, 6); let actual: string = ""; for (let item of target) { actual += item; } // Assert assert.equal(actual, expected); }); test("Should clear all items", function () { // Arrange const target = new List<number>(); const expected = 0; target.add(1); target.add(2); target.add(3); target.add(4); target.add(5); target.add(6); target.add(7); target.add(8); target.add(9); target.add(10); // Act target.clear(); const actual = target.count; // Assert assert.equal(actual, expected); }); });<|fim▁end|>
<|file_name|>PybindDataCollector.cc<|end_file_name|><|fim▁begin|>#include "pybind11/pybind11.h" #include "eudaq/DataCollector.hh" namespace py = pybind11; class PyDataCollector; namespace{ auto dummy = eudaq::Factory<eudaq::DataCollector>:: Register<PyDataCollector, const std::string&, const std::string&> (eudaq::cstr2hash("PyDataCollector")); } class PyDataCollector : public eudaq::DataCollector { public: using eudaq::DataCollector::DataCollector; static eudaq::DataCollectorSP Make(const std::string &code_name, const std::string &name, const std::string &runctrl){ if(code_name != "PyDataCollector"){ EUDAQ_THROW("The code_name of Python datacollector is not PyDataCollector."); } return eudaq::DataCollector::Make(code_name, name, runctrl); }; void DoInitialise() override { PYBIND11_OVERLOAD(void, /* Return type */ eudaq::DataCollector,<|fim▁hole|> DoInitialise ); } void DoConfigure() override { PYBIND11_OVERLOAD(void, /* Return type */ eudaq::DataCollector, DoConfigure ); } void DoStartRun() override { PYBIND11_OVERLOAD(void, /* Return type */ eudaq::DataCollector, DoStartRun ); } void DoStopRun() override { PYBIND11_OVERLOAD(void, /* Return type */ eudaq::DataCollector, DoStopRun ); } void DoReset() override { PYBIND11_OVERLOAD(void, /* Return type */ eudaq::DataCollector, DoReset ); } void DoTerminate() override { PYBIND11_OVERLOAD(void, /* Return type */ eudaq::DataCollector, DoTerminate ); } void DoConnect(eudaq::ConnectionSPC id) override { PYBIND11_OVERLOAD(void, /* Return type */ eudaq::DataCollector, DoConnect, id ); } void DoDisconnect(eudaq::ConnectionSPC id) override { PYBIND11_OVERLOAD(void, /* Return type */ eudaq::DataCollector, DoDisconnect, id ); } void DoReceive(eudaq::ConnectionSPC id, eudaq::EventSP ev) override { PYBIND11_OVERLOAD(void, /* Return type */ eudaq::DataCollector, DoReceive, id, ev ); } }; void init_pybind_datacollector(py::module &m){ py::class_<eudaq::DataCollector, PyDataCollector, std::shared_ptr<eudaq::DataCollector>> 
datacollector_(m, "DataCollector"); datacollector_.def(py::init(&eudaq::DataCollector::Make, &PyDataCollector::Make)); datacollector_.def("DoInitialise", &eudaq::DataCollector::DoInitialise); datacollector_.def("DoConfigure", &eudaq::DataCollector::DoConfigure); datacollector_.def("DoStartRun", &eudaq::DataCollector::DoStartRun); datacollector_.def("DoStopRun", &eudaq::DataCollector::DoStopRun); datacollector_.def("DoReset", &eudaq::DataCollector::DoReset); datacollector_.def("DoTerminate", &eudaq::DataCollector::DoTerminate); datacollector_.def("DoConnect", &eudaq::DataCollector::DoConnect, "Called when a producer is connecting", py::arg("id")); datacollector_.def("DoDisconnect", &eudaq::DataCollector::DoDisconnect, "Called when a producer is disconnecting", py::arg("id")); datacollector_.def("DoReceive", &eudaq::DataCollector::DoReceive, "Called when an event is recievied", py::arg("id"), py::arg("ev")); datacollector_.def("SetServerAddress", &eudaq::DataCollector::SetServerAddress, "Set port of the data listening", py::arg("addr")); datacollector_.def("Connect", &eudaq::DataCollector::Connect); datacollector_.def("IsConnected", &eudaq::DataCollector::IsConnected); datacollector_.def("GetConfigItem", &eudaq::DataCollector::GetConfigItem, "Get an item from datacollector's config section", py::arg("key")); datacollector_.def("GetInitItem", &eudaq::DataCollector::GetInitItem, "Get an item from datacollector's init section", py::arg("key") ); }<|fim▁end|>
<|file_name|>SetDontInlineMethodTest.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation,<|fim▁hole|> * or visit www.oracle.com if you need additional information or have any * questions. */ /* * @test SetDontInlineMethodTest * @compile -J-XX:+UnlockDiagnosticVMOptions -J-XX:+WhiteBoxAPI CompilerWhiteBoxTest.java * @compile -J-XX:+UnlockDiagnosticVMOptions -J-XX:+WhiteBoxAPI SetDontInlineMethodTest.java * @run main/othervm -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI SetDontInlineMethodTest * @author [email protected] */ public class SetDontInlineMethodTest extends CompilerWhiteBoxTest { public static void main(String[] args) throws Exception { new SetDontInlineMethodTest().runTest(); } protected void test() throws Exception { if (WHITE_BOX.setDontInlineMethod(METHOD, true)) { throw new RuntimeException("on start " + METHOD + " must be inlineable"); } if (!WHITE_BOX.setDontInlineMethod(METHOD, true)) { throw new RuntimeException("after first change to true " + METHOD + " must be not inlineable"); } if (!WHITE_BOX.setDontInlineMethod(METHOD, false)) { throw new RuntimeException("after second change to true " + METHOD + " must be still not inlineable"); } if (WHITE_BOX.setDontInlineMethod(METHOD, 
false)) { throw new RuntimeException("after first change to false" + METHOD + " must be inlineable"); } if (WHITE_BOX.setDontInlineMethod(METHOD, false)) { throw new RuntimeException("after second change to false " + METHOD + " must be inlineable"); } } }<|fim▁end|>
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
<|file_name|>0014_auto_20160904_2350.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.9.9 on 2016-09-04 23:50 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('courses', '0013_auto_20160903_0212'), ] operations = [ migrations.RenameField( model_name='section', old_name='approachable_rating', new_name='cached_approachable_rating', ), migrations.RenameField( model_name='section', old_name='competency_rating', new_name='cached_competency_rating', ), migrations.RenameField( model_name='section', old_name='difficulty_rating', new_name='cached_difficulty_rating', ), migrations.RenameField( model_name='section', old_name='engagement_rating', new_name='cached_engagement_rating', ), migrations.RenameField( model_name='section', old_name='enthusiasm_rating', new_name='cached_enthusiasm_rating', ),<|fim▁hole|> migrations.RenameField( model_name='section', old_name='lecturing_rating', new_name='cached_lecturing_rating', ), migrations.RenameField( model_name='section', old_name='rating', new_name='cached_rating', ), migrations.RenameField( model_name='section', old_name='useful_rating', new_name='cached_useful_rating', ), ]<|fim▁end|>
<|file_name|>Config_acapon_Efficiency.C<|end_file_name|><|fim▁begin|>Bool_t SetGeneratedSmearingHistos = kFALSE; Bool_t GetCentralityFromAlien = kFALSE; std::string centralityFilename = ""; std::string centralityFilenameFromAlien = "/alice/cern.ch/user/a/acapon/.root"; const Int_t triggerNames = AliVEvent::kINT7; const Int_t nMCSignal = 0; const Int_t nCutsetting = 0; const Double_t minGenPt = 0.05; const Double_t maxGenPt = 20; const Double_t minGenEta = -1.5; const Double_t maxGenEta = 1.5; const Double_t minPtCut = 0.2; const Double_t maxPtCut = 15.0; const Double_t minEtaCut = -0.8; const Double_t maxEtaCut = 0.8; // const Double_t minPtCut = 0.2; // const Double_t maxPtCut = 8.0; // const Double_t minEtaCut = -0.8; // const Double_t maxEtaCut = 0.8; // binning of single leg histograms Bool_t usePtVector = kTRUE; Double_t ptBins[] = {0.00,0.05,0.10,0.15,0.20,0.25,0.30,0.35,0.40, 0.45,0.50,0.55,0.60,0.65,0.70,0.75,0.80,0.85, 0.90,0.95,1.00,1.10,1.20,1.30,1.40,1.50,1.60, 1.70,1.80,1.90,2.00,2.10,2.30,2.50,3.00,3.50, 4.00,5.00,6.00,7.00,8.00,10.0,15.0}; const Int_t nBinsPt = ( sizeof(ptBins) / sizeof(ptBins[0]) )-1; const Double_t minPtBin = 0; const Double_t maxPtBin = 20; const Int_t stepsPtBin = 800; const Double_t minEtaBin = -1.0; const Double_t maxEtaBin = 1.0; const Int_t stepsEtaBin = 20; const Double_t minPhiBin = 0; const Double_t maxPhiBin = 6.3; const Int_t stepsPhiBin = 20; const Double_t minThetaBin = 0; const Double_t maxThetaBin = TMath::TwoPi(); const Int_t stepsThetaBin = 60; const Double_t minMassBin = 0; const Double_t maxMassBin = 5; const Int_t stepsMassBin = 500; const Double_t minPairPtBin = 0; const Double_t maxPairPtBin = 10; const Int_t stepsPairPtBin = 100; // Binning of resolution histograms const Int_t NbinsDeltaMom = 2000; const Double_t DeltaMomMin = -10.0; const Double_t DeltaMomMax = 10.0; const Int_t NbinsRelMom = 400; const Double_t RelMomMin = 0.0; const Double_t RelMomMax = 2.0; const Int_t NbinsDeltaEta = 200; const Double_t 
DeltaEtaMin = -0.4; const Double_t DeltaEtaMax = 0.4; const Int_t NbinsDeltaTheta = 200; const Double_t DeltaThetaMin = -0.4; const Double_t DeltaThetaMax = 0.4; const Int_t NbinsDeltaPhi = 200; const Double_t DeltaPhiMin = -0.4; const Double_t DeltaPhiMax = 0.4; void GetCentrality(const Int_t centrality, Double_t& CentMin, Double_t& CentMax){ std::cout << "GetCentrality with centrality " << centrality << std::endl; if (centrality == 0){CentMin = 0; CentMax = 100;} else if(centrality == 1){CentMin = 0; CentMax = 20;} else if(centrality == 2){CentMin = 20; CentMax = 40;} else if(centrality == 3){CentMin = 40; CentMax = 60;} else if(centrality == 4){CentMin = 60; CentMax = 100;} else if(centrality == 5){CentMin = 60; CentMax = 80;} else if(centrality == 6){CentMin = 80; CentMax = 100;} else if(centrality == 7){CentMin = 0; CentMax = 5;} else if(centrality == 8){CentMin = -1; CentMax = -1;} else {std::cout << "WARNING::Centrality range not found....." std::endl;} return; } void ApplyPIDpostCalibration(AliAnalysisTaskElectronEfficiencyV2* task, Int_t whichDet, Bool_t wSDD){ std::cout << task << std::endl; std::cout << "starting ApplyPIDpostCalibration()\n"; if(whichDet == 0 && wSDD){// ITS std::cout << "Loading ITS correction" << std::endl; TString localPath = "/home/aaron/Data/diElec_framework_output/PIDcalibration/"; TString fileName = "outputITS_MC.root"; TFile* inFile = TFile::Open(localPath+fileName); if(!inFile){ gSystem->Exec("alien_cp alien:///alice/cern.ch/user/a/acapon/PIDcalibration/"+fileName+" ."); std::cout << "Copy ITS correction from Alien" << std::endl; inFile = TFile::Open(fileName); } else { std::cout << "Correction loaded" << std::endl; } TH3D* mean = dynamic_cast<TH3D*>(inFile->Get("sum_mean_correction")); TH3D* width= dynamic_cast<TH3D*>(inFile->Get("sum_width_correction")); task->SetCentroidCorrFunction(AliAnalysisTaskElectronEfficiencyV2::kITS, mean, AliDielectronVarManager::kP, AliDielectronVarManager::kEta, 
AliDielectronVarManager::kRefMultTPConly); task->SetWidthCorrFunction (AliAnalysisTaskElectronEfficiencyV2::kITS, width, AliDielectronVarManager::kP, AliDielectronVarManager::kEta, AliDielectronVarManager::kRefMultTPConly); } if(whichDet == 1){// TOF std::cout << "Loading TOF correction" << std::endl; TString localPath = "/home/aaron/Data/diElec_framework_output/PIDcalibration/"; TString fileName = "outputTOF"; if(wSDD == kTRUE){ fileName.Append("_MC.root"); }else{ fileName.Append("_woSDD_MC.root"); } TFile* inFile = TFile::Open(localPath+fileName); if(!inFile){ gSystem->Exec("alien_cp alien:///alice/cern.ch/user/a/acapon/PIDcalibration/"+fileName+" ."); std::cout << "Copy TOF correction from Alien" << std::endl; inFile = TFile::Open(fileName); } else { std::cout << "Correction loaded" << std::endl; } TH3D* mean = dynamic_cast<TH3D*>(inFile->Get("sum_mean_correction")); TH3D* width= dynamic_cast<TH3D*>(inFile->Get("sum_width_correction")); task->SetCentroidCorrFunction(AliAnalysisTaskElectronEfficiencyV2::kTOF, mean, AliDielectronVarManager::kP, AliDielectronVarManager::kEta, AliDielectronVarManager::kRefMultTPConly); task->SetWidthCorrFunction (AliAnalysisTaskElectronEfficiencyV2::kTOF, width, AliDielectronVarManager::kP, AliDielectronVarManager::kEta, AliDielectronVarManager::kRefMultTPConly); } } // ######################################################### // ######################################################### AliAnalysisFilter* SetupTrackCutsAndSettings(TString cutDefinition, Bool_t wSDD) { std::cout << "SetupTrackCutsAndSettings( cutInstance = " << cutDefinition << " )" <<std::endl; AliAnalysisFilter *anaFilter = new AliAnalysisFilter("anaFilter","anaFilter"); // named constructor seems mandatory! 
LMEECutLib* LMcutlib = new LMEECutLib(wSDD); if(cutDefinition == "kResolutionCuts"){ std::cout << "Resolution Track Cuts being set" << std::endl; anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kResolutionTrackCuts, LMEECutLib::kResolutionTrackCuts)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutSet1"){ //TMVA std::cout << "Setting up cut set 1" << std::endl; anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kCutSet1)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kTheoPID"){ // PID cut set from a Run 1 pPb analysis. Standard track cuts std::cout << "Setting up Theo PID. Standard track cuts." << std::endl; anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kTheoPID)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kScheidCuts"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kScheidCuts, LMEECutLib::kScheidCuts)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } // ######## PID Cut variation settings ################# // These variations use the kCutSet1 track cuts and only vary PID else if(cutDefinition == "kPIDcut1"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut1)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut2"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut2)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut3"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut3)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut4"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut4)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut5"){ 
anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut5)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut6"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut6)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut7"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut7)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut8"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut8)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut9"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut9)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut10"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut10)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut11"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut11)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut12"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut12)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut13"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut13)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut14"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut14)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut15"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut15)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else 
if(cutDefinition == "kPIDcut16"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut16)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut17"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut17)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut18"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut18)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut19"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut19)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kPIDcut20"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutSet1, LMEECutLib::kPIDcut20)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } // ######## Track+ePID Cut variation settings ################# else if(cutDefinition == "kCutVar1"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar1, LMEECutLib::kCutVar1)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar2"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar2, LMEECutLib::kCutVar2)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar3"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar3, LMEECutLib::kCutVar3)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar4"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar4, LMEECutLib::kCutVar4)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar5"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar5, LMEECutLib::kCutVar5)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar6"){ 
anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar6, LMEECutLib::kCutVar6)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar7"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar7, LMEECutLib::kCutVar7)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar8"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar8, LMEECutLib::kCutVar8)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar9"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar9, LMEECutLib::kCutVar9)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar10"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar10, LMEECutLib::kCutVar10)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar11"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar11, LMEECutLib::kCutVar11)); anaFilter->SetName(cutDefinition);<|fim▁hole|> anaFilter->Print(); } else if(cutDefinition == "kCutVar12"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar12, LMEECutLib::kCutVar12)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar13"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar13, LMEECutLib::kCutVar13)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar14"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar14, LMEECutLib::kCutVar14)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar15"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar15, LMEECutLib::kCutVar15)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar16"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar16, LMEECutLib::kCutVar16)); anaFilter->SetName(cutDefinition); 
anaFilter->Print(); } else if(cutDefinition == "kCutVar17"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar17, LMEECutLib::kCutVar17)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar18"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar18, LMEECutLib::kCutVar18)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar19"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar19, LMEECutLib::kCutVar19)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else if(cutDefinition == "kCutVar20"){ anaFilter->AddCuts(LMcutlib->GetTrackCuts(LMEECutLib::kCutVar20, LMEECutLib::kCutVar20)); anaFilter->SetName(cutDefinition); anaFilter->Print(); } else{ std::cout << "Undefined cut definition...." << std::endl; return 0x0; } return anaFilter; } // ######################################################### // ######################################################### std::vector<Bool_t> AddSingleLegMCSignal(AliAnalysisTaskElectronEfficiencyV2* task){ // SetLegPDGs() requires two pdg codes. For single tracks a dummy value is // passed, "1". 
// All final state electrons (excluding conversion electrons) AliDielectronSignalMC eleFinalState("eleFinalState","eleFinalState"); eleFinalState.SetLegPDGs(11,1); eleFinalState.SetCheckBothChargesLegs(kTRUE,kTRUE); eleFinalState.SetLegSources(AliDielectronSignalMC::kFinalState, AliDielectronSignalMC::kFinalState); eleFinalState.SetMotherPDGs(22, 22, kTRUE, kTRUE); // Exclude conversion electrons // Electrons from open charm mesons and baryons AliDielectronSignalMC eleFinalStateFromD("eleFinalStateFromD","eleFinalStateFromD"); eleFinalStateFromD.SetLegPDGs(11,1); eleFinalStateFromD.SetCheckBothChargesLegs(kTRUE,kTRUE); eleFinalStateFromD.SetLegSources(AliDielectronSignalMC::kFinalState, AliDielectronSignalMC::kFinalState); eleFinalStateFromD.SetMotherPDGs(402, 402); eleFinalStateFromD.SetCheckBothChargesMothers(kTRUE,kTRUE); // Electrons from open beauty mesons and baryons AliDielectronSignalMC eleFinalStateFromB("eleFinalStateFromB","eleFinalStateFromB"); eleFinalStateFromB.SetLegPDGs(11,1); eleFinalStateFromB.SetCheckBothChargesLegs(kTRUE,kTRUE); eleFinalStateFromB.SetLegSources(AliDielectronSignalMC::kFinalState, AliDielectronSignalMC::kFinalState); eleFinalStateFromB.SetMotherPDGs(502, 502); eleFinalStateFromB.SetCheckBothChargesMothers(kTRUE,kTRUE); // Add signals task->AddSingleLegMCSignal(eleFinalState); task->AddSingleLegMCSignal(eleFinalStateFromD); task->AddSingleLegMCSignal(eleFinalStateFromB); // This is used to get electrons not from same mother for pair efficiency. // Needed to look at D and B meson electrons as functionality to pair those is // not implemented in the framework. Instead, use all final start electrons // from D or B decays for efficiency correction, for example. // The ordering must match the ordering of the added signals above*. 
std::vector<Bool_t> DielectronsPairNotFromSameMother; DielectronsPairNotFromSameMother.push_back(kFALSE); DielectronsPairNotFromSameMother.push_back(kTRUE); DielectronsPairNotFromSameMother.push_back(kTRUE); return DielectronsPairNotFromSameMother; } // ######################################################### // ######################################################### void AddPairMCSignal(AliAnalysisTaskElectronEfficiencyV2* task){ // Dielectron pairs from same mother (excluding conversions) AliDielectronSignalMC pair_sameMother("sameMother","sameMother"); pair_sameMother.SetLegPDGs(11,-11); pair_sameMother.SetCheckBothChargesLegs(kTRUE,kTRUE); pair_sameMother.SetLegSources(AliDielectronSignalMC::kFinalState, AliDielectronSignalMC::kFinalState); // Set mother properties pair_sameMother.SetMothersRelation(AliDielectronSignalMC::kSame); pair_sameMother.SetMotherPDGs(22,22,kTRUE,kTRUE); // Exclude conversion //################################################################### // Signals for specific dielectron decay channels AliDielectronSignalMC pair_sameMother_pion("sameMother_pion","sameMother_pion"); pair_sameMother_pion.SetLegPDGs(11,-11); pair_sameMother_pion.SetCheckBothChargesLegs(kTRUE,kTRUE); pair_sameMother_pion.SetLegSources(AliDielectronSignalMC::kFinalState, AliDielectronSignalMC::kFinalState); // Set mother properties pair_sameMother_pion.SetMothersRelation(AliDielectronSignalMC::kSame); pair_sameMother_pion.SetMotherPDGs(111,111); AliDielectronSignalMC pair_sameMother_eta("sameMother_eta","sameMother_eta"); pair_sameMother_eta.SetLegPDGs(11,-11); pair_sameMother_eta.SetCheckBothChargesLegs(kTRUE,kTRUE); pair_sameMother_eta.SetLegSources(AliDielectronSignalMC::kFinalState, AliDielectronSignalMC::kFinalState); // Set mother properties pair_sameMother_eta.SetMothersRelation(AliDielectronSignalMC::kSame); pair_sameMother_eta.SetMotherPDGs(221,221); AliDielectronSignalMC pair_sameMother_etaP("sameMother_etaP","sameMother_etaP"); 
pair_sameMother_etaP.SetLegPDGs(11,-11); pair_sameMother_etaP.SetCheckBothChargesLegs(kTRUE,kTRUE); pair_sameMother_etaP.SetLegSources(AliDielectronSignalMC::kFinalState, AliDielectronSignalMC::kFinalState); // Set mother properties pair_sameMother_etaP.SetMothersRelation(AliDielectronSignalMC::kSame); pair_sameMother_etaP.SetMotherPDGs(331,331); AliDielectronSignalMC pair_sameMother_rho("sameMother_rho","sameMother_rho"); pair_sameMother_rho.SetLegPDGs(11,-11); pair_sameMother_rho.SetCheckBothChargesLegs(kTRUE,kTRUE); pair_sameMother_rho.SetLegSources(AliDielectronSignalMC::kFinalState, AliDielectronSignalMC::kFinalState); // Set mother properties pair_sameMother_rho.SetMothersRelation(AliDielectronSignalMC::kSame); pair_sameMother_rho.SetMotherPDGs(113, 113); AliDielectronSignalMC pair_sameMother_omega("sameMother_omega","sameMother_omega"); pair_sameMother_omega.SetLegPDGs(11,-11); pair_sameMother_omega.SetCheckBothChargesLegs(kTRUE,kTRUE); pair_sameMother_omega.SetLegSources(AliDielectronSignalMC::kFinalState, AliDielectronSignalMC::kFinalState); // Set mother properties pair_sameMother_omega.SetMothersRelation(AliDielectronSignalMC::kSame); pair_sameMother_omega.SetMotherPDGs(223, 223); AliDielectronSignalMC pair_sameMother_phi("sameMother_phi","sameMother_phi"); pair_sameMother_phi.SetLegPDGs(11,-11); pair_sameMother_phi.SetCheckBothChargesLegs(kTRUE,kTRUE); pair_sameMother_phi.SetLegSources(AliDielectronSignalMC::kFinalState, AliDielectronSignalMC::kFinalState); // Set mother properties pair_sameMother_phi.SetMothersRelation(AliDielectronSignalMC::kSame); pair_sameMother_phi.SetMotherPDGs(333, 333); AliDielectronSignalMC pair_sameMother_jpsi("sameMother_jpsi","sameMother_jpsi"); pair_sameMother_jpsi.SetLegPDGs(11,-11); pair_sameMother_jpsi.SetCheckBothChargesLegs(kTRUE,kTRUE); pair_sameMother_jpsi.SetLegSources(AliDielectronSignalMC::kFinalState, AliDielectronSignalMC::kFinalState); // Set mother properties 
pair_sameMother_jpsi.SetMothersRelation(AliDielectronSignalMC::kSame); pair_sameMother_jpsi.SetMotherPDGs(443, 443); task->AddPairMCSignal(pair_sameMother); // task->AddPairMCSignal(pair_sameMother_pion); // task->AddPairMCSignal(pair_sameMother_eta); // task->AddPairMCSignal(pair_sameMother_etaP); // task->AddPairMCSignal(pair_sameMother_rho); // task->AddPairMCSignal(pair_sameMother_omega); // task->AddPairMCSignal(pair_sameMother_phi); // task->AddPairMCSignal(pair_sameMother_jpsi); }<|fim▁end|>
<|file_name|>UIxTaskEditor.js<|end_file_name|><|fim▁begin|>/* -*- Mode: java; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ var contactSelectorAction = 'calendars-contacts'; function uixEarlierDate(date1, date2) { // can this be done in a sane way? if (date1 && date2) { if (date1.getYear() < date2.getYear()) return date1; if (date1.getYear() > date2.getYear()) return date2; // same year if (date1.getMonth() < date2.getMonth()) return date1; if (date1.getMonth() > date2.getMonth()) return date2; // same month if (date1.getDate() < date2.getDate()) return date1; if (date1.getDate() > date2.getDate()) return date2; } // same day return null; } function validateDate(which, label) { var result, dateValue; dateValue = this._getDate(which); if (dateValue == null) { alert(label); result = false; } else result = dateValue; return result; } function validateTaskEditor() { var e, startdate, enddate, tmpdate; e = document.getElementById('summary'); if (e.value.length == 0 && !confirm(labels.validate_notitle)) return false; e = document.getElementById('startTime_date'); if (!e.disabled) { startdate = validateDate('start', labels.validate_invalid_startdate); if (!startdate) return false; } e = document.getElementById('dueTime_date'); if (!e.disabled) { enddate = validateDate('due', labels.validate_invalid_enddate); if (!enddate) return false; } if (startdate && enddate) { tmpdate = uixEarlierDate(startdate, enddate); if (tmpdate == enddate) { alert(labels.validate_endbeforestart); return false; } else if (tmpdate == null /* means: same date */) { // TODO: check time var startHour, startMinute, endHour, endMinute; var matches; matches = document.forms[0]['startTime_time'].value.match(/([0-9]+):([0-9]+)/); if (matches) { startHour = parseInt(matches[1]); startMinute = parseInt(matches[2]); matches = document.forms[0]['dueTime_time'].value.match(/([0-9]+):([0-9]+)/); if (matches) { endHour = parseInt(matches[1]); endMinute = parseInt(matches[2]); if (startHour > endHour) { 
alert(labels.validate_endbeforestart); return false; } else if (startHour == endHour) { if (startMinute > endMinute) { alert(labels.validate_endbeforestart); return false; } } } else { alert(labels.validate_invalid_enddate); return false; } } else { alert(labels.validate_invalid_startdate); return false; } } } return true; } function onTimeControlCheck(checkBox) { if (checkBox) { var inputs = checkBox.parentNode.getElementsByTagName("input"); var selects = checkBox.parentNode.getElementsByTagName("select"); for (var i = 0; i < inputs.length; i++) if (inputs[i] != checkBox) inputs[i].disabled = !checkBox.checked; for (var i = 0; i < selects.length; i++) if (selects[i] != checkBox) selects[i].disabled = !checkBox.checked; if (checkBox.id == "startDateCB") { $("repeatList").disabled = !checkBox.checked; $("reminderList").disabled = !checkBox.checked; } } } function saveEvent(sender) { if (validateTaskEditor()) document.forms['editform'].submit(); return false; } function startDayAsShortString() { return dayAsShortDateString($('startTime_date')); } function dueDayAsShortString() { return dayAsShortDateString($('dueTime_date')); } this._getDate = function(which) { var date = window.timeWidgets[which]['date'].inputAsDate(); var time = window.timeWidgets[which]['time'].value.split(":"); date.setHours(time[0]); date.setMinutes(time[1]); if (isNaN(date.getTime())) return null; return date;<|fim▁hole|> var date = window.timeWidgets[which]['date'].getAttribute("shadow-value").asDate(); var time = window.timeWidgets[which]['time'].getAttribute("shadow-value").split(":"); date.setHours(time[0]); date.setMinutes(time[1]); return date; }; this.getStartDate = function() { return this._getDate('start'); }; this.getDueDate = function() { return this._getDate('due'); }; this.getShadowStartDate = function() { return this._getShadowDate('start'); }; this.getShadowDueDate = function() { return this._getShadowDate('due'); }; this._setDate = function(which, newDate) { 
window.timeWidgets[which]['date'].setInputAsDate(newDate); window.timeWidgets[which]['time'].value = newDate.getDisplayHoursString(); // Update date picker var dateComponent = jQuery(window.timeWidgets[which]['date']).closest('.date'); dateComponent.data('date', window.timeWidgets[which]['date'].value); dateComponent.datepicker('update'); }; this.setStartDate = function(newStartDate) { this._setDate('start', newStartDate); }; this.setDueDate = function(newDueDate) { this._setDate('due', newDueDate); }; this.onAdjustTime = function(event) { onAdjustDueTime(event); }; this.onAdjustDueTime = function(event) { if (!window.timeWidgets['due']['date'].disabled) { var dateDelta = (window.getStartDate().valueOf() - window.getShadowStartDate().valueOf()); var newDueDate = new Date(window.getDueDate().valueOf() + dateDelta); window.setDueDate(newDueDate); } window.timeWidgets['start']['date'].updateShadowValue(); window.timeWidgets['start']['time'].updateShadowValue(); }; this.initTimeWidgets = function (widgets) { this.timeWidgets = widgets; jQuery(widgets['start']['date']).closest('.date').datepicker({autoclose: true, weekStart: firstDayOfWeek}) .on('changeDate', onAdjustTime); widgets['start']['time'].on("time:change", onAdjustDueTime); widgets['start']['time'].addInterface(SOGoTimePickerInterface); jQuery(widgets['due']['date']).closest('.date').datepicker({autoclose: true, weekStart: firstDayOfWeek}); widgets['due']['time'].addInterface(SOGoTimePickerInterface); jQuery('#statusTime_date').closest('.date').datepicker({autoclose: true, weekStart: firstDayOfWeek}); }; function onStatusListChange(event) { var value = $("statusList").value; var statusTimeDate = $("statusTime_date"); var statusPercent = $("statusPercent"); if (value == "WONoSelectionString") { statusTimeDate.disabled = true; statusPercent.disabled = true; statusPercent.value = ""; } else if (value == "0") { statusTimeDate.disabled = true; statusPercent.disabled = false; } else if (value == "1") { 
statusTimeDate.disabled = true; statusPercent.disabled = false; } else if (value == "2") { statusTimeDate.disabled = false; statusPercent.disabled = false; statusPercent.value = "100"; } else if (value == "3") { statusTimeDate.disabled = true; statusPercent.disabled = true; } else { statusTimeDate.disabled = true; } } function initializeStatusLine() { var statusList = $("statusList"); if (statusList) { statusList.observe("change", onStatusListChange); } } function onTaskEditorLoad() { if (readOnly == false) { var widgets = {'start': {'date': $("startTime_date"), 'time': $("startTime_time")}, 'due': {'date': $("dueTime_date"), 'time': $("dueTime_time")}}; initTimeWidgets(widgets); } // Enable or disable the reminder list onTimeControlCheck($("startDateCB")); initializeStatusLine(); } document.observe("dom:loaded", onTaskEditorLoad);<|fim▁end|>
}; this._getShadowDate = function(which) {
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! # Getting started //! //! ```rust,no_run //! extern crate sdl2; //! //! use sdl2::pixels::Color; //! use sdl2::event::Event; //! use sdl2::keyboard::Keycode; //! use std::time::Duration; //! //! pub fn main() { //! let sdl_context = sdl2::init().unwrap(); //! let video_subsystem = sdl_context.video().unwrap(); //! //! let window = video_subsystem.window("rust-sdl2 demo", 800, 600) //! .position_centered() //! .build() //! .unwrap(); //! //! let mut canvas = window.into_canvas().build().unwrap(); //! //! canvas.set_draw_color(Color::RGB(0, 255, 255)); //! canvas.clear(); //! canvas.present(); //! let mut event_pump = sdl_context.event_pump().unwrap(); //! let mut i = 0; //! 'running: loop { //! i = (i + 1) % 255; //! canvas.set_draw_color(Color::RGB(i, 64, 255 - i)); //! canvas.clear(); //! for event in event_pump.poll_iter() { //! match event { //! Event::Quit {..} | //! Event::KeyDown { keycode: Some(Keycode::Escape), .. } => { //! break 'running //! }, //! _ => {} //! } //! } //! // The rest of the game loop goes here... //! //! canvas.present(); //! ::std::thread::sleep(Duration::new(0, 1_000_000_000u32 / 60)); //! } //! } //! 
``` #![crate_name = "sdl2"] #![crate_type = "lib"] #![allow(clippy::cast_lossless, clippy::transmute_ptr_to_ref)] pub extern crate libc; #[macro_use] extern crate lazy_static; #[macro_use] extern crate bitflags; pub extern crate sdl2_sys as sys; #[cfg(feature = "gfx")] extern crate c_vec; pub use crate::sdl::*; pub mod clipboard; pub mod cpuinfo; #[macro_use] mod macros; pub mod audio; pub mod controller; pub mod event; pub mod filesystem; pub mod haptic; pub mod hint; pub mod joystick; pub mod keyboard; pub mod log; pub mod messagebox; pub mod mouse; pub mod pixels; pub mod rect; pub mod render; pub mod rwops; mod sdl; pub mod surface; pub mod timer; pub mod touch; pub mod version; pub mod video; // modules #[cfg(feature = "gfx")] pub mod gfx; #[cfg(feature = "image")] pub mod image;<|fim▁hole|>#[cfg(feature = "ttf")] pub mod ttf; mod common; // Export return types and such from the common module. pub use crate::common::IntegerOrSdlError; #[cfg(feature = "raw-window-handle")] pub mod raw_window_handle;<|fim▁end|>
#[cfg(feature = "mixer")] pub mod mixer;
<|file_name|>badwebserver_jsonly.py<|end_file_name|><|fim▁begin|>import socket, sys, time, argparse parser = argparse.ArgumentParser(description="This bad server accepts an HTTP connection and replies with a valid HTML document which links to assets. However, attemps to load the assets should result in a net::ERR_EMPTY_RESPONSE.") parser.add_argument("-p", "--port", type=int, help="The port to listen for new connections on.", default=8080) parser.add_argument("-t", "--tries", type=int, help="The number of attempts before asset requests will be responded to successfully", default=5) args = parser.parse_args() serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) serversocket.bind(('localhost', args.port)) serversocket.listen(5) print "The bad web server is listening on port %s. Requests for the HTML index will always be replied to. Assets requests will be responded to after %s unsuccessful attempts.\n" % (args.port, args.tries) response_text = """HTTP/1.0 200 OK Server: BadWebServer v0.1 Content-Type: text/html <!DOCTYPE html> <head> <meta charset="utf-8"> <title>Bad Web Server</title> <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.8.0/jquery.min.js"></script> <script src="/script.js" id="script01"></script> <script type="text/javascript"> function refresh_location_hints() { $('#for_script01').val($('#script01').attr('src')); $('#for_css01').val($('#css01').attr('href')); $('#for_img01').val($('#img01').attr('src')); $('#img01').attr('alt', $('#img01').attr('src')); setTimeout(function() { refresh_location_hints(); }, 1000); } $(document).ready(function() { setTimeout(function() { refresh_location_hints(); }, 1000); }); </script> <style> input { width: 600px; } </style> </head> <body> <header> <h1>About Bad Web Server</h1> <p>The bad web server will correctly transfer a valid HTML5 document to the browser when the browser requests the resource identified as '/'. 
The page will also request images, stylesheets and javascript resources from the server - but these should all result in the browser encountering a socket error and triggering a net::ERR_EMPTY_RESPONSE. The javascript will correctly load after the 5th attempt and display an alert to the user when it loads correctly, as will the CSS resource. We also import JQuery to dynamicly hint at the current location of each failed resource for testing.</p> </header> <article> <input type="text" id="for_script01"> External Script (#script01) URL<br> </article> </body> </html>""" js_response_text = """HTTP/1.0 200 OK Server: BadWebServer v0.1 Content-Type: text/javascript alert("Javascript resource ('#script_01') loaded successfully after %s attempts");""" % args.tries css_response_text = """HTTP/1.0 200 OK Server: BadWebServer v0.1 Content-Type: text/stylesheet * { margin: 5px; padding: 5px; } body { background-color: #00ff00; color: #555555; }""" css_requests = js_requests = 0 while True: #accept connections from outside (clientsocket, address) = serversocket.accept() chunks = [] bytes_recd = 0 chunk = "" while "\r\n\r\n" not in chunk: chunk = clientsocket.recv(min(2048 - bytes_recd, 2048)) if chunk == '': raise RuntimeError("socket connection broken (but not by me)") chunks.append(chunk) bytes_recd = bytes_recd + len(chunk) header = ''.join(chunks) print "Received: " + header request_line = header.split("\r\n")[0] resource_marker = request_line.split()[1] if resource_marker is "/" or resource_marker is "/index.html" or resource_marker is "/index.htm": print "^ INDEX - WILL REPLY ^" clientsocket.send(response_text); clientsocket.shutdown(0) elif ".css" in resource_marker: css_requests += 1 if css_requests > args.tries: css_requests = 0 print "^ FINAL CSS REQUEST - WILL REPLY ^" clientsocket.send(css_response_text) clientsocket.shutdown(0) else: print "^ CSS REQUEST #%s - WILL NOT REPLY ^" % css_requests elif ".js" in resource_marker: js_requests += 1<|fim▁hole|> 
clientsocket.shutdown(0) else: print "^ JS REQUEST #%s - WILL NOT REPLY ^" % js_requests else: print "^ WILL NOT REPLY ^" print "\n" clientsocket.close()<|fim▁end|>
if js_requests > args.tries: js_requests = 0 print "^ FINAL JS REQUEST - WILL REPLY ^" clientsocket.send(js_response_text)
<|file_name|>positionList.py<|end_file_name|><|fim▁begin|>############################################################################### # This file is part of openWNS (open Wireless Network Simulator) # _____________________________________________________________________________ # # Copyright (C) 2004-2007 # Chair of Communication Networks (ComNets)<|fim▁hole|># Kopernikusstr. 16, D-52074 Aachen, Germany # phone: ++49-241-80-27910, # fax: ++49-241-80-22242 # email: [email protected] # www: http://www.openwns.org # _____________________________________________________________________________ # # openWNS is free software; you can redistribute it and/or modify it under the # terms of the GNU Lesser General Public License version 2 as published by the # Free Software Foundation; # # openWNS is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################### import scenarios.interfaces import openwns.geometry.position import math class PositionListPlacer(scenarios.interfaces.INodePlacer): """ Place a number of nodes on the given positions. 
""" def __init__(self, numberOfNodes = 1, positionsList = [openwns.geometry.position.Position(1,1)], rotate = 0.0): """ @type numberOfNodes: int @param numberOfNodes: The number of nodes on the circle @Type: position: float @param position: distance from BS in Meters for every single node @type rotate: float @param rotate: Rotate the final result by rotate in radiant [0..2pi] """ self.center = openwns.geometry.position.Position(x = 0.0, y = 0.0, z = 0.0) self.numberOfNodes = numberOfNodes self.positionsList = positionsList self.rotate = rotate def setCenter(self, center): self.center = center def getPositions(self): positions = [] for i in xrange(self.numberOfNodes): x = self.positionsList[i].x y = self.positionsList[i].y v = openwns.geometry.position.Vector(x = x, y = y, z = 0.0) p = v.turn2D(self.rotate).toPosition() positions.append(p) return [p + self.center for p in positions] def isInside(self, position): for i in xrange(self.numberOfNodes): x = self.positionsList[i].x y = self.positionsList[i].y v = openwns.geometry.position.Vector(x = x, y = y, z = 0.0) p = v.turn2D(self.rotate).toPosition() if p.x + self.center.x == position.x: return True return False<|fim▁end|>
<|file_name|>RowSelector.ts<|end_file_name|><|fim▁begin|>import {Component, Input, Output, EventEmitter, OnInit, DoCheck} from "@angular/core"; @Component({ selector: "mfRowSelector", template: ` <input type="checkbox" id="{{checkboxId}}" [checked]="isChecked" (change)="onChange($event)" /> <label attr.for="{{checkboxId}}"></label> ` }) export class RowSelector implements OnInit, DoCheck { @Output("selectEntity") rowSelected = new EventEmitter(); @Input("entity") private rowEntity: any = Object; @Input("selectedEntities") private selectedEntities: any[]; @Input("checkboxId") checkboxId: string; isChecked: boolean = false; public constructor() { } <|fim▁hole|> } public ngDoCheck() { this.getIsChecked(); } private getIsChecked() { if (this.selectedEntities != null) { let index = this.selectedEntities.indexOf(this.rowEntity); this.isChecked = index > -1; } } onChange($event) { this.isChecked = !this.isChecked; this.rowSelected.emit(this.rowEntity); } }<|fim▁end|>
public ngOnInit() { this.getIsChecked();
<|file_name|>oapMemoryApiTests.cpp<|end_file_name|><|fim▁begin|>/* * Copyright 2016 - 2021 Marcin Matula * * This file is part of Oap. * * Oap is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Oap is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Oap. If not, see <http://www.gnu.org/licenses/>. */ #include "gtest/gtest.h" #include "oapHostMemoryApi.h" class OapMemoryApiTests : public testing::Test { public: virtual void SetUp() {} virtual void TearDown() {} }; TEST_F(OapMemoryApiTests, Test_1) { oap::Memory memory = oap::host::NewMemory ({1, 1}); oap::Memory memory1 = oap::host::ReuseMemory (memory); oap::Memory memory2 = oap::host::ReuseMemory (memory); oap::host::DeleteMemory (memory); oap::host::DeleteMemory (memory1); oap::host::DeleteMemory (memory2); } TEST_F(OapMemoryApiTests, Test_2) { oap::Memory memory = oap::host::NewMemoryWithValues ({2, 1}, 2.f); oap::host::DeleteMemory (memory);<|fim▁hole|>}<|fim▁end|>
<|file_name|>ImageDialog.java<|end_file_name|><|fim▁begin|>package net.sf.memoranda.ui.htmleditor; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.Frame; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Image; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.WindowEvent; import java.awt.event.WindowListener; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JDialog; import javax.swing.JFileChooser; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JTextField; import javax.swing.UIManager; import javax.swing.border.EmptyBorder; import javax.swing.border.EtchedBorder; import net.sf.memoranda.ui.htmleditor.util.Local; /** * <p>Title: </p> * <p>Description: </p> * <p>Copyright: Copyright (c) 2002</p> * <p>Company: </p> * @author unascribed * @version 1.0 */ public class ImageDialog extends JDialog implements WindowListener { /** * */ private static final long serialVersionUID = 5326851249529076804L; JPanel headerPanel = new JPanel(new FlowLayout(FlowLayout.LEFT)); JLabel header = new JLabel(); JPanel areaPanel = new JPanel(new GridBagLayout()); GridBagConstraints gbc; JLabel jLabel1 = new JLabel(); public JTextField fileField = new JTextField(); JButton browseB = new JButton(); JLabel jLabel2 = new JLabel(); public JTextField altField = new JTextField(); JLabel jLabel3 = new JLabel(); public JTextField widthField = new JTextField(); JLabel jLabel4 = new JLabel(); public JTextField heightField = new JTextField(); JLabel jLabel5 = new JLabel(); public JTextField hspaceField = new JTextField(); JLabel jLabel6 = new JLabel(); public JTextField vspaceField = new JTextField(); JLabel jLabel7 = new JLabel(); public JTextField borderField = new JTextField(); JLabel jLabel8 = new JLabel(); String[] aligns = {"left", "right", "top", "middle", "bottom", 
"absmiddle", "texttop", "baseline"}; // Note: align values are not localized because they are HTML keywords public JComboBox<String> alignCB = new JComboBox<String>(aligns); JLabel jLabel9 = new JLabel();<|fim▁hole|> JButton cancelB = new JButton(); public boolean CANCELLED = false; public ImageDialog(Frame frame) { super(frame, Local.getString("Image"), true); try { jbInit(); pack(); } catch (Exception ex) { ex.printStackTrace(); } super.addWindowListener(this); } public ImageDialog() { this(null); } void jbInit() throws Exception { this.setResizable(false); // three Panels, so used BorderLayout for this dialog. headerPanel.setBorder(new EmptyBorder(new Insets(0, 5, 0, 5))); headerPanel.setBackground(Color.WHITE); header.setFont(new java.awt.Font("Dialog", 0, 20)); header.setForeground(new Color(0, 0, 124)); header.setText(Local.getString("Image")); header.setIcon(new ImageIcon( net.sf.memoranda.ui.htmleditor.ImageDialog.class.getResource( "resources/icons/imgbig.png"))); headerPanel.add(header); this.getContentPane().add(headerPanel, BorderLayout.NORTH); areaPanel.setBorder(new EtchedBorder(Color.white, new Color(142, 142, 142))); jLabel1.setText(Local.getString("Image file")); gbc = new GridBagConstraints(); gbc.gridx = 0; gbc.gridy = 0; gbc.insets = new Insets(10, 10, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(jLabel1, gbc); fileField.setMinimumSize(new Dimension(200, 25)); fileField.setPreferredSize(new Dimension(285, 25)); gbc = new GridBagConstraints(); gbc.gridx = 1; gbc.gridy = 0; gbc.gridwidth = 5; gbc.insets = new Insets(10, 5, 5, 5); gbc.anchor = GridBagConstraints.WEST; gbc.fill = GridBagConstraints.HORIZONTAL; areaPanel.add(fileField, gbc); browseB.setMinimumSize(new Dimension(25, 25)); browseB.setPreferredSize(new Dimension(25, 25)); browseB.setIcon(new ImageIcon( net.sf.memoranda.ui.htmleditor.ImageDialog.class.getResource( "resources/icons/fileopen16.png"))); browseB.addActionListener(new java.awt.event.ActionListener() { public 
void actionPerformed(ActionEvent e) { browseB_actionPerformed(e); } }); gbc = new GridBagConstraints(); gbc.gridx = 6; gbc.gridy = 0; gbc.insets = new Insets(10, 5, 5, 10); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(browseB, gbc); jLabel2.setText(Local.getString("ALT text")); gbc = new GridBagConstraints(); gbc.gridx = 0; gbc.gridy = 1; gbc.insets = new Insets(5, 10, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(jLabel2, gbc); altField.setPreferredSize(new Dimension(315, 25)); altField.setMinimumSize(new Dimension(200, 25)); gbc = new GridBagConstraints(); gbc.gridx = 1; gbc.gridy = 1; gbc.gridwidth = 6; gbc.insets = new Insets(5, 5, 5, 10); gbc.anchor = GridBagConstraints.WEST; gbc.fill = GridBagConstraints.HORIZONTAL; areaPanel.add(altField, gbc); jLabel3.setText(Local.getString("Width")); gbc = new GridBagConstraints(); gbc.gridx = 0; gbc.gridy = 2; gbc.insets = new Insets(5, 10, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(jLabel3, gbc); widthField.setPreferredSize(new Dimension(30, 25)); widthField.setMinimumSize(new Dimension(30, 25)); gbc = new GridBagConstraints(); gbc.gridx = 1; gbc.gridy = 2; gbc.insets = new Insets(5, 5, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(widthField, gbc); jLabel4.setText(Local.getString("Height")); gbc = new GridBagConstraints(); gbc.gridx = 2; gbc.gridy = 2; gbc.insets = new Insets(5, 50, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(jLabel4, gbc); heightField.setMinimumSize(new Dimension(30, 25)); heightField.setPreferredSize(new Dimension(30, 25)); gbc = new GridBagConstraints(); gbc.gridx = 3; gbc.gridy = 2; gbc.insets = new Insets(5, 5, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(heightField, gbc); jLabel5.setText(Local.getString("H. 
space")); gbc = new GridBagConstraints(); gbc.gridx = 0; gbc.gridy = 3; gbc.insets = new Insets(5, 10, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(jLabel5, gbc); hspaceField.setMinimumSize(new Dimension(30, 25)); hspaceField.setPreferredSize(new Dimension(30, 25)); hspaceField.setText("0"); gbc = new GridBagConstraints(); gbc.gridx = 1; gbc.gridy = 3; gbc.insets = new Insets(5, 5, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(hspaceField, gbc); jLabel6.setText(Local.getString("V. space")); gbc = new GridBagConstraints(); gbc.gridx = 2; gbc.gridy = 3; gbc.insets = new Insets(5, 50, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(jLabel6, gbc); vspaceField.setMinimumSize(new Dimension(30, 25)); vspaceField.setPreferredSize(new Dimension(30, 25)); vspaceField.setText("0"); gbc = new GridBagConstraints(); gbc.gridx = 3; gbc.gridy = 3; gbc.insets = new Insets(5, 5, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(vspaceField, gbc); jLabel7.setText(Local.getString("Border")); gbc = new GridBagConstraints(); gbc.gridx = 0; gbc.gridy = 4; gbc.insets = new Insets(5, 10, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(jLabel7, gbc); borderField.setMinimumSize(new Dimension(30, 25)); borderField.setPreferredSize(new Dimension(30, 25)); borderField.setText("0"); gbc = new GridBagConstraints(); gbc.gridx = 1; gbc.gridy = 4; gbc.insets = new Insets(5, 5, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(borderField, gbc); jLabel8.setText(Local.getString("Align")); gbc = new GridBagConstraints(); gbc.gridx = 2; gbc.gridy = 4; gbc.insets = new Insets(5, 50, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(jLabel8, gbc); alignCB.setBackground(new Color(230, 230, 230)); alignCB.setFont(new java.awt.Font("Dialog", 1, 10)); alignCB.setPreferredSize(new Dimension(100, 25)); alignCB.setSelectedIndex(0); gbc = new GridBagConstraints(); gbc.gridx = 3; gbc.gridy = 4; gbc.gridwidth = 2; gbc.insets = new Insets(5, 
5, 5, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(alignCB, gbc); jLabel9.setText(Local.getString("Hyperlink")); gbc = new GridBagConstraints(); gbc.gridx = 0; gbc.gridy = 5; gbc.insets = new Insets(5, 10, 10, 5); gbc.anchor = GridBagConstraints.WEST; areaPanel.add(jLabel9, gbc); urlField.setPreferredSize(new Dimension(315, 25)); urlField.setMinimumSize(new Dimension(200, 25)); gbc = new GridBagConstraints(); gbc.gridx = 1; gbc.gridy = 5; gbc.gridwidth = 6; gbc.insets = new Insets(5, 5, 10, 10); gbc.fill = GridBagConstraints.HORIZONTAL; gbc.anchor = GridBagConstraints.WEST; areaPanel.add(urlField, gbc); this.getContentPane().add(areaPanel, BorderLayout.CENTER); okB.setMaximumSize(new Dimension(100, 26)); okB.setMinimumSize(new Dimension(100, 26)); okB.setPreferredSize(new Dimension(100, 26)); okB.setText(Local.getString("Ok")); okB.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(ActionEvent e) { okB_actionPerformed(e); } }); this.getRootPane().setDefaultButton(okB); cancelB.setMaximumSize(new Dimension(100, 26)); cancelB.setMinimumSize(new Dimension(100, 26)); cancelB.setPreferredSize(new Dimension(100, 26)); cancelB.setText(Local.getString("Cancel")); cancelB.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(ActionEvent e) { cancelB_actionPerformed(e); } }); buttonsPanel.add(okB, null); buttonsPanel.add(cancelB, null); this.getContentPane().add(buttonsPanel, BorderLayout.SOUTH); } void okB_actionPerformed(ActionEvent e) { this.dispose(); } void cancelB_actionPerformed(ActionEvent e) { CANCELLED = true; this.dispose(); } private ImageIcon getPreviewIcon(java.io.File file) { ImageIcon tmpIcon = new ImageIcon(file.getPath()); ImageIcon thmb = null; if (tmpIcon.getIconHeight() > 48) { thmb = new ImageIcon(tmpIcon.getImage() .getScaledInstance( -1, 48, Image.SCALE_DEFAULT)); } else { thmb = tmpIcon; } if (thmb.getIconWidth() > 350) { return new ImageIcon(thmb.getImage() 
.getScaledInstance(350, -1, Image.SCALE_DEFAULT)); } else { return thmb; } } public void updatePreview() { try { if (!(new java.net.URL(fileField.getText()).getPath()).equals("")) header.setIcon(getPreviewIcon(new java.io.File( new java.net.URL(fileField.getText()).getPath()))); } catch (Exception ex) { ex.printStackTrace(); } } public void windowOpened(WindowEvent e) { } public void windowClosing(WindowEvent e) { CANCELLED = true; this.dispose(); } public void windowClosed(WindowEvent e) { } public void windowIconified(WindowEvent e) { } public void windowDeiconified(WindowEvent e) { } public void windowActivated(WindowEvent e) { } public void windowDeactivated(WindowEvent e) { } void browseB_actionPerformed(ActionEvent e) { // Fix until Sun's JVM supports more locales... UIManager.put("FileChooser.lookInLabelText", Local .getString("Look in:")); UIManager.put("FileChooser.upFolderToolTipText", Local.getString( "Up One Level")); UIManager.put("FileChooser.newFolderToolTipText", Local.getString( "Create New Folder")); UIManager.put("FileChooser.listViewButtonToolTipText", Local .getString("List")); UIManager.put("FileChooser.detailsViewButtonToolTipText", Local .getString("Details")); UIManager.put("FileChooser.fileNameLabelText", Local.getString( "File Name:")); UIManager.put("FileChooser.filesOfTypeLabelText", Local.getString( "Files of Type:")); UIManager.put("FileChooser.openButtonText", Local.getString("Open")); UIManager.put("FileChooser.openButtonToolTipText", Local.getString( "Open selected file")); UIManager .put("FileChooser.cancelButtonText", Local.getString("Cancel")); UIManager.put("FileChooser.cancelButtonToolTipText", Local.getString( "Cancel")); JFileChooser chooser = new JFileChooser(); chooser.setFileHidingEnabled(false); chooser.setDialogTitle(Local.getString("Choose an image file")); chooser.setAcceptAllFileFilterUsed(false); chooser.setFileSelectionMode(JFileChooser.FILES_ONLY); chooser.addChoosableFileFilter( new 
net.sf.memoranda.ui.htmleditor.filechooser.ImageFilter()); chooser.setAccessory( new net.sf.memoranda.ui.htmleditor.filechooser.ImagePreview( chooser)); chooser.setPreferredSize(new Dimension(550, 375)); java.io.File lastSel = (java.io.File) Context.get( "LAST_SELECTED_IMG_FILE"); if (lastSel != null) chooser.setCurrentDirectory(lastSel); if (chooser.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) { try { fileField.setText(chooser.getSelectedFile().toURI().toURL().toString()); header.setIcon(getPreviewIcon(chooser.getSelectedFile())); Context .put("LAST_SELECTED_IMG_FILE", chooser .getSelectedFile()); } catch (Exception ex) { fileField.setText(chooser.getSelectedFile().getPath()); } try { ImageIcon img = new ImageIcon(chooser.getSelectedFile() .getPath()); widthField.setText(new Integer(img.getIconWidth()).toString()); heightField .setText(new Integer(img.getIconHeight()).toString()); } catch (Exception ex) { ex.printStackTrace(); } } } }<|fim▁end|>
public JTextField urlField = new JTextField(); JPanel buttonsPanel = new JPanel(new FlowLayout(FlowLayout.RIGHT, 10, 10)); JButton okB = new JButton();
<|file_name|>run_test.py<|end_file_name|><|fim▁begin|>import numdifftools<|fim▁hole|><|fim▁end|>
numdifftools.test()
<|file_name|>issue-13703.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // pretty-expanded FIXME #23616<|fim▁hole|><|fim▁end|>
pub struct Foo<'a, 'b: 'a> { foo: &'a &'b isize } pub fn foo<'a, 'b>(x: Foo<'a, 'b>, _o: Option<& & ()>) { let _y = x.foo; } fn main() {}
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core'; // Visual Recognition import { VisualRecognitionService } from './visual-recognition.service'; @Component({ selector: 'app-root', templateUrl: './app.component.html', styleUrls: ['./app.component.css'] }) export class AppComponent { title = 'CyberusAI';<|fim▁hole|> sources: Array<Object>; constructor(private visualRecognitionService: VisualRecognitionService) { this.sources = [ { src: "assets/videos/VIRAT_S_010000_00_000000_000165.mp4", type: "video/mp4" }, { src: "assets/videos/out.mp4", type: "video/mp4" } ]; } public testWatson() { this.visualRecognitionService.classify("assets/putin.jpg"); } ngOnInit() { } }<|fim▁end|>
<|file_name|>otp_test.go<|end_file_name|><|fim▁begin|>package otp import ( "crypto/sha512" "encoding/base64" "net/http" "net/http/httptest" "testing" "github.com/volatiletech/authboss/v3" "github.com/volatiletech/authboss/v3/mocks" ) type testUser struct { PID string OTPs string } func (t *testUser) GetPID() string { return t.PID } func (t *testUser) PutPID(pid string) { t.PID = pid } func (t *testUser) GetOTPs() string { return t.OTPs } func (t *testUser) PutOTPs(otps string) { t.OTPs = otps } func TestMustBeOTPable(t *testing.T) { t.Parallel() var user authboss.User = &testUser{} _ = MustBeOTPable(user) } func TestInit(t *testing.T) { t.Parallel() ab := authboss.New() router := &mocks.Router{} renderer := &mocks.Renderer{} errHandler := &mocks.ErrorHandler{} ab.Config.Core.Router = router ab.Config.Core.ViewRenderer = renderer ab.Config.Core.ErrorHandler = errHandler o := &OTP{} if err := o.Init(ab); err != nil { t.Fatal(err) } routes := []string{"/otp/login", "/otp/add", "/otp/clear"} if err := router.HasGets(routes...); err != nil { t.Error(err) } if err := router.HasPosts(routes...); err != nil { t.Error(err) } } func TestLoginGet(t *testing.T) { t.Parallel() ab := authboss.New() responder := &mocks.Responder{} ab.Config.Core.Responder = responder a := &OTP{ab} r := mocks.Request("POST") r.URL.RawQuery = "redir=/redirectpage" a.LoginGet(nil, r) if responder.Page != PageLogin { t.Error("wanted login page, got:", responder.Page) } if responder.Status != http.StatusOK { t.Error("wanted ok status, got:", responder.Status) } if got := responder.Data[authboss.FormValueRedirect]; got != "/redirectpage" { t.Error("redirect page was wrong:", got) } } type testHarness struct { otp *OTP ab *authboss.Authboss bodyReader *mocks.BodyReader responder *mocks.Responder redirector *mocks.Redirector session *mocks.ClientStateRW storer *mocks.ServerStorer } func testSetup() *testHarness { harness := &testHarness{} harness.ab = authboss.New() harness.bodyReader = 
&mocks.BodyReader{} harness.redirector = &mocks.Redirector{} harness.responder = &mocks.Responder{} harness.session = mocks.NewClientRW() harness.storer = mocks.NewServerStorer() harness.ab.Config.Paths.AuthLoginOK = "/login/ok" harness.ab.Config.Core.BodyReader = harness.bodyReader harness.ab.Config.Core.Logger = mocks.Logger{} harness.ab.Config.Core.Responder = harness.responder harness.ab.Config.Core.Redirector = harness.redirector harness.ab.Config.Storage.SessionState = harness.session harness.ab.Config.Storage.Server = harness.storer harness.otp = &OTP{harness.ab} return harness } func TestLoginPostSuccess(t *testing.T) { t.Parallel() setupMore := func(h *testHarness) *testHarness { h.bodyReader.Return = mocks.Values{ PID: "[email protected]", Password: "3cc94671-958a912d-bd5a3ba7-3326a380", } h.storer.Users["[email protected]"] = &mocks.User{ Email: "[email protected]", // 3cc94671-958a912d-bd5a3ba7-3326a380 OTPs: "2aID,2aIDHxmTIy1W7Uyz9c+iqhOJSE0a2Yna3zTRTs2q/X7Bv3xdVjExoztBEG4sQ2Nn3jcaPxdIuhslvSsjaYK5uA==", } h.session.ClientValues[authboss.SessionHalfAuthKey] = "true" return h } t.Run("normal", func(t *testing.T) { t.Parallel() h := setupMore(testSetup()) var beforeCalled, afterCalled bool var beforeHasValues, afterHasValues bool h.ab.Events.Before(authboss.EventAuth, func(w http.ResponseWriter, r *http.Request, handled bool) (bool, error) { beforeCalled = true beforeHasValues = r.Context().Value(authboss.CTXKeyValues) != nil return false, nil }) h.ab.Events.After(authboss.EventAuth, func(w http.ResponseWriter, r *http.Request, handled bool) (bool, error) { afterCalled = true afterHasValues = r.Context().Value(authboss.CTXKeyValues) != nil return false, nil }) r := mocks.Request("POST") resp := httptest.NewRecorder() w := h.ab.NewResponse(resp) if err := h.otp.LoginPost(w, r); err != nil { t.Error(err) } if resp.Code != http.StatusTemporaryRedirect { t.Error("code was wrong:", resp.Code) } if h.redirector.Options.RedirectPath != "/login/ok" { 
t.Error("redirect path was wrong:", h.redirector.Options.RedirectPath) } if _, ok := h.session.ClientValues[authboss.SessionHalfAuthKey]; ok { t.Error("half auth should have been deleted") } if pid := h.session.ClientValues[authboss.SessionKey]; pid != "[email protected]" { t.Error("pid was wrong:", pid) } // Remaining length of the chunk of base64 is 4 characters if len(h.storer.Users["[email protected]"].OTPs) != 4 { t.Error("the user should have used one of his OTPs") } if !beforeCalled { t.Error("before should have been called") } if !afterCalled { t.Error("after should have been called") } if !beforeHasValues { t.Error("before callback should have access to values") } if !afterHasValues { t.Error("after callback should have access to values") } }) t.Run("handledBefore", func(t *testing.T) { t.Parallel() h := setupMore(testSetup()) var beforeCalled bool h.ab.Events.Before(authboss.EventAuth, func(w http.ResponseWriter, r *http.Request, handled bool) (bool, error) { w.WriteHeader(http.StatusTeapot) beforeCalled = true return true, nil }) r := mocks.Request("POST") resp := httptest.NewRecorder() w := h.ab.NewResponse(resp) if err := h.otp.LoginPost(w, r); err != nil { t.Error(err) } if h.responder.Status != 0 { t.Error("a status should never have been sent back") } if _, ok := h.session.ClientValues[authboss.SessionKey]; ok { t.Error("session key should not have been set") } <|fim▁hole|> } if resp.Code != http.StatusTeapot { t.Error("should have left the response alone once teapot was sent") } }) t.Run("handledAfter", func(t *testing.T) { t.Parallel() h := setupMore(testSetup()) var afterCalled bool h.ab.Events.After(authboss.EventAuth, func(w http.ResponseWriter, r *http.Request, handled bool) (bool, error) { w.WriteHeader(http.StatusTeapot) afterCalled = true return true, nil }) r := mocks.Request("POST") resp := httptest.NewRecorder() w := h.ab.NewResponse(resp) if err := h.otp.LoginPost(w, r); err != nil { t.Error(err) } if h.responder.Status != 0 { 
t.Error("a status should never have been sent back") } if _, ok := h.session.ClientValues[authboss.SessionKey]; !ok { t.Error("session key should have been set") } if !afterCalled { t.Error("after should have been called") } if resp.Code != http.StatusTeapot { t.Error("should have left the response alone once teapot was sent") } }) } func TestLoginPostBadPassword(t *testing.T) { t.Parallel() setupMore := func(h *testHarness) *testHarness { h.bodyReader.Return = mocks.Values{ PID: "[email protected]", Password: "nope", } h.storer.Users["[email protected]"] = &mocks.User{ Email: "[email protected]", Password: "", // hello world } return h } t.Run("normal", func(t *testing.T) { t.Parallel() h := setupMore(testSetup()) r := mocks.Request("POST") resp := httptest.NewRecorder() w := h.ab.NewResponse(resp) var afterCalled bool h.ab.Events.After(authboss.EventAuthFail, func(w http.ResponseWriter, r *http.Request, handled bool) (bool, error) { afterCalled = true return false, nil }) if err := h.otp.LoginPost(w, r); err != nil { t.Error(err) } if resp.Code != 200 { t.Error("wanted a 200:", resp.Code) } if h.responder.Data[authboss.DataErr] != "Invalid Credentials" { t.Error("wrong error:", h.responder.Data) } if _, ok := h.session.ClientValues[authboss.SessionKey]; ok { t.Error("user should not be logged in") } if !afterCalled { t.Error("after should have been called") } }) t.Run("handledAfter", func(t *testing.T) { t.Parallel() h := setupMore(testSetup()) r := mocks.Request("POST") resp := httptest.NewRecorder() w := h.ab.NewResponse(resp) var afterCalled bool h.ab.Events.After(authboss.EventAuthFail, func(w http.ResponseWriter, r *http.Request, handled bool) (bool, error) { w.WriteHeader(http.StatusTeapot) afterCalled = true return true, nil }) if err := h.otp.LoginPost(w, r); err != nil { t.Error(err) } if h.responder.Status != 0 { t.Error("responder should not have been called to give a status") } if _, ok := h.session.ClientValues[authboss.SessionKey]; ok { 
t.Error("user should not be logged in") } if !afterCalled { t.Error("after should have been called") } if resp.Code != http.StatusTeapot { t.Error("should have left the response alone once teapot was sent") } }) } func TestAuthPostUserNotFound(t *testing.T) { t.Parallel() harness := testSetup() harness.bodyReader.Return = mocks.Values{ PID: "[email protected]", Password: "world hello", } r := mocks.Request("POST") resp := httptest.NewRecorder() w := harness.ab.NewResponse(resp) // This event is really the only thing that separates // "user not found" from "bad password" var afterCalled bool harness.ab.Events.After(authboss.EventAuthFail, func(w http.ResponseWriter, r *http.Request, handled bool) (bool, error) { afterCalled = true return false, nil }) if err := harness.otp.LoginPost(w, r); err != nil { t.Error(err) } if resp.Code != 200 { t.Error("wanted a 200:", resp.Code) } if harness.responder.Data[authboss.DataErr] != "Invalid Credentials" { t.Error("wrong error:", harness.responder.Data) } if _, ok := harness.session.ClientValues[authboss.SessionKey]; ok { t.Error("user should not be logged in") } if afterCalled { t.Error("after should not have been called") } } func TestAddGet(t *testing.T) { t.Parallel() h := testSetup() h.storer.Users["[email protected]"] = &mocks.User{ Email: "[email protected]", // 3cc94671-958a912d-bd5a3ba7-3326a380 OTPs: "2aID,2aIDHxmTIy1W7Uyz9c+iqhOJSE0a2Yna3zTRTs2q/X7Bv3xdVjExoztBEG4sQ2Nn3jcaPxdIuhslvSsjaYK5uA==", } h.session.ClientValues[authboss.SessionKey] = "[email protected]" r := mocks.Request("POST") w := h.ab.NewResponse(httptest.NewRecorder()) var err error r, err = h.ab.LoadClientState(w, r) if err != nil { t.Fatal(err) } if err := h.otp.AddGet(w, r); err != nil { t.Fatal(err) } if h.responder.Page != PageAdd { t.Error("wanted add page, got:", h.responder.Page) } if h.responder.Status != http.StatusOK { t.Error("wanted ok status, got:", h.responder.Status) } if ln := h.responder.Data[DataNumberOTPs]; ln != "2" { t.Error("want 
two otps:", ln) } } func TestAddPost(t *testing.T) { t.Parallel() h := testSetup() uname := "[email protected]" h.storer.Users[uname] = &mocks.User{ Email: uname, // 3cc94671-958a912d-bd5a3ba7-3326a380 OTPs: "2aID,2aIDHxmTIy1W7Uyz9c+iqhOJSE0a2Yna3zTRTs2q/X7Bv3xdVjExoztBEG4sQ2Nn3jcaPxdIuhslvSsjaYK5uA==", } h.session.ClientValues[authboss.SessionKey] = uname r := mocks.Request("POST") w := h.ab.NewResponse(httptest.NewRecorder()) var err error r, err = h.ab.LoadClientState(w, r) if err != nil { t.Fatal(err) } if err := h.otp.AddPost(w, r); err != nil { t.Fatal(err) } if h.responder.Page != PageAdd { t.Error("wanted add page, got:", h.responder.Page) } if h.responder.Status != http.StatusOK { t.Error("wanted ok status, got:", h.responder.Status) } sum := sha512.Sum512([]byte(h.responder.Data[DataOTP].(string))) encoded := base64.StdEncoding.EncodeToString(sum[:]) otps := splitOTPs(h.storer.Users[uname].OTPs) if len(otps) != 3 || encoded != otps[2] { t.Error("expected one new otp to be appended to the end") } } func TestAddPostTooMany(t *testing.T) { t.Parallel() h := testSetup() uname := "[email protected]" h.storer.Users[uname] = &mocks.User{ Email: uname, OTPs: "2aID,2aID,2aID,2aID,2aID", } h.session.ClientValues[authboss.SessionKey] = uname r := mocks.Request("POST") w := h.ab.NewResponse(httptest.NewRecorder()) var err error r, err = h.ab.LoadClientState(w, r) if err != nil { t.Fatal(err) } if err := h.otp.AddPost(w, r); err != nil { t.Fatal(err) } if h.responder.Page != PageAdd { t.Error("wanted add page, got:", h.responder.Page) } if h.responder.Status != http.StatusOK { t.Error("wanted ok status, got:", h.responder.Status) } if len(h.responder.Data[authboss.DataValidation].(string)) == 0 { t.Error("there should have been a validation error") } otps := splitOTPs(h.storer.Users[uname].OTPs) if len(otps) != maxOTPs { t.Error("expected the number of OTPs to be equal to the maximum") } } func TestAddGetUserNotFound(t *testing.T) { t.Parallel() h := testSetup() r := 
mocks.Request("GET") w := h.ab.NewResponse(httptest.NewRecorder()) if err := h.otp.AddGet(w, r); err != authboss.ErrUserNotFound { t.Error("it should have failed with user not found") } } func TestAddPostUserNotFound(t *testing.T) { t.Parallel() h := testSetup() r := mocks.Request("POST") w := h.ab.NewResponse(httptest.NewRecorder()) if err := h.otp.AddPost(w, r); err != authboss.ErrUserNotFound { t.Error("it should have failed with user not found") } } func TestClearGet(t *testing.T) { t.Parallel() h := testSetup() h.storer.Users["[email protected]"] = &mocks.User{ Email: "[email protected]", // 3cc94671-958a912d-bd5a3ba7-3326a380 OTPs: "2aID,2aIDHxmTIy1W7Uyz9c+iqhOJSE0a2Yna3zTRTs2q/X7Bv3xdVjExoztBEG4sQ2Nn3jcaPxdIuhslvSsjaYK5uA==", } h.session.ClientValues[authboss.SessionKey] = "[email protected]" r := mocks.Request("POST") w := h.ab.NewResponse(httptest.NewRecorder()) var err error r, err = h.ab.LoadClientState(w, r) if err != nil { t.Fatal(err) } if err := h.otp.ClearGet(w, r); err != nil { t.Fatal(err) } if h.responder.Page != PageClear { t.Error("wanted clear page, got:", h.responder.Page) } if h.responder.Status != http.StatusOK { t.Error("wanted ok status, got:", h.responder.Status) } if ln := h.responder.Data[DataNumberOTPs]; ln != "2" { t.Error("want two otps:", ln) } } func TestClearPost(t *testing.T) { t.Parallel() h := testSetup() uname := "[email protected]" h.storer.Users[uname] = &mocks.User{ Email: uname, // 3cc94671-958a912d-bd5a3ba7-3326a380 OTPs: "2aID,2aIDHxmTIy1W7Uyz9c+iqhOJSE0a2Yna3zTRTs2q/X7Bv3xdVjExoztBEG4sQ2Nn3jcaPxdIuhslvSsjaYK5uA==", } h.session.ClientValues[authboss.SessionKey] = uname r := mocks.Request("POST") w := h.ab.NewResponse(httptest.NewRecorder()) var err error r, err = h.ab.LoadClientState(w, r) if err != nil { t.Fatal(err) } if err := h.otp.ClearPost(w, r); err != nil { t.Fatal(err) } if h.responder.Page != PageAdd { t.Error("wanted add page, got:", h.responder.Page) } if h.responder.Status != http.StatusOK { t.Error("wanted 
ok status, got:", h.responder.Status) } otps := splitOTPs(h.storer.Users[uname].OTPs) if len(otps) != 0 { t.Error("expected all otps to be gone") } } func TestClearGetUserNotFound(t *testing.T) { t.Parallel() h := testSetup() r := mocks.Request("GET") w := h.ab.NewResponse(httptest.NewRecorder()) if err := h.otp.ClearGet(w, r); err != authboss.ErrUserNotFound { t.Error("it should have failed with user not found") } } func TestClearPostUserNotFound(t *testing.T) { t.Parallel() h := testSetup() r := mocks.Request("POST") w := h.ab.NewResponse(httptest.NewRecorder()) if err := h.otp.AddPost(w, r); err != authboss.ErrUserNotFound { t.Error("it should have failed with user not found") } }<|fim▁end|>
if !beforeCalled { t.Error("before should have been called")
<|file_name|>lookupCodingSystem.js<|end_file_name|><|fim▁begin|>'use strict'; <|fim▁hole|>/** * @summary Retrieve a Coding System object. * @param {String} source Abbreviation of the source coding system under test. * @param {Function} callback Function called on process completion, with standard (err, result) signature. * @return {Object} Coding System object * 'sourceAbbreviation' {String} Abbreviated name of the coding system * 'nomenclature' {String} Longer description * 'implementationDate' {String} Implementation date in ISO format * * @memberof LexiconService * @instance */ function lookupCodingSystem(source, cb) { if (!source) { return cb(this.error('INVALID_CODING_SYSTEM_SOURCE_VALUE')); } const findObject = { query: { sourceAbbreviation: source }, projection: serviceUtils.createProjection(['sourceAbbreviation', 'implementationDate']), }; this.find('CodingSystems', findObject, (err, res) => { if (err) { return cb(err); } const codingSystem = _.first(res); const impDate = _.get(codingSystem, 'implementationDate'); if (_.isDate(impDate)) { _.set(codingSystem, 'implementationDate', impDate.toISOString()); } cb(null, codingSystem); }); } module.exports = { lookupCodingSystem, };<|fim▁end|>
const _ = require('lodash'); const serviceUtils = require('../serviceUtils');
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import os, platform sysstr = platform.system() if sysstr == "Windows": LF = '\r\n' elif sysstr == "Linux": LF = '\n' def StripStr(str): # @Function: Remove space(' ') and indent('\t') at the begin and end of the string oldStr = '' newStr = str while oldStr != newStr: oldStr = newStr newStr = oldStr.strip('\t') newStr = newStr.strip(' ') return newStr def SplitStr(str, spliters=None): # @Function: Split string by spliter space(' ') and indent('\t') as default # spliters = [' ', '\t'] # spliters = [] # if spliter is not None: # spliters.append(spliter) if spliters is None: spliters = [' ', '\t'] destStrs = [] srcStrs = [str] while True: oldDestStrs = srcStrs[:] for s in spliters: for srcS in srcStrs: tempStrs = srcS.split(s) for tempS in tempStrs: tempS = StripStr(tempS) if tempS != '': destStrs.append(tempS) srcStrs = destStrs[:] destStrs = [] if oldDestStrs == srcStrs: destStrs = srcStrs[:] break return destStrs def isPathExists(path): if os.path.isdir(path): if os.path.exists(path): return True else: return False else: return False def WriteLog(logfile, contentlist, MODE='replace'): if os.path.exists(logfile): if MODE == 'replace':<|fim▁hole|> logStatus = open(logfile, 'w') else: logStatus = open(logfile, 'a') else: logStatus = open(logfile, 'w') if isinstance(contentlist, list) or isinstance(contentlist,tuple): for content in contentlist: logStatus.write("%s%s" % (content, LF)) else: logStatus.write(contentlist) logStatus.flush() logStatus.close()<|fim▁end|>
os.remove(logfile)
<|file_name|>AccessibilityControllerAtk.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2008, 2009, 2010 Apple Inc. All Rights Reserved. * Copyright (C) 2009 Jan Michael Alonzo * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include "config.h" #include "AccessibilityController.h" #if HAVE(ACCESSIBILITY) #include "AccessibilityCallbacks.h" #include "AccessibilityUIElement.h" #include "DumpRenderTree.h" #include <atk/atk.h> bool loggingAccessibilityEvents = false; AccessibilityController::AccessibilityController() : m_globalNotificationHandler(nullptr) { } AccessibilityController::~AccessibilityController() { } AccessibilityUIElement AccessibilityController::elementAtPoint(int x, int y) { // FIXME: implement return nullptr; } void AccessibilityController::platformResetToConsistentState() { } void AccessibilityController::setLogFocusEvents(bool) { } void AccessibilityController::setLogScrollingStartEvents(bool) { } void AccessibilityController::setLogValueChangeEvents(bool) { } void AccessibilityController::setLogAccessibilityEvents(bool logAccessibilityEvents) { if (logAccessibilityEvents == loggingAccessibilityEvents) return; if (!logAccessibilityEvents) { loggingAccessibilityEvents = false; disconnectAccessibilityCallbacks(); return; } connectAccessibilityCallbacks(); loggingAccessibilityEvents = true; } bool AccessibilityController::addNotificationListener(JSObjectRef functionCallback) { if (!functionCallback) return false; // Only one global notification listener. if (m_globalNotificationHandler) return false; m_globalNotificationHandler = AccessibilityNotificationHandler::create(); m_globalNotificationHandler->setNotificationFunctionCallback(functionCallback); return true; } void AccessibilityController::removeNotificationListener() { // Programmers should not be trying to remove a listener that's already removed. 
ASSERT(m_globalNotificationHandler); m_globalNotificationHandler = nullptr; } JSRetainPtr<JSStringRef> AccessibilityController::platformName() const { JSRetainPtr<JSStringRef> platformName(Adopt, JSStringCreateWithUTF8CString("atk")); return platformName; } AtkObject* AccessibilityController::childElementById(AtkObject* parent, const char* id)<|fim▁hole|> return nullptr; bool parentFound = false; AtkAttributeSet* attributeSet(atk_object_get_attributes(parent)); for (AtkAttributeSet* attributes = attributeSet; attributes; attributes = attributes->next) { AtkAttribute* attribute = static_cast<AtkAttribute*>(attributes->data); if (!strcmp(attribute->name, "html-id")) { if (!strcmp(attribute->value, id)) parentFound = true; break; } } atk_attribute_set_free(attributeSet); if (parentFound) return parent; int childCount = atk_object_get_n_accessible_children(parent); for (int i = 0; i < childCount; i++) { AtkObject* result = childElementById(atk_object_ref_accessible_child(parent, i), id); if (ATK_IS_OBJECT(result)) return result; } return nullptr; } #endif<|fim▁end|>
{ if (!ATK_IS_OBJECT(parent))
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod field_names_decoder; use self::field_names_decoder::FieldNamesDecoder; use csv::{self, ByteString, Decoded, Error, NextField, RecordTerminator, Result}; use rustc_serialize::Decodable; use std::fs::File; use std::io::{Cursor, Read}; use std::marker::PhantomData; use std::path::Path; /// A CSV reader that checks the headers. /// /// The lifetime parameter `'a` refers to the lifetime of the predicate used /// for comparing headers to field names. The `R` type parameter refers to the /// type of the underlying reader. /// /// This reader parses CSV data and exposes records via iterators that decode /// into types that implement [`rustc_serialize::Decodable`][Decodable]. This /// reader wraps the reader from the [`csv`][csv] crate to provide a /// [`decode()`](#method.decode) method that checks that the headers match the /// field names in the record type. /// /// If the ordering of the headers in the file doesn't matter for your use /// case, you can ask the reader to [reorder the /// columns](#method.reorder_columns) to match the headers to the corresponding /// field names. You also [ignore unused /// columns](#method.ignore_unused_columns) or [specify an arbitrary matching /// predicate](#method.headers_match_by). /// /// If you don't care whether the headers match the field names at all, see the /// [`csv`][csv] crate. 
/// /// # Example /// /// This example shows how to do type-based decoding for each record in the CSV /// data: /// /// ```rust /// extern crate rustc_serialize; /// # extern crate typed_csv; /// # fn main() { /// /// #[derive(RustcDecodable)] /// struct Record { /// count: usize, /// animal: String, /// description: String, /// } /// /// let data = "\ /// count,animal,description /// 7,penguin,happy /// 10,cheetah,fast /// 4,armadillo,armored /// 9,platypus,unique /// 7,mouse,small /// "; /// /// let rdr = typed_csv::Reader::from_string(data); /// for row in rdr.decode() { /// let Record { animal, description, count } = row.unwrap(); /// println!("{}, {}: {}", animal, description, count); /// } /// # } /// ``` /// /// Note that the headers must match the field names in `Record` (although you /// can ask the reader to [reorder the columns](#method.reorder_columns) to /// match the headers to the field names, [ignore unused /// columns](#method.ignore_unused_columns), or [specify an arbitrary matching /// predicate](#method.headers_match_by)). 
If the header row is incorrect, the /// iterator will return an `Err`: /// /// ```rust /// # extern crate rustc_serialize; /// # extern crate typed_csv; /// # fn main() { /// # /// # #[derive(RustcDecodable)] /// # struct Record { /// # count: usize, /// # animal: String, /// # description: String, /// # } /// # /// let bad_data = "\ /// bad,header,row /// 7,penguin,happy /// 10,cheetah,fast /// 7,mouse,small /// "; /// /// assert!(typed_csv::Reader::from_string(bad_data) /// .decode() /// .collect::<typed_csv::Result<Vec<Record>>>() /// .is_err()); /// # } /// ``` /// /// [csv]: https://github.com/BurntSushi/rust-csv /// [Decodable]: https://doc.rust-lang.org/rustc-serialize/rustc_serialize/trait.Decodable.html pub struct Reader<'a, R: Read> { csv: csv::Reader<R>, reorder_columns: bool, ignore_unused_columns: bool, headers_match_by: &'a Fn(&[u8], &[u8]) -> bool, } impl<R: Read> Reader<'static, R> { /// Creates a new typed CSV reader from a normal CSV reader. /// /// *Do not make this public!* If it was public, a CSV reader with /// `flexible = true` or `has_headers = false` could be passed in. fn from_csv_reader(csv: csv::Reader<R>) -> Reader<'static, R> { static F: fn(&[u8], &[u8]) -> bool = <[u8]>::eq; Reader { csv: csv, reorder_columns: false, ignore_unused_columns: false, headers_match_by: &F, } } /// Creates a new CSV reader from an arbitrary `io::Read`. /// /// The reader is buffered for you automatically. pub fn from_reader(r: R) -> Reader<'static, R> { Reader::from_csv_reader(csv::Reader::from_reader(r)) } } impl Reader<'static, File> { /// Creates a new CSV reader for the data at the file path given. pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Reader<'static, File>> { Ok(Reader::from_csv_reader(csv::Reader::from_file(path)?)) } } impl Reader<'static, Cursor<Vec<u8>>> { /// Creates a CSV reader for an in memory string buffer. 
pub fn from_string<S>(s: S) -> Reader<'static, Cursor<Vec<u8>>> where S: Into<String> { Reader::from_csv_reader(csv::Reader::from_string(s)) } /// Creates a CSV reader for an in memory buffer of bytes. pub fn from_bytes<V>(bytes: V) -> Reader<'static, Cursor<Vec<u8>>> where V: Into<Vec<u8>> { Reader::from_csv_reader(csv::Reader::from_bytes(bytes)) } } impl<'a, R: Read> Reader<'a, R> { /// Uses type-based decoding to read a single record from CSV data. /// /// The type that is being decoded into should correspond to *one full CSV /// record*. This can be a single struct, or arbitrarily nested tuples and /// structs, as long as all scalar types (integers, floats, characters, /// strings, single-element tuple structs containing a scalar type, and /// enums with 0 or 1 scalar arguments) are fields in structs. /// /// If the headers don't match the field names or a record cannot be /// decoded into the type requested, an error is returned. See the /// [`reorder_columns`](method.reorder_columns) method if you'd like for /// the reader to automatically reorder columns to match headers to field /// names. /// /// Enums are supported in a limited way. Namely, its variants must have /// exactly `1` parameter each. Each variant decodes based on its /// constituent type and variants are tried in the order that they appear /// in their `enum` definition. See below for examples. /// /// Note that single-element tuple structs (the newtype pattern) are /// supported. Unfortunately, to provide this functionality, a heuristic is /// necessary to differentiate field names in normal structs from those in /// tuple structs. As a result, field names in normal structs should not be /// of the form `_field{}` where `{}` is its position in the struct. /// /// # Examples /// /// This example shows how to decode records into a struct. Note that /// the headers must match the names of the struct members. 
/// /// /// ```rust /// extern crate rustc_serialize; /// # extern crate typed_csv; /// # fn main() { /// /// #[derive(Debug, PartialEq, RustcDecodable)] /// struct Record { /// count: usize, /// animal: String, /// description: String, /// } /// /// let data = "\ /// count,animal,description /// 7,penguin,happy /// 10,cheetah,fast /// 4,armadillo,armored /// "; /// /// let rdr = typed_csv::Reader::from_string(data); /// let rows = rdr.decode().collect::<typed_csv::Result<Vec<Record>>>().unwrap(); /// /// assert_eq!(rows, /// vec![Record { /// count: 7, /// animal: "penguin".to_string(), /// description: "happy".to_string(), /// }, /// Record { /// count: 10, /// animal: "cheetah".to_string(), /// description: "fast".to_string(), /// }, /// Record { /// count: 4, /// animal: "armadillo".to_string(), /// description: "armored".to_string(), /// }]); /// # } /// ``` /// /// We can get a little crazier with custom enum types, `Option` types, /// single-element tuple structs (the newtype pattern), and tuples of /// structs. An `Option` type in particular is useful when a column doesn't /// contain valid data in every record (whether it be empty or malformed). 
/// /// ```rust /// extern crate rustc_serialize; /// # extern crate typed_csv; /// # fn main() { /// /// #[derive(Debug, PartialEq, RustcDecodable)] /// struct MyUint(u32); /// /// #[derive(Debug, PartialEq, RustcDecodable)] /// enum Number { Integer(i64), Float(f64) } /// /// #[derive(Debug, PartialEq, RustcDecodable)] /// struct Part1 { /// name1: String, /// name2: String, /// dist: Option<MyUint>, /// dist2: Number, /// } /// /// #[derive(Debug, PartialEq, RustcDecodable)] /// struct Part2 { /// size: usize, /// } /// /// let data = "\ /// name1,\"name2\",dist,dist2,size /// foo,bar,1,1,2 /// foo,baz,,1.5,3 /// "; /// /// let mut rdr = typed_csv::Reader::from_string(data); /// let rows = rdr.decode().collect::<typed_csv::Result<Vec<(Part1, Part2)>>>().unwrap(); /// /// assert_eq!(rows, /// vec![(Part1 { /// name1: "foo".to_string(), /// name2: "bar".to_string(), /// dist: Some(MyUint(1)), /// dist2: Number::Integer(1), /// }, /// Part2 { size: 2 }), /// (Part1 { /// name1: "foo".to_string(), /// name2: "baz".to_string(), /// dist: None, /// dist2: Number::Float(1.5), /// }, /// Part2 { size: 3 })]); /// # } /// ``` pub fn decode<D: Decodable>(self) -> DecodedRecords<'a, R, D> { DecodedRecords {<|fim▁hole|> headers_match_by: self.headers_match_by, done_first: false, done: false, column_mapping: Vec::new(), field_count: 0, record_type: PhantomData, } } } impl<'a, R: Read> Reader<'a, R> { /// Allow the reader to reorder columns to match headers to field names. /// /// By default, the headers must match the field names of the decodable /// type exactly, including the order. However, the ordering of field names /// may not be relevant to your data type, so this option is available. /// /// In the case of duplicate field names, the ordering of columns /// corresponding to those fields will be preserved. 
/// /// # Examples /// /// This is a simple example that demonstrates reordering the columns to /// match headers to field names: /// /// ```rust /// extern crate rustc_serialize; /// # extern crate typed_csv; /// # fn main() { /// /// #[derive(Debug, PartialEq, RustcDecodable)] /// struct Record { /// count: usize, /// animal: String, /// description: String, /// } /// /// let data = "\ /// count,description,animal /// 7,happy,penguin /// 10,fast,cheetah /// 4,armored,armadillo /// "; /// /// let rdr = typed_csv::Reader::from_string(data); /// let rows = rdr.reorder_columns(true) /// .decode() /// .collect::<typed_csv::Result<Vec<Record>>>() /// .unwrap(); /// /// assert_eq!(rows, /// vec![Record { /// count: 7, /// animal: "penguin".to_string(), /// description: "happy".to_string(), /// }, /// Record { /// count: 10, /// animal: "cheetah".to_string(), /// description: "fast".to_string(), /// }, /// Record { /// count: 4, /// animal: "armadillo".to_string(), /// description: "armored".to_string(), /// }]); /// # } /// ``` /// /// Duplicate field names in decodable types are fine, and ordering of /// columns with duplicate headers is preserved: /// /// ```rust /// extern crate rustc_serialize; /// # extern crate typed_csv; /// # fn main() { /// /// #[derive(Debug, PartialEq, RustcDecodable)] /// struct Animal { /// count: usize, /// animal: String, /// } /// /// let data = "\ /// count,animal,animal,count /// 7,penguin,\"red panda\",2 /// 10,cheetah,fennec,9 /// 4,armadillo,quokka,3 /// "; /// /// type Record = (Animal, Animal); /// /// let rdr = typed_csv::Reader::from_string(data); /// let rows = rdr.reorder_columns(true) /// .decode() /// .collect::<typed_csv::Result<Vec<Record>>>() /// .unwrap(); /// /// assert_eq!(rows, /// vec![(Animal { count: 7, animal: "penguin".to_string() }, /// Animal { count: 2, animal: "red panda".to_string() }), /// (Animal { count: 10, animal: "cheetah".to_string() }, /// Animal { count: 9, animal: "fennec".to_string() }), /// 
(Animal { count: 4, animal: "armadillo".to_string() }, /// Animal { count: 3, animal: "quokka".to_string() })]); /// # } /// ``` pub fn reorder_columns(mut self, yes: bool) -> Reader<'a, R> { self.reorder_columns = yes; self } /// Allow the reader to ignore unused columns. /// /// By default, the headers must match the field names of the decodable /// type exactly with no extra columns. However, you may not care if there /// are extra, unused, columns in the CSV file, so this option is /// available. /// /// # Example /// /// In this example, the `pattern` column is unused and ignored: /// /// ```rust /// extern crate rustc_serialize; /// # extern crate typed_csv; /// # fn main() { /// /// #[derive(Debug, PartialEq, RustcDecodable)] /// struct Record { /// count: usize, /// animal: String, /// description: String, /// } /// /// let data = "\ /// count,animal,description,pattern /// 7,penguin,happy,tuxedo /// 10,cheetah,fast,spotted /// 4,armadillo,armored,scaly /// "; /// /// let rdr = typed_csv::Reader::from_string(data); /// let rows = rdr.ignore_unused_columns(true) /// .decode() /// .collect::<typed_csv::Result<Vec<Record>>>() /// .unwrap(); /// /// assert_eq!(rows, /// vec![Record { /// count: 7, /// animal: "penguin".to_string(), /// description: "happy".to_string(), /// }, /// Record { /// count: 10, /// animal: "cheetah".to_string(), /// description: "fast".to_string(), /// }, /// Record { /// count: 4, /// animal: "armadillo".to_string(), /// description: "armored".to_string(), /// }]); /// # } /// ``` pub fn ignore_unused_columns(mut self, yes: bool) -> Reader<'a, R> { self.ignore_unused_columns = yes; self } /// When matching headers to field names, use the given predicate. /// /// The default is `<[u8]>::eq`. The first argument to the predicate is the /// header, and the second argument is the field name. 
/// /// # Example /// /// This is an example of using a case-insensitive (ASCII) match: /// /// ```rust /// extern crate rustc_serialize; /// # extern crate typed_csv; /// # fn main() { /// /// use std::ascii::AsciiExt; /// /// #[derive(Debug, PartialEq, RustcDecodable)] /// struct Record { /// count: usize, /// animal: String, /// description: String, /// } /// /// let data = "\ /// COUNT,animal,DeScRiPtIoN /// 7,penguin,happy /// 10,cheetah,fast /// 4,armadillo,armored /// "; /// /// let rdr = typed_csv::Reader::from_string(data); /// let rows = rdr.headers_match_by(&<[u8]>::eq_ignore_ascii_case) /// .decode() /// .collect::<typed_csv::Result<Vec<Record>>>() /// .unwrap(); /// /// assert_eq!(rows, /// vec![Record { /// count: 7, /// animal: "penguin".to_string(), /// description: "happy".to_string(), /// }, /// Record { /// count: 10, /// animal: "cheetah".to_string(), /// description: "fast".to_string(), /// }, /// Record { /// count: 4, /// animal: "armadillo".to_string(), /// description: "armored".to_string(), /// }]); /// # } /// ``` // See https://github.com/Manishearth/rust-clippy/issues/740#issuecomment-277837213 #[allow(unknown_lints)] #[allow(needless_lifetimes)] pub fn headers_match_by<'b, P>(self, pred: &'b P) -> Reader<'b, R> where P: Fn(&[u8], &[u8]) -> bool { Reader { csv: self.csv, reorder_columns: self.reorder_columns, ignore_unused_columns: self.ignore_unused_columns, headers_match_by: pred, } } /// The delimiter to use when reading CSV data. /// /// Since the CSV reader is meant to be mostly encoding agnostic, you must /// specify the delimiter as a single ASCII byte. For example, to read /// tab-delimited data, you would use `b'\t'`. /// /// The default value is `b','`. pub fn delimiter(mut self, delimiter: u8) -> Reader<'a, R> { self.csv = self.csv.delimiter(delimiter); self } /// Set the record terminator to use when reading CSV data. 
/// /// In the vast majority of situations, you'll want to use the default /// value, `RecordTerminator::CRLF`, which automatically handles `\r`, /// `\n` or `\r\n` as record terminators. (Notably, this is a special /// case since two characters can correspond to a single terminator token.) /// /// However, you may use `RecordTerminator::Any` to specify any ASCII /// character to use as the record terminator. For example, you could /// use `RecordTerminator::Any(b'\n')` to only accept line feeds as /// record terminators, or `b'\x1e'` for the ASCII record separator. pub fn record_terminator(mut self, term: RecordTerminator) -> Reader<'a, R> { self.csv = self.csv.record_terminator(term); self } /// Set the quote character to use when reading CSV data. /// /// Since the CSV reader is meant to be mostly encoding agnostic, you must /// specify the quote as a single ASCII byte. For example, to read /// single quoted data, you would use `b'\''`. /// /// The default value is `b'"'`. /// /// If `quote` is `None`, then no quoting will be used. pub fn quote(mut self, quote: u8) -> Reader<'a, R> { self.csv = self.csv.quote(quote); self } /// Set the escape character to use when reading CSV data. /// /// Since the CSV reader is meant to be mostly encoding agnostic, you must /// specify the escape as a single ASCII byte. /// /// When set to `None` (which is the default), the "doubling" escape /// is used for quote character. /// /// When set to something other than `None`, it is used as the escape /// character for quotes. (e.g., `b'\\'`.) pub fn escape(mut self, escape: Option<u8>) -> Reader<'a, R> { self.csv = self.csv.escape(escape); self } /// Enable double quote escapes. /// /// When disabled, doubled quotes are not interpreted as escapes. pub fn double_quote(mut self, yes: bool) -> Reader<'a, R> { self.csv = self.csv.double_quote(yes); self } /// A convenience method for reading ASCII delimited text. 
/// /// This sets the delimiter and record terminator to the ASCII unit /// separator (`\x1f`) and record separator (`\x1e`), respectively. /// /// Since ASCII delimited text is meant to be unquoted, this also sets /// `quote` to `None`. pub fn ascii(mut self) -> Reader<'a, R> { self.csv = self.csv.ascii(); self } } /// An iterator of decoded records. /// /// The lifetime parameter `'a` refers to the lifetime of the predicate used /// for comparing headers to field names. The `R` type parameter refers to the /// type of the underlying reader. The `D` type parameter refers to the decoded /// type. pub struct DecodedRecords<'a, R: Read, D: Decodable> { p: csv::Reader<R>, reorder_columns: bool, ignore_unused_columns: bool, headers_match_by: &'a Fn(&[u8], &[u8]) -> bool, done_first: bool, /// Finished reading records or encountered an error. done: bool, /// Indices are column indices and values are the (optional) field indices. column_mapping: Vec<Option<usize>>, field_count: usize, record_type: PhantomData<D>, } /// Determinines mapping of columns to fields according to headers and field names. /// /// The mapping is a `Vec` of indices, where the indices of the `Vec` are the /// column indices, and the values of the `Vec` are the (optional) field indices. /// /// The first argument to the predicate is the header, and the second argument /// is the field name. fn map_headers<P>(headers: &[ByteString], field_names: &[ByteString], reorder: bool, ignore_unused_columns: bool, predicate: &P) -> Result<Vec<Option<usize>>> where P: ?Sized + Fn(&[u8], &[u8]) -> bool { if headers.len() < field_names.len() || (headers.len() > field_names.len() && !ignore_unused_columns) { return Err(Error::Decode(format!("The decodable type has {} field names, but there are \ {} headers", field_names.len(), headers.len()))); } if reorder { let mut mapping = vec![None; headers.len()]; // Headers used so far. 
let mut headers_used = vec![false; headers.len()]; for (field_index, field_name) in field_names.iter().enumerate() { // Search for the first matching header that hasn't been used yet. let found = headers.iter() .zip(headers_used.iter()) .position(|(header, used)| predicate(header, field_name) && !used); match found { Some(header_index) => { mapping[header_index] = Some(field_index); headers_used[header_index] = true; } None => { return Err(Error::Decode("Headers don't match field names".to_string())); } } } Ok(mapping) } else if ignore_unused_columns { let mut mapping = vec![None; headers.len()]; // Cursor to keep track of starting position in `headers` slice. let mut cursor = 0; for (field_index, field_name) in field_names.iter().enumerate() { // Search for the first matching header, starting from `cursor`. let found = (cursor..headers.len()) .find(|&header_index| predicate(&headers[header_index], field_name)); match found { Some(header_index) => { mapping[header_index] = Some(field_index); cursor = header_index + 1; } None => { return Err(Error::Decode("Headers don't match field names".to_string())); } } } Ok(mapping) } else if headers.iter().zip(field_names).all(|(h, f)| predicate(h, f)) { Ok((0..headers.len()).map(|i| Some(i)).collect()) } else { Err(Error::Decode("Headers don't match field names".to_string())) } } impl<'a, R: Read, D: Decodable> DecodedRecords<'a, R, D> { /// Processes the first row, setting `self.done_first, `self.field_count`, /// and `self.column_mapping`. /// /// This method is idempotent and fast on subsequent calls (since it uses /// `self.done_first` to track whether it's been called before). fn process_first_row(&mut self) -> Result<()> { if !self.done_first { self.done_first = true; // Always consume the header record. If headers have been read // before this point, then this is equivalent to a harmless clone // (and no parser progression). 
let headers = self.p.byte_headers(); // If the header row is empty, then the CSV data contains no records. if headers.as_ref().map(|r| r.is_empty()).unwrap_or(false) { assert!(self.p.done()); return Ok(()); } // Otherwise, unwrap the headers. let headers = headers?; // Get the field names of the decodable type and set // `self.field_count`. let mut field_names_decoder = FieldNamesDecoder::new(); D::decode(&mut field_names_decoder)?; let field_names = field_names_decoder.into_field_names(); // Set `field_count` and `column_mapping`. self.field_count = field_names.len(); self.column_mapping = map_headers(&headers, &field_names, self.reorder_columns, self.ignore_unused_columns, self.headers_match_by)?; } Ok(()) } /// This is wrapped in the `next()` method to ensure that `self.done` is /// always set properly. fn next_impl(&mut self) -> Option<Result<D>> { if let Err(err) = self.process_first_row() { return Some(Err(err)); } if self.p.done() { return None; } let mut record = vec![Vec::new(); self.field_count]; let mut column = 0; loop { match self.p.next_bytes() { NextField::EndOfRecord | NextField::EndOfCsv => { if record.is_empty() { return None; } break; } NextField::Error(err) => { return Some(Err(err)); } NextField::Data(field) => { if column < self.column_mapping.len() { if let Some(field_index) = self.column_mapping[column] { record[field_index] = field.to_vec(); } column += 1; } else { return Some(Err(Error::Decode("More data columns than headers" .to_string()))); } } } } Some(Decodable::decode(&mut Decoded::new(record))) } } impl<'a, R: Read, D: Decodable> Iterator for DecodedRecords<'a, R, D> { type Item = Result<D>; fn next(&mut self) -> Option<Result<D>> { if self.done { None } else { let next = self.next_impl(); match next { None | Some(Err(_)) => self.done = true, _ => () } next } } } #[cfg(test)] mod tests { use super::{Reader, Result}; use std::ascii::AsciiExt; #[derive(Debug, PartialEq, RustcDecodable)] struct SimpleStruct { a: usize, b: usize, } 
#[test] fn test_struct() { let rdr = Reader::from_string("a,b\n0,1\n2,3\n"); let records = rdr.decode().collect::<Result<Vec<SimpleStruct>>>().unwrap(); assert_eq!(records, vec![SimpleStruct { a: 0, b: 1 }, SimpleStruct { a: 2, b: 3 }]); } #[test] fn test_struct_allow_reorder() { let rdr = Reader::from_string("b,a\n0,1\n2,3\n"); let records = rdr.reorder_columns(true).decode().collect::<Result<Vec<SimpleStruct>>>().unwrap(); assert_eq!(records, vec![SimpleStruct { a: 1, b: 0 }, SimpleStruct { a: 3, b: 2 }]); } #[test] fn test_struct_ignore_unused_columns() { let rdr = Reader::from_string("a,b,c\n0,1,2\n3,4,5\n"); let records = rdr.ignore_unused_columns(true) .decode() .collect::<Result<Vec<SimpleStruct>>>() .unwrap(); assert_eq!(records, vec![SimpleStruct { a: 0, b: 1 }, SimpleStruct { a: 3, b: 4 }]); } #[test] fn test_struct_allow_reorder_and_ignore_unused_columns() { let rdr = Reader::from_string("b,c,a\n0,1,2\n3,4,5\n"); let records = rdr.reorder_columns(true) .ignore_unused_columns(true) .decode() .collect::<Result<Vec<SimpleStruct>>>() .unwrap(); assert_eq!(records, vec![SimpleStruct { a: 2, b: 0 }, SimpleStruct { a: 5, b: 3 }]); } #[test] fn test_struct_headers_match_by() { let rdr = Reader::from_string("a,B\n0,1\n2,3\n"); let records = rdr.headers_match_by(&<[u8]>::eq_ignore_ascii_case) .decode() .collect::<Result<Vec<SimpleStruct>>>() .unwrap(); assert_eq!(records, vec![SimpleStruct { a: 0, b: 1 }, SimpleStruct { a: 2, b: 3 }]); } #[test] fn test_struct_reordered_headers() { let rdr = Reader::from_string("b,a\n0,1\n2,3\n"); let err = rdr.decode().collect::<Result<Vec<SimpleStruct>>>().unwrap_err(); assert_eq!(format!("{}", err), "CSV decode error: Headers don't match field names".to_string()); } #[test] fn test_struct_wrong_case() { let rdr = Reader::from_string("a,B\n0,1\n2,3\n"); let err = rdr.decode().collect::<Result<Vec<SimpleStruct>>>().unwrap_err(); assert_eq!(format!("{}", err), "CSV decode error: Headers don't match field names".to_string()); } 
#[test] fn test_struct_misnamed_headers() { let rdr = Reader::from_string("c,d\n0,1\n"); let err = rdr.decode().collect::<Result<Vec<SimpleStruct>>>().unwrap_err(); assert_eq!(format!("{}", err), "CSV decode error: Headers don't match field names".to_string()); } #[test] fn test_struct_missing_header() { let rdr = Reader::from_string("a\n0\n"); let err = rdr.decode().collect::<Result<Vec<SimpleStruct>>>().unwrap_err(); assert_eq!(format!("{}", err), "CSV decode error: The decodable type has 2 field names, but there are 1 \ headers" .to_string()); } #[test] fn test_struct_extra_header() { let rdr = Reader::from_string("a,b,c\n0,1\n"); let err = rdr.decode().collect::<Result<Vec<SimpleStruct>>>().unwrap_err(); assert_eq!(format!("{}", err), "CSV decode error: The decodable type has 2 field names, but there are 3 \ headers" .to_string()); } #[test] fn test_struct_extra_data_column() { let rdr = Reader::from_string("a,b\n0,1,2\n"); let err = rdr.decode().collect::<Result<Vec<SimpleStruct>>>().unwrap_err(); assert_eq!(format!("{}", err), "CSV decode error: More data columns than headers".to_string()); } #[test] fn test_tuple_of_structs() { let rdr = Reader::from_string("a,b,a,b\n0,1,2,3\n4,5,6,7\n"); let records = rdr.decode().collect::<Result<Vec<(SimpleStruct, SimpleStruct)>>>().unwrap(); assert_eq!(records, vec![(SimpleStruct { a: 0, b: 1 }, SimpleStruct { a: 2, b: 3 }), (SimpleStruct { a: 4, b: 5 }, SimpleStruct { a: 6, b: 7 })]); } #[test] fn test_tuple_of_structs_allow_reorder() { let rdr = Reader::from_string("b,a,a,b\n0,1,2,3\n\n4,5,6,7\n"); let records = rdr.reorder_columns(true) .decode() .collect::<Result<Vec<(SimpleStruct, SimpleStruct)>>>() .unwrap(); assert_eq!(records, vec![(SimpleStruct { a: 1, b: 0 }, SimpleStruct { a: 2, b: 3 }), (SimpleStruct { a: 5, b: 4 }, SimpleStruct { a: 6, b: 7 })]); } #[test] fn test_tuple_of_structs_misnamed_headers() { let rdr = Reader::from_string("a,b,c,d\n0,1,2,3\n4,5,6,7\n"); let err = 
rdr.decode().collect::<Result<Vec<(SimpleStruct, SimpleStruct)>>>().unwrap_err(); assert_eq!(format!("{}", err), "CSV decode error: Headers don't match field names".to_string()); } }<|fim▁end|>
p: self.csv, reorder_columns: self.reorder_columns, ignore_unused_columns: self.ignore_unused_columns,
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 # # tBB is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # tBB is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. #<|fim▁hole|># along with this program. If not, see <http://www.gnu.org/licenses/>. """ This module takes care of representing and handling settings throughout tBB. """ import enum import re import datetime valid_item_name = re.compile(r'[a-zA-Z_][a-zA-Z0-9_]*') timedelta_parse_string = '%M:%S' class UndefinedValueException(Exception): def __init__(self): super().__init__('self.value needs to be defined before converting it.') class ConversionException(Exception): def __init__(self, value, value_type): super().__init__("couldn't convert '{}' to {}.".format(value, value_type)) class UnknownSettingException(Exception): def __init__(self, setting_path): self.setting_path = setting_path super().__init__("defined setting '{}' is unknown.".format(setting_path)) class InconsistentSettingTypeException(Exception): def __init__(self, setting_path, should_be, got): self.setting_path = setting_path self.should_be = should_be self.got = got super().__init__("setting type for '{}' should be {}. 
Got: {}.".format(setting_path, should_be, got)) class SettingsTypes(enum.Enum): unknown = -1 string = 0 # need no conversion integer = 1 # need no conversion boolean = 2 # need no conversion timedelta = 3 settings_item = 4 list = 5 # need no conversion class SettingsItem: def __init__(self, name, value_type): if type(name) != str: raise TypeError('argument name must be a string.') if not isinstance(value_type, SettingsTypes): raise TypeError('argument value_types must be a SettingsTypes instance.') if re.match(valid_item_name, name) is None: raise ValueError('settings item name is not acceptable. See tBB.settings.valid_item_name.') else: try: self.__getattribute__(name) except AttributeError: pass else: raise ValueError('settings item name is not acceptable. Name reserved.') self.name = name self.value = None self.value_type = value_type def convert(self): if self.value is None: raise UndefinedValueException() # static conversions: basically, only do type checking if self.value_type == SettingsTypes.string: if type(self.value) != str: raise ConversionException(self.value, self.value_type) elif self.value_type == SettingsTypes.integer: if type(self.value) != int: raise ConversionException(self.value, self.value_type) elif self.value_type == SettingsTypes.boolean: if type(self.value) != bool: raise ConversionException(self.value, self.value_type) # complex conversions elif self.value_type == SettingsTypes.timedelta: self.value = self.convert_to_timedelta(self.value) elif self.value_type == SettingsTypes.settings_item: self.value = self.convert_to_settings_item(self.value) elif self.value_type == SettingsTypes.unknown: # make a guess on what it could be if type(self.value) == int: self.value_type = SettingsTypes.integer elif type(self.value) == bool: self.value_type = SettingsTypes.boolean elif type(self.value) == list: self.value_type = SettingsTypes.list elif type(self.value) == dict: try: self.value = self.convert_to_settings_item(self.value) except 
ConversionException as exc: raise exc else: self.value_type = SettingsTypes.settings_item elif type(self.value) == str: try: self.value = self.convert_to_timedelta(self.value) except ConversionException: self.value_type = SettingsTypes.string else: self.value_type = SettingsTypes.timedelta @staticmethod def convert_to_timedelta(value): try: tmp = datetime.datetime.strptime(value, timedelta_parse_string) return datetime.timedelta(minutes=tmp.minute, seconds=tmp.second) except (ValueError, TypeError) as exc: raise ConversionException(value, SettingsTypes.timedelta) from exc @staticmethod def convert_to_settings_item(value): if type(value) != dict: raise ConversionException(value, SettingsTypes.settings_item) children = {} for name, elem in value.items(): new_item = SettingsItem(name=name, value_type=SettingsTypes.unknown) new_item.value = elem children[name] = new_item for child in children.values(): child.convert() return children def __getattr__(self, item): try: return self.__getattribute__(item) except AttributeError as exc: if self.value_type == SettingsTypes.settings_item and self.value is not None: if item in self.value.keys(): return self.value[item] else: raise exc def __repr__(self): return "<{} '{}' ({})>".format(self.__class__.__name__, self.name, self.value_type) class Settings: def __init__(self, tree): if not isinstance(tree, SettingsItem): raise TypeError("expected SettingsItem instance for argument tree. Got: '{}'.".format(tree)) self.tree = tree def update(self, new_tree, scope=''): if not isinstance(new_tree, SettingsItem): raise TypeError("expected SettingsItem instance for argument tree. " "Got: '{}'.".format(new_tree)) if type(new_tree.value) != dict: walked_path = 'self.tree' try: setting = self.tree for selector in scope.split('.')[1:]: walked_path += '.' 
+ selector setting = getattr(setting, selector) except AttributeError: raise UnknownSettingException(walked_path) else: if setting.value_type != new_tree.value_type: raise InconsistentSettingTypeException(scope, setting.value_type, new_tree.value_type) setting.value = new_tree.value else: for name in new_tree.value: if new_tree.value_type == SettingsTypes.settings_item: self.update(new_tree.value[name], scope=self.tree.name+scope+'.'+name) else: raise TypeError("expected iterators inside new_tree to be SettingsTypes.settings_item. " "Got: {}".format(new_tree.value_type)) @staticmethod def parse(json_data, name='toplevel'): tree = SettingsItem(name=name, value_type=SettingsTypes.settings_item) tree.value = json_data tree.convert() return tree def __getattr__(self, item): return self.tree.__getattr__(item)<|fim▁end|>
# You should have received a copy of the GNU General Public License
<|file_name|>block_metadata_config_tests.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::{ config::{ block_metadata::{build_block_metadata, is_new_block, Entry}, global::Config as GlobalConfig, }, errors::*, tests::{ global_config_tests::parse_and_build_config as parse_and_build_global_config, parse_each_line_as, }, }; use diem_types::block_metadata::BlockMetadata; #[test] fn parse_simple_positive() { for s in &[ "//! proposer: alice", "//! proposer\t:\tfoobar42", "//!\nproposer\n:\nfoobar42", ] { s.parse::<Entry>().unwrap(); } } #[test] fn parse_simple_negative() { for s in &["//!", "//! ", "//! sender: alice", "//! proposer:"] { s.parse::<Entry>().unwrap_err(); } } #[test] fn parse_timestamp() { for s in &[ "//! block-time:77", "//!block-time:0", "//! block-time: 123", ] { s.parse::<Entry>().unwrap(); } for s in &[ "//!block-time:", "//!block-time:abc", "//!block-time: 123, 45", ] { s.parse::<Entry>().unwrap_err(); } } #[test] fn parse_new_transaction() { assert!(is_new_block("//! block-prologue")); assert!(is_new_block("//!block-prologue ")); assert!(!is_new_block("//")); assert!(!is_new_block("//! new block")); assert!(!is_new_block("//! block")); } fn parse_and_build_config(global_config: &GlobalConfig, s: &str) -> Result<BlockMetadata> { build_block_metadata(&global_config, &parse_each_line_as::<Entry>(s)?) } #[rustfmt::skip] #[test] fn build_transaction_config_1() { let global = parse_and_build_global_config(r" //! account: alice ").unwrap(); parse_and_build_config(&global, r" //! proposer: alice //! block-time: 6 ").unwrap(); parse_and_build_config(&global, r" //! proposer: alice ").unwrap_err(); parse_and_build_config(&global, r" //! block-time: 6 ").unwrap_err(); } #[rustfmt::skip] #[test] fn build_transaction_config_3() {<|fim▁hole|> ").unwrap(); parse_and_build_config(&global, r" //! proposer: bob //! block-time: 6 ").unwrap_err(); }<|fim▁end|>
let global = parse_and_build_global_config(r" //! account: alice
<|file_name|>qsr_rcc8.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import print_function, division from qsrlib_qsrs.qsr_rcc_abstractclass import QSR_RCC_Abstractclass class QSR_RCC8(QSR_RCC_Abstractclass): """Symmetrical RCC5 relations. Values of the abstract properties * **_unique_id** = "rcc8" * **_all_possible_relations** = ("dc", "ec", "po", "eq", "tpp", "ntpp", "tppi", "ntppi") * **_dtype** = "bounding_boxes_2d" QSR specific `dynamic_args` * **'quantisation_factor'** (*float*) = 0.0: Threshold that determines whether two rectangle regions are disconnected. .. seealso:: For further details about RCC8, refer to its :doc:`description. <../handwritten/qsrs/rcc8>` """ _unique_id = "rcc8" """str: Unique identifier name of the QSR.""" _all_possible_relations = ("dc", "ec", "po", "eq", "tpp", "ntpp", "tppi", "ntppi") """tuple: All possible relations of the QSR.""" def __init__(self): """Constructor.""" super(QSR_RCC8, self).__init__() def _convert_to_requested_rcc_type(self, qsr): """No need for remapping. :param qsr: RCC8 value. :type qsr: str :return: RCC8 value.<|fim▁hole|><|fim▁end|>
:rtype: str """ return qsr
<|file_name|>_default.js<|end_file_name|><|fim▁begin|>function fixPosition() { console.log($(window).scrollTop()); // if its anywhee but at the very top of the screen, fix it if ($(window).scrollTop() >= headerHeight) { // magic number offset that just feels right to prevent the 'bounce' // if (headPosition.top > 0){ $('body').addClass('js-fix-positions'); } // otherwise, make sure its not fixed else { $('body').removeClass('js-fix-positions'); } }; //Generate the weight guide meter with JS as its pretty useless without it, without some server side intervention function createBasketWeightGuide(){ // create the element for the guide $('.af__basket__weight-guide--label').after('<div class="js-af__weight-guide__wrapper"></div>'); $('.js-af__weight-guide__wrapper').append('<div class="js-af__weight-guide__meter"></div>'); $('.af__product__add-to-basket').submit(function(e){ e.preventDefault(); //var $multiplier= weightGuideListener(); }) } function weightGuideListener(){ var $bar = $('.js-af__weight-guide__meter'); //Didnt work as expected, so used this for speed: http://stackoverflow.com/questions/12945352/change-width-on-click-using-jquery var $percentage = (100 * parseFloat($bar.css('width')) / parseFloat($bar.parent().css('width')) +10 + '%'); $bar.css("width", $percentage); var $message = $('.af__basket__weight-guide--cta'); <|fim▁hole|> currentWidth=parseInt($percentage); console.log(currentWidth); // cannot use switch for less than if (currentWidth <= 21 ){ $message.text('Plenty of room'); } else if (currentWidth <= 45){ $bar.css('background-color', '#ee0'); } else if (currentWidth <= 65){ $bar.css('background-color', '#c1ea39'); $message.text('Getting there...') } else if (currentWidth <= 80){ $message.text('Room for a little one?'); } else if (currentWidth <= 89){ $message.text('Almost full!'); } else if (currentWidth >= 95 && currentWidth <= 99){ $message.text('Lookin\' good!'); } else if (currentWidth <= 99.9){ $bar.css('background-color', '#3ece38'); } else 
{ $bar.css('background-color', '#d00'); $bar.css("width", "100%"); $message.text('(Delivery band 2 logic)'); } } function selectOnFocus(){ $('.af__product__add-to-basket input').focus(function(){ this.select(); }) }; $(document).ready(function(){ headerHeight=$('.af__header').outerHeight(); // icnludes padding and margins scrollIntervalID = setInterval(fixPosition, 16); // = 60 FPS createBasketWeightGuide(); selectOnFocus(); });<|fim▁end|>
<|file_name|>canvas-test.js<|end_file_name|><|fim▁begin|>import { moduleFor, test } from 'ember-qunit'; <|fim▁hole|> // needs: ['service:foo'] }); // Replace this with your real tests. test('it exists', function(assert) { var service = this.subject(); assert.ok(service); });<|fim▁end|>
moduleFor('service:canvas', { // Specify the other units that are required for this test.